#define PY_SSIZE_T_MAX INT_MAX
#define PY_SSIZE_T_MIN INT_MIN
typedef Py_ssize_t (*lenfunc)(PyObject *);
-#define PyInt_FromSsize_t(x) PyInt_FromLong(x)
-typedef intargfunc ssizeargfunc;
+#define PyInt_FromSsize_t(x) PyInt_FromLong(x)
+typedef intargfunc ssizeargfunc;
#endif
if (state == NULL)
return NULL;
- module = PyImport_ImportModule("sqlalchemy.engine.base");
+ module = PyImport_ImportModule("sqlalchemy.engine.result");
if (module == NULL)
return NULL;
long index;
int key_fallback = 0;
int tuple_check = 0;
-
+
if (PyInt_CheckExact(key)) {
index = PyInt_AS_LONG(key);
} else if (PyLong_CheckExact(key)) {
value = PySequence_GetItem(row, index);
tuple_check = 0;
}
-
+
if (value == NULL)
return NULL;
tmp = BaseRowProxy_subscript(self, name);
if (tmp == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) {
PyErr_Format(
- PyExc_AttributeError,
+ PyExc_AttributeError,
"Could not locate column in row for column '%.200s'",
PyString_AsString(name)
);
return -1;
}
- module = PyImport_ImportModule("sqlalchemy.engine.base");
+ module = PyImport_ImportModule("sqlalchemy.engine.result");
if (module == NULL)
return -1;
"""
-import datetime, itertools, re
+import datetime
+import itertools
+import re
-from sqlalchemy import exc, schema, sql, util, processors
-from sqlalchemy.sql import operators as sql_operators, expression as sql_expr
-from sqlalchemy.sql import compiler, visitors
-from sqlalchemy.engine import base as engine_base, default, reflection
-from sqlalchemy import types as sqltypes
+from ... import exc, schema, sql, util, processors
+from ...sql import expression as sql_expr
+from ...sql import compiler, visitors
+from ...engine import result as _result, default, reflection
+from ... import types as sqltypes
class _StringType(sqltypes.String):
for column in self.cursor.description:
if column[1] in ('Long Binary', 'Long', 'Long Unicode'):
return MaxDBResultProxy(self)
- return engine_base.ResultProxy(self)
+ return _result.ResultProxy(self)
@property
def rowcount(self):
return self._execute_scalar("SELECT %s.NEXTVAL FROM DUAL" % (
self.dialect.identifier_preparer.format_sequence(seq)))
-class MaxDBCachedColumnRow(engine_base.RowProxy):
+class MaxDBCachedColumnRow(_result.RowProxy):
"""A RowProxy that only runs result_processors once per column."""
def __init__(self, parent, row):
raise AttributeError(name)
-class MaxDBResultProxy(engine_base.ResultProxy):
+class MaxDBResultProxy(_result.ResultProxy):
_process_row = MaxDBCachedColumnRow
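
The ``_process_row`` hook above is how a dialect swaps in its own row class: ``ResultProxy`` instantiates ``self._process_row(...)`` for every fetched row. A rough, self-contained sketch of the once-per-column caching idea the docstring describes — names here are hypothetical and this is not the dialect's actual implementation::

    class CachedRowSketch(object):
        """Run each result processor at most once, caching by column index."""

        def __init__(self, row, processors):
            self._row = row
            self._processors = processors
            self._cache = {}

        def __getitem__(self, index):
            if index not in self._cache:
                proc = self._processors[index]
                raw = self._row[index]
                self._cache[index] = proc(raw) if proc is not None else raw
            return self._cache[index]

    row = CachedRowSketch(('42',), [int])
    assert row[0] == 42
    assert row[0] == 42   # second access hits the cache; int() ran once
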
class MaxDBCompiler(compiler.SQLCompiler):
"""
-from sqlalchemy.dialects.oracle.base import OracleCompiler, OracleDialect, \
+from .base import OracleCompiler, OracleDialect, \
RESERVED_WORDS, OracleExecutionContext
-from sqlalchemy.dialects.oracle import base as oracle
-from sqlalchemy.engine import base
+from . import base as oracle
+from ...engine import result as _result
from sqlalchemy import types as sqltypes, util, exc, processors
from datetime import datetime
import random
for column in self.cursor.description:
type_code = column[1]
if type_code in self.dialect._cx_oracle_binary_types:
- result = base.BufferedColumnResultProxy(self)
+ result = _result.BufferedColumnResultProxy(self)
if result is None:
- result = base.ResultProxy(self)
+ result = _result.ResultProxy(self)
if hasattr(self, 'out_parameters'):
if self.compiled_parameters is not None and \
return super(OracleExecutionContext_cx_oracle_with_unicode, self).\
_execute_scalar(unicode(stmt))
-class ReturningResultProxy(base.FullyBufferedResultProxy):
+class ReturningResultProxy(_result.FullyBufferedResultProxy):
"""Result proxy which stuffs the _returning clause + outparams into the fetch."""
def __init__(self, context, returning_params):
from sqlalchemy import sql, types as sqltypes, util
from sqlalchemy.connectors.zxJDBC import ZxJDBCConnector
from sqlalchemy.dialects.oracle.base import OracleCompiler, OracleDialect, OracleExecutionContext
-from sqlalchemy.engine import base, default
+from sqlalchemy.engine import result as _result
from sqlalchemy.sql import expression
import collections
pass
self.statement.close()
- return base.ResultProxy(self)
+ return _result.ResultProxy(self)
def create_cursor(self):
cursor = self._dbapi_connection.cursor()
return cursor
-class ReturningResultProxy(base.FullyBufferedResultProxy):
+class ReturningResultProxy(_result.FullyBufferedResultProxy):
"""ResultProxy backed by the RETURNING ResultSet results."""
from ... import util, exc
from ...util.compat import decimal
from ... import processors
-from ...engine import base
+from ...engine import result as _result
from ...sql import expression
from ... import types as sqltypes
from .base import PGDialect, PGCompiler, \
self._log_notices(self.cursor)
if self.__is_server_side:
- return base.BufferedRowResultProxy(self)
+ return _result.BufferedRowResultProxy(self)
else:
- return base.ResultProxy(self)
+ return _result.ResultProxy(self)
def _log_notices(self, cursor):
for notice in cursor.connection.notices:
# not sure what this was used for
#import sqlalchemy.databases
-from .base import (
- BufferedColumnResultProxy,
- BufferedColumnRow,
- BufferedRowResultProxy,
+from .interfaces import (
Compiled,
Connectable,
- Connection,
Dialect,
- Engine,
ExecutionContext,
+ TypeCompiler
+)
+
+from .base import (
+ Connection,
+ Engine,
NestedTransaction,
- ResultProxy,
RootTransaction,
- RowProxy,
Transaction,
TwoPhaseTransaction,
- TypeCompiler
)
-from . import strategies
-from .. import util
+from .result import (
+ BufferedColumnResultProxy,
+ BufferedColumnRow,
+ BufferedRowResultProxy,
+ ResultProxy,
+ RowProxy,
+ )
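
With the split, the public names stay re-exported from ``sqlalchemy.engine``, so both import paths should resolve to the same class. A quick check, assuming the re-exports above::

    from sqlalchemy.engine import ResultProxy
    from sqlalchemy.engine.result import ResultProxy as _RP

    assert ResultProxy is _RP
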
-__all__ = (
- 'BufferedColumnResultProxy',
- 'BufferedColumnRow',
- 'BufferedRowResultProxy',
- 'Compiled',
- 'Connectable',
- 'Connection',
- 'Dialect',
- 'Engine',
- 'ExecutionContext',
- 'NestedTransaction',
- 'ResultProxy',
- 'RootTransaction',
- 'RowProxy',
- 'Transaction',
- 'TwoPhaseTransaction',
- 'TypeCompiler',
- 'create_engine',
- 'engine_from_config',
+from .util import (
+ connection_memoize
)
+from . import util, strategies
default_strategy = 'plain'
+
def create_engine(*args, **kwargs):
"""Create a new :class:`.Engine` instance.
the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.
``**kwargs`` takes a wide variety of options which are routed
- towards their appropriate components. Arguments may be
- specific to the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the
- :class:`.Pool`. Specific dialects also accept keyword arguments that
- are unique to that dialect. Here, we describe the parameters
- that are common to most :func:`.create_engine()` usage.
+ towards their appropriate components. Arguments may be specific
+ to the :class:`.Engine`, the underlying :class:`.Dialect`, as well as
+ the :class:`.Pool`. Specific dialects also accept keyword
+ arguments that are unique to that dialect. Here, we describe the
+ parameters that are common to most :func:`.create_engine()` usage.
Once established, the newly resulting :class:`.Engine` will
request a connection from the underlying :class:`.Pool` once
id.
:param pool_size=5: the number of connections to keep open
- inside the connection pool. This used with :class:`~sqlalchemy.pool.QueuePool` as
+                      inside the connection pool. This is used with
+ :class:`~sqlalchemy.pool.QueuePool` as
well as :class:`~sqlalchemy.pool.SingletonThreadPool`. With
:class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
of 0 indicates no limit; to disable pooling, set ``poolclass`` to
strategy = strategies.strategies[strategy]
return strategy.create(*args, **kwargs)
+
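
As the docstring above describes, keyword arguments are routed by name to the :class:`.Engine`, the :class:`.Dialect`, or the :class:`.Pool`; a typical call looks like the following (illustrative URL and values; the postgresql URL assumes a DBAPI such as psycopg2 is installed)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql://scott:tiger@localhost/test",  # illustrative URL
        pool_size=10,       # routed to the Pool
        pool_recycle=3600,  # routed to the Pool
        echo=True,          # routed to the Engine's logging
    )
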
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
"""Create a new Engine instance using a configuration dictionary.
arguments.
"""
- opts = _coerce_config(configuration, prefix)
+ opts = util._coerce_config(configuration, prefix)
opts.update(kwargs)
url = opts.pop('url')
return create_engine(url, **opts)
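
A minimal usage sketch: keys sharing the prefix are stripped and coerced (now via ``util._coerce_config``, as wired above), and the remainder is handed to :func:`.create_engine`::

    from sqlalchemy import engine_from_config

    config = {
        'sqlalchemy.url': 'sqlite://',
        'sqlalchemy.echo': 'true',        # coerced to a boolean
        'sqlalchemy.pool_recycle': '50',  # coerced to an int
    }
    engine = engine_from_config(config, prefix='sqlalchemy.')
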
-def _coerce_config(configuration, prefix):
- """Convert configuration values to expected types."""
-
- options = dict((key[len(prefix):], configuration[key])
- for key in configuration
- if key.startswith(prefix))
- for option, type_ in (
- ('convert_unicode', util.bool_or_str('force')),
- ('pool_timeout', int),
- ('echo', util.bool_or_str('debug')),
- ('echo_pool', util.bool_or_str('debug')),
- ('pool_recycle', int),
- ('pool_size', int),
- ('max_overflow', int),
- ('pool_threadlocal', bool),
- ('use_native_unicode', bool),
- ):
- util.coerce_kw_type(options, option, type_)
- return options
+
+__all__ = (
+ 'create_engine',
+ 'engine_from_config',
+ )
+
+
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""Basic components for SQL execution and interfacing with DB-API.
+"""Defines :class:`.Connection` and :class:`.Engine`.
-Defines the basic components used to interface DB-API modules with
-higher-level statement-construction, connection-management, execution
-and result contexts.
"""
-__all__ = [
- 'BufferedColumnResultProxy', 'BufferedColumnRow',
- 'BufferedRowResultProxy', 'Compiled', 'Connectable', 'Connection',
- 'Dialect', 'Engine', 'ExecutionContext', 'NestedTransaction',
- 'ResultProxy', 'RootTransaction', 'RowProxy',
- 'StringIO', 'Transaction', 'TwoPhaseTransaction',
- 'connection_memoize']
-import StringIO
import sys
-from itertools import izip, chain
-from .. import exc, schema, util, types, log, interfaces, \
- event, events
+from itertools import chain
+from .. import exc, schema, util, log, interfaces
from ..sql import expression, util as sql_util
-import collections
-
-class Dialect(object):
- """Define the behavior of a specific database and DB-API combination.
-
- Any aspect of metadata definition, SQL query generation,
- execution, result-set handling, or anything else which varies
- between databases is defined under the general category of the
- Dialect. The Dialect acts as a factory for other
- database-specific object implementations including
- ExecutionContext, Compiled, DefaultGenerator, and TypeEngine.
-
- All Dialects implement the following attributes:
-
- name
- identifying name for the dialect from a DBAPI-neutral point of view
- (i.e. 'sqlite')
-
- driver
- identifying name for the dialect's DBAPI
-
- positional
- True if the paramstyle for this Dialect is positional.
-
- paramstyle
- the paramstyle to be used (some DB-APIs support multiple
- paramstyles).
-
- convert_unicode
- True if Unicode conversion should be applied to all ``str``
- types.
-
- encoding
- type of encoding to use for unicode, usually defaults to
- 'utf-8'.
-
- statement_compiler
- a :class:`~Compiled` class used to compile SQL statements
-
- ddl_compiler
- a :class:`~Compiled` class used to compile DDL statements
-
- server_version_info
- a tuple containing a version number for the DB backend in use.
- This value is only available for supporting dialects, and is
- typically populated during the initial connection to the database.
-
- default_schema_name
- the name of the default schema. This value is only available for
- supporting dialects, and is typically populated during the
- initial connection to the database.
-
- execution_ctx_cls
- a :class:`.ExecutionContext` class used to handle statement execution
-
- execute_sequence_format
- either the 'tuple' or 'list' type, depending on what cursor.execute()
- accepts for the second argument (they vary).
-
- preparer
- a :class:`~sqlalchemy.sql.compiler.IdentifierPreparer` class used to
- quote identifiers.
-
- supports_alter
- ``True`` if the database supports ``ALTER TABLE``.
-
- max_identifier_length
- The maximum length of identifier names.
-
- supports_unicode_statements
- Indicate whether the DB-API can receive SQL statements as Python
- unicode strings
-
- supports_unicode_binds
- Indicate whether the DB-API can receive string bind parameters
- as Python unicode strings
-
- supports_sane_rowcount
- Indicate whether the dialect properly implements rowcount for
- ``UPDATE`` and ``DELETE`` statements.
-
- supports_sane_multi_rowcount
- Indicate whether the dialect properly implements rowcount for
- ``UPDATE`` and ``DELETE`` statements when executed via
- executemany.
-
- preexecute_autoincrement_sequences
- True if 'implicit' primary key functions must be executed separately
- in order to get their value. This is currently oriented towards
- Postgresql.
-
- implicit_returning
- use RETURNING or equivalent during INSERT execution in order to load
- newly generated primary keys and other column defaults in one execution,
- which are then available via inserted_primary_key.
- If an insert statement has returning() specified explicitly,
- the "implicit" functionality is not used and inserted_primary_key
- will not be available.
-
- dbapi_type_map
- A mapping of DB-API type objects present in this Dialect's
- DB-API implementation mapped to TypeEngine implementations used
- by the dialect.
-
- This is used to apply types to result sets based on the DB-API
- types present in cursor.description; it only takes effect for
- result sets against textual statements where no explicit
- typemap was present.
-
- colspecs
- A dictionary of TypeEngine classes from sqlalchemy.types mapped
- to subclasses that are specific to the dialect class. This
- dictionary is class-level only and is not accessed from the
- dialect instance itself.
-
- supports_default_values
- Indicates if the construct ``INSERT INTO tablename DEFAULT
- VALUES`` is supported
-
- supports_sequences
- Indicates if the dialect supports CREATE SEQUENCE or similar.
-
- sequences_optional
- If True, indicates if the "optional" flag on the Sequence() construct
- should signal to not generate a CREATE SEQUENCE. Applies only to
- dialects that support sequences. Currently used only to allow Postgresql
- SERIAL to be used on a column that specifies Sequence() for usage on
- other backends.
-
- supports_native_enum
- Indicates if the dialect supports a native ENUM construct.
- This will prevent types.Enum from generating a CHECK
- constraint when that type is used.
-
- supports_native_boolean
- Indicates if the dialect supports a native boolean construct.
- This will prevent types.Boolean from generating a CHECK
- constraint when that type is used.
-
- """
-
- def create_connect_args(self, url):
- """Build DB-API compatible connection arguments.
-
- Given a :class:`~sqlalchemy.engine.url.URL` object, returns a tuple
- consisting of a `*args`/`**kwargs` suitable to send directly
- to the dbapi's connect function.
-
- """
-
- raise NotImplementedError()
-
- @classmethod
- def type_descriptor(cls, typeobj):
- """Transform a generic type to a dialect-specific type.
-
- Dialect classes will usually use the
- :func:`~sqlalchemy.types.adapt_type` function in the types module to
- make this job easy.
-
- The returned result is cached *per dialect class* so can
- contain no dialect-instance state.
-
- """
-
- raise NotImplementedError()
-
- def initialize(self, connection):
- """Called during strategized creation of the dialect with a
- connection.
-
- Allows dialects to configure options based on server version info or
- other properties.
-
- The connection passed here is a SQLAlchemy Connection object,
- with full capabilities.
-
- The initalize() method of the base dialect should be called via
- super().
-
- """
-
- pass
-
- def reflecttable(self, connection, table, include_columns=None):
- """Load table description from the database.
-
- Given a :class:`.Connection` and a
- :class:`~sqlalchemy.schema.Table` object, reflect its columns and
- properties from the database. If include_columns (a list or
- set) is specified, limit the autoload to the given column
- names.
-
- The default implementation uses the
- :class:`~sqlalchemy.engine.reflection.Inspector` interface to
- provide the output, building upon the granular table/column/
- constraint etc. methods of :class:`.Dialect`.
-
- """
-
- raise NotImplementedError()
-
- def get_columns(self, connection, table_name, schema=None, **kw):
- """Return information about columns in `table_name`.
-
- Given a :class:`.Connection`, a string
- `table_name`, and an optional string `schema`, return column
- information as a list of dictionaries with these keys:
-
- name
- the column's name
-
- type
- [sqlalchemy.types#TypeEngine]
-
- nullable
- boolean
-
- default
- the column's default value
-
- autoincrement
- boolean
-
- sequence
- a dictionary of the form
- {'name' : str, 'start' :int, 'increment': int}
-
- Additional column attributes may be present.
- """
-
- raise NotImplementedError()
-
- def get_primary_keys(self, connection, table_name, schema=None, **kw):
- """Return information about primary keys in `table_name`.
-
-
- Deprecated. This method is only called by the default
- implementation of :meth:`get_pk_constraint()`. Dialects should
- instead implement this method directly.
-
- """
-
- raise NotImplementedError()
-
- def get_pk_constraint(self, connection, table_name, schema=None, **kw):
- """Return information about the primary key constraint on
- table_name`.
-
- Given a :class:`.Connection`, a string
- `table_name`, and an optional string `schema`, return primary
- key information as a dictionary with these keys:
-
- constrained_columns
- a list of column names that make up the primary key
-
- name
- optional name of the primary key constraint.
-
- """
- raise NotImplementedError()
-
- def get_foreign_keys(self, connection, table_name, schema=None, **kw):
- """Return information about foreign_keys in `table_name`.
-
- Given a :class:`.Connection`, a string
- `table_name`, and an optional string `schema`, return foreign
- key information as a list of dicts with these keys:
-
- name
- the constraint's name
-
- constrained_columns
- a list of column names that make up the foreign key
-
- referred_schema
- the name of the referred schema
-
- referred_table
- the name of the referred table
-
- referred_columns
- a list of column names in the referred table that correspond to
- constrained_columns
- """
-
- raise NotImplementedError()
-
- def get_table_names(self, connection, schema=None, **kw):
- """Return a list of table names for `schema`."""
-
- raise NotImplementedError
-
- def get_view_names(self, connection, schema=None, **kw):
- """Return a list of all view names available in the database.
-
- schema:
- Optional, retrieve names from a non-default schema.
- """
-
- raise NotImplementedError()
-
- def get_view_definition(self, connection, view_name, schema=None, **kw):
- """Return view definition.
-
- Given a :class:`.Connection`, a string
- `view_name`, and an optional string `schema`, return the view
- definition.
- """
-
- raise NotImplementedError()
-
- def get_indexes(self, connection, table_name, schema=None, **kw):
- """Return information about indexes in `table_name`.
-
- Given a :class:`.Connection`, a string
- `table_name` and an optional string `schema`, return index
- information as a list of dictionaries with these keys:
-
- name
- the index's name
-
- column_names
- list of column names in order
-
- unique
- boolean
- """
-
- raise NotImplementedError()
-
- def normalize_name(self, name):
- """convert the given name to lowercase if it is detected as
- case insensitive.
-
- this method is only used if the dialect defines
- requires_name_normalize=True.
-
- """
- raise NotImplementedError()
-
- def denormalize_name(self, name):
- """convert the given name to a case insensitive identifier
- for the backend if it is an all-lowercase name.
-
- this method is only used if the dialect defines
- requires_name_normalize=True.
-
- """
- raise NotImplementedError()
-
- def has_table(self, connection, table_name, schema=None):
- """Check the existence of a particular table in the database.
-
- Given a :class:`.Connection` object and a string
- `table_name`, return True if the given table (possibly within
- the specified `schema`) exists in the database, False
- otherwise.
- """
-
- raise NotImplementedError()
-
- def has_sequence(self, connection, sequence_name, schema=None):
- """Check the existence of a particular sequence in the database.
-
- Given a :class:`.Connection` object and a string
- `sequence_name`, return True if the given sequence exists in
- the database, False otherwise.
- """
-
- raise NotImplementedError()
-
- def _get_server_version_info(self, connection):
- """Retrieve the server version info from the given connection.
-
- This is used by the default implementation to populate the
- "server_version_info" attribute and is called exactly
- once upon first connect.
-
- """
-
- raise NotImplementedError()
-
- def _get_default_schema_name(self, connection):
- """Return the string name of the currently selected schema from
- the given connection.
-
- This is used by the default implementation to populate the
- "default_schema_name" attribute and is called exactly
- once upon first connect.
-
- """
-
- raise NotImplementedError()
-
- def do_begin(self, connection):
- """Provide an implementation of *connection.begin()*, given a
- DB-API connection."""
-
- raise NotImplementedError()
-
- def do_rollback(self, connection):
- """Provide an implementation of *connection.rollback()*, given
- a DB-API connection."""
-
- raise NotImplementedError()
-
- def create_xid(self):
- """Create a two-phase transaction ID.
-
- This id will be passed to do_begin_twophase(),
- do_rollback_twophase(), do_commit_twophase(). Its format is
- unspecified.
- """
-
- raise NotImplementedError()
-
- def do_commit(self, connection):
- """Provide an implementation of *connection.commit()*, given a
- DB-API connection."""
-
- raise NotImplementedError()
-
- def do_savepoint(self, connection, name):
- """Create a savepoint with the given name on a SQLAlchemy
- connection."""
-
- raise NotImplementedError()
-
- def do_rollback_to_savepoint(self, connection, name):
- """Rollback a SQL Alchemy connection to the named savepoint."""
-
- raise NotImplementedError()
-
- def do_release_savepoint(self, connection, name):
- """Release the named savepoint on a SQL Alchemy connection."""
-
- raise NotImplementedError()
-
- def do_begin_twophase(self, connection, xid):
- """Begin a two phase transaction on the given connection."""
-
- raise NotImplementedError()
-
- def do_prepare_twophase(self, connection, xid):
- """Prepare a two phase transaction on the given connection."""
-
- raise NotImplementedError()
-
- def do_rollback_twophase(self, connection, xid, is_prepared=True,
- recover=False):
- """Rollback a two phase transaction on the given connection."""
-
- raise NotImplementedError()
-
- def do_commit_twophase(self, connection, xid, is_prepared=True,
- recover=False):
- """Commit a two phase transaction on the given connection."""
-
- raise NotImplementedError()
-
- def do_recover_twophase(self, connection):
- """Recover list of uncommited prepared two phase transaction
- identifiers on the given connection."""
-
- raise NotImplementedError()
-
- def do_executemany(self, cursor, statement, parameters, context=None):
- """Provide an implementation of ``cursor.executemany(statement,
- parameters)``."""
-
- raise NotImplementedError()
-
- def do_execute(self, cursor, statement, parameters, context=None):
- """Provide an implementation of ``cursor.execute(statement,
- parameters)``."""
-
- raise NotImplementedError()
-
- def do_execute_no_params(self, cursor, statement, parameters, context=None):
- """Provide an implementation of ``cursor.execute(statement)``.
-
- The parameter collection should not be sent.
-
- """
-
- raise NotImplementedError()
-
- def is_disconnect(self, e, connection, cursor):
- """Return True if the given DB-API error indicates an invalid
- connection"""
-
- raise NotImplementedError()
-
- def connect(self):
- """return a callable which sets up a newly created DBAPI connection.
-
- The callable accepts a single argument "conn" which is the
- DBAPI connection itself. It has no return value.
-
- This is used to set dialect-wide per-connection options such as
- isolation modes, unicode modes, etc.
-
- If a callable is returned, it will be assembled into a pool listener
- that receives the direct DBAPI connection, with all wrappers removed.
-
- If None is returned, no listener will be generated.
-
- """
- return None
-
- def reset_isolation_level(self, dbapi_conn):
- """Given a DBAPI connection, revert its isolation to the default."""
-
- raise NotImplementedError()
-
- def set_isolation_level(self, dbapi_conn, level):
- """Given a DBAPI connection, set its isolation level."""
-
- raise NotImplementedError()
-
- def get_isolation_level(self, dbapi_conn):
- """Given a DBAPI connection, return its isolation level."""
-
- raise NotImplementedError()
-
-
-class ExecutionContext(object):
- """A messenger object for a Dialect that corresponds to a single
- execution.
-
- ExecutionContext should have these data members:
-
- connection
- Connection object which can be freely used by default value
- generators to execute SQL. This Connection should reference the
- same underlying connection/transactional resources of
- root_connection.
-
- root_connection
- Connection object which is the source of this ExecutionContext. This
- Connection may have close_with_result=True set, in which case it can
- only be used once.
-
- dialect
- dialect which created this ExecutionContext.
-
- cursor
- DB-API cursor procured from the connection,
-
- compiled
- if passed to constructor, sqlalchemy.engine.base.Compiled object
- being executed,
-
- statement
- string version of the statement to be executed. Is either
- passed to the constructor, or must be created from the
- sql.Compiled object by the time pre_exec() has completed.
-
- parameters
- bind parameters passed to the execute() method. For compiled
- statements, this is a dictionary or list of dictionaries. For
- textual statements, it should be in a format suitable for the
- dialect's paramstyle (i.e. dict or list of dicts for non
- positional, list or list of lists/tuples for positional).
-
- isinsert
- True if the statement is an INSERT.
-
- isupdate
- True if the statement is an UPDATE.
-
- should_autocommit
- True if the statement is a "committable" statement.
-
- prefetch_cols
- a list of Column objects for which a client-side default
- was fired off. Applies to inserts and updates.
-
- postfetch_cols
- a list of Column objects for which a server-side default or
- inline SQL expression value was fired off. Applies to inserts
- and updates.
- """
-
- def create_cursor(self):
- """Return a new cursor generated from this ExecutionContext's
- connection.
-
- Some dialects may wish to change the behavior of
- connection.cursor(), such as postgresql which may return a PG
- "server side" cursor.
- """
-
- raise NotImplementedError()
-
- def pre_exec(self):
- """Called before an execution of a compiled statement.
-
- If a compiled statement was passed to this ExecutionContext,
- the `statement` and `parameters` datamembers must be
- initialized after this statement is complete.
- """
-
- raise NotImplementedError()
-
- def post_exec(self):
- """Called after the execution of a compiled statement.
-
- If a compiled statement was passed to this ExecutionContext,
- the `last_insert_ids`, `last_inserted_params`, etc.
- datamembers should be available after this method completes.
- """
-
- raise NotImplementedError()
-
- def result(self):
- """Return a result object corresponding to this ExecutionContext.
-
- Returns a ResultProxy.
- """
-
- raise NotImplementedError()
-
- def handle_dbapi_exception(self, e):
- """Receive a DBAPI exception which occurred upon execute, result
- fetch, etc."""
-
- raise NotImplementedError()
-
- def should_autocommit_text(self, statement):
- """Parse the given textual statement and return True if it refers to
- a "committable" statement"""
-
- raise NotImplementedError()
-
- def lastrow_has_defaults(self):
- """Return True if the last INSERT or UPDATE row contained
- inlined or database-side defaults.
- """
-
- raise NotImplementedError()
-
- def get_rowcount(self):
- """Return the DBAPI ``cursor.rowcount`` value, or in some
- cases an interpreted value.
-
- See :attr:`.ResultProxy.rowcount` for details on this.
-
- """
-
- raise NotImplementedError()
-
-
-class Compiled(object):
- """Represent a compiled SQL or DDL expression.
-
- The ``__str__`` method of the ``Compiled`` object should produce
- the actual text of the statement. ``Compiled`` objects are
- specific to their underlying database dialect, and also may
- or may not be specific to the columns referenced within a
- particular set of bind parameters. In no case should the
- ``Compiled`` object be dependent on the actual values of those
- bind parameters, even though it may reference those values as
- defaults.
- """
-
- def __init__(self, dialect, statement, bind=None):
- """Construct a new ``Compiled`` object.
-
- :param dialect: ``Dialect`` to compile against.
-
- :param statement: ``ClauseElement`` to be compiled.
-
- :param bind: Optional Engine or Connection to compile this
- statement against.
- """
-
- self.dialect = dialect
- self.bind = bind
- if statement is not None:
- self.statement = statement
- self.can_execute = statement.supports_execution
- self.string = self.process(self.statement)
-
- @util.deprecated("0.7", ":class:`.Compiled` objects now compile "
- "within the constructor.")
- def compile(self):
- """Produce the internal string representation of this element."""
- pass
-
- @property
- def sql_compiler(self):
- """Return a Compiled that is capable of processing SQL expressions.
-
- If this compiler is one, it would likely just return 'self'.
-
- """
-
- raise NotImplementedError()
-
- def process(self, obj, **kwargs):
- return obj._compiler_dispatch(self, **kwargs)
-
- def __str__(self):
- """Return the string text of the generated SQL or DDL."""
-
- return self.string or ''
-
- def construct_params(self, params=None):
- """Return the bind params for this compiled object.
-
- :param params: a dict of string/object pairs whose values will
- override bind values compiled in to the
- statement.
- """
-
- raise NotImplementedError()
-
- @property
- def params(self):
- """Return the bind params for this compiled object."""
- return self.construct_params()
-
- def execute(self, *multiparams, **params):
- """Execute this compiled object."""
-
- e = self.bind
- if e is None:
- raise exc.UnboundExecutionError(
- "This Compiled object is not bound to any Engine "
- "or Connection.")
- return e._execute_compiled(self, multiparams, params)
-
- def scalar(self, *multiparams, **params):
- """Execute this compiled object and return the result's
- scalar value."""
-
- return self.execute(*multiparams, **params).scalar()
-
-
-class TypeCompiler(object):
- """Produces DDL specification for TypeEngine objects."""
-
- def __init__(self, dialect):
- self.dialect = dialect
-
- def process(self, type_):
- return type_._compiler_dispatch(self)
-
-
-class Connectable(object):
- """Interface for an object which supports execution of SQL constructs.
-
- The two implementations of :class:`.Connectable` are :class:`.Connection` and
- :class:`.Engine`.
-
- Connectable must also implement the 'dialect' member which references a
- :class:`.Dialect` instance.
-
- """
-
- dispatch = event.dispatcher(events.ConnectionEvents)
-
-
- def connect(self, **kwargs):
- """Return a :class:`.Connection` object.
-
- Depending on context, this may be ``self`` if this object
- is already an instance of :class:`.Connection`, or a newly
- procured :class:`.Connection` if this object is an instance
- of :class:`.Engine`.
-
- """
-
- def contextual_connect(self):
- """Return a :class:`.Connection` object which may be part of an ongoing
- context.
-
- Depending on context, this may be ``self`` if this object
- is already an instance of :class:`.Connection`, or a newly
- procured :class:`.Connection` if this object is an instance
- of :class:`.Engine`.
-
- """
-
- raise NotImplementedError()
-
- @util.deprecated("0.7", "Use the create() method on the given schema "
- "object directly, i.e. :meth:`.Table.create`, "
- ":meth:`.Index.create`, :meth:`.MetaData.create_all`")
- def create(self, entity, **kwargs):
- """Emit CREATE statements for the given schema entity."""
-
- raise NotImplementedError()
-
- @util.deprecated("0.7", "Use the drop() method on the given schema "
- "object directly, i.e. :meth:`.Table.drop`, "
- ":meth:`.Index.drop`, :meth:`.MetaData.drop_all`")
- def drop(self, entity, **kwargs):
- """Emit DROP statements for the given schema entity."""
-
- raise NotImplementedError()
-
- def execute(self, object, *multiparams, **params):
- """Executes the given construct and returns a :class:`.ResultProxy`."""
- raise NotImplementedError()
-
- def scalar(self, object, *multiparams, **params):
- """Executes and returns the first column of the first row.
-
- The underlying cursor is closed after execution.
- """
- raise NotImplementedError()
-
- def _run_visitor(self, visitorcallable, element,
- **kwargs):
- raise NotImplementedError()
-
- def _execute_clauseelement(self, elem, multiparams=None, params=None):
- raise NotImplementedError()
-
+from .interfaces import Connectable, Compiled
+from .util import _distill_params
class Connection(Connectable):
"""Provides high-level functionality for a wrapped DB-API connection.
"Unexecutable object type: %s" %
type(object))
- def __distill_params(self, multiparams, params):
- """Given arguments from the calling form *multiparams, **params,
- return a list of bind parameter structures, usually a list of
- dictionaries.
-
- In the case of 'raw' execution which accepts positional parameters,
- it may be a list of tuples or lists.
-
- """
-
- if not multiparams:
- if params:
- return [params]
- else:
- return []
- elif len(multiparams) == 1:
- zero = multiparams[0]
- if isinstance(zero, (list, tuple)):
- if not zero or hasattr(zero[0], '__iter__') and \
- not hasattr(zero[0], 'strip'):
- # execute(stmt, [{}, {}, {}, ...])
- # execute(stmt, [(), (), (), ...])
- return zero
- else:
- # execute(stmt, ("value", "value"))
- return [zero]
- elif hasattr(zero, 'keys'):
- # execute(stmt, {"key":"value"})
- return [zero]
- else:
- # execute(stmt, "value")
- return [[zero]]
- else:
- if hasattr(multiparams[0], '__iter__') and \
- not hasattr(multiparams[0], 'strip'):
- return multiparams
- else:
- return [multiparams]
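
The helper moves to ``sqlalchemy/engine/util.py`` as ``_distill_params``; assuming it preserves the behavior documented in the removed docstring, the calling forms distill as follows (mirroring the comments above)::

    from sqlalchemy.engine.util import _distill_params  # new home, per this patch

    assert _distill_params((), {}) == []
    assert _distill_params((), {'x': 5}) == [{'x': 5}]
    assert _distill_params(({'x': 5},), {}) == [{'x': 5}]
    assert _distill_params(([{'x': 5}, {'x': 6}],), {}) == [{'x': 5}, {'x': 6}]
    assert _distill_params(('value',), {}) == [['value']]
    assert _distill_params(('value', 'value'), {}) == [('value', 'value')]
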
def _execute_function(self, func, multiparams, params):
"""Execute a sql.FunctionElement object."""
elem, multiparams, params = \
fn(self, elem, multiparams, params)
- distilled_params = self.__distill_params(multiparams, params)
+ distilled_params = _distill_params(multiparams, params)
if distilled_params:
keys = distilled_params[0].keys()
else:
fn(self, compiled, multiparams, params)
dialect = self.dialect
- parameters=self.__distill_params(multiparams, params)
+ parameters = _distill_params(multiparams, params)
ret = self._execute_context(
dialect,
dialect.execution_ctx_cls._init_compiled,
fn(self, statement, multiparams, params)
dialect = self.dialect
- parameters = self.__distill_params(multiparams, params)
+ parameters = _distill_params(multiparams, params)
ret = self._execute_context(
dialect,
dialect.execution_ctx_cls._init_statement,
basestring: _execute_text
}
- @util.deprecated("0.7", "Use the create() method on the given schema "
- "object directly, i.e. :meth:`.Table.create`, "
- ":meth:`.Index.create`, :meth:`.MetaData.create_all`")
- def create(self, entity, **kwargs):
- """Emit CREATE statements for the given schema entity."""
-
- return self.engine.create(entity, connection=self, **kwargs)
-
- @util.deprecated("0.7", "Use the drop() method on the given schema "
- "object directly, i.e. :meth:`.Table.drop`, "
- ":meth:`.Index.drop`, :meth:`.MetaData.drop_all`")
- def drop(self, entity, **kwargs):
- """Emit DROP statements for the given schema entity."""
-
- return self.engine.drop(entity, connection=self, **kwargs)
-
- @util.deprecated("0.7", "Use autoload=True with :class:`.Table`, "
- "or use the :class:`.Inspector` object.")
- def reflecttable(self, table, include_columns=None):
- """Load table description from the database.
-
- Given a :class:`.Table` object, reflect its columns and
- properties from the database, populating the given :class:`.Table`
- object with attributes.. If include_columns (a list or
- set) is specified, limit the autoload to the given column
- names.
-
- The default implementation uses the
- :class:`.Inspector` interface to
- provide the output, building upon the granular table/column/
- constraint etc. methods of :class:`.Dialect`.
-
- """
- return self.engine.reflecttable(table, self, include_columns)
def default_schema_name(self):
return self.engine.dialect.get_default_schema_name(self)
"""
self.pool = self.pool._replace()
- @util.deprecated("0.7", "Use the create() method on the given schema "
- "object directly, i.e. :meth:`.Table.create`, "
- ":meth:`.Index.create`, :meth:`.MetaData.create_all`")
- def create(self, entity, connection=None, **kwargs):
- """Emit CREATE statements for the given schema entity."""
-
- from sqlalchemy.engine import ddl
-
- self._run_visitor(ddl.SchemaGenerator, entity,
- connection=connection, **kwargs)
-
- @util.deprecated("0.7", "Use the drop() method on the given schema "
- "object directly, i.e. :meth:`.Table.drop`, "
- ":meth:`.Index.drop`, :meth:`.MetaData.drop_all`")
- def drop(self, entity, connection=None, **kwargs):
- """Emit DROP statements for the given schema entity."""
-
- from sqlalchemy.engine import ddl
-
- self._run_visitor(ddl.SchemaDropper, entity,
- connection=connection, **kwargs)
def _execute_default(self, default):
connection = self.contextual_connect()
try:
return connection._execute_default(default, (), {})
- finally:
- connection.close()
-
- @property
- @util.deprecated("0.7",
- "Use :attr:`~sqlalchemy.sql.expression.func` to create function constructs.")
- def func(self):
- return expression._FunctionGenerator(bind=self)
-
- @util.deprecated("0.7",
- "Use :func:`.expression.text` to create text constructs.")
- def text(self, text, *args, **kwargs):
- """Return a :func:`~sqlalchemy.sql.expression.text` construct,
- bound to this engine.
-
- This is equivalent to::
-
- text("SELECT * FROM table", bind=engine)
-
- """
+ finally:
+ connection.close()
- return expression.text(text, bind=self, *args, **kwargs)
def _run_visitor(self, visitorcallable, element,
connection=None, **kwargs):
if connection is None:
conn.close()
- @util.deprecated("0.7", "Use autoload=True with :class:`.Table`, "
- "or use the :class:`.Inspector` object.")
- def reflecttable(self, table, connection=None, include_columns=None):
- """Load table description from the database.
-
- Uses the given :class:`.Connection`, or if None produces
- its own :class:`.Connection`, and passes the ``table``
- and ``include_columns`` arguments onto that
- :class:`.Connection` object's :meth:`.Connection.reflecttable`
- method. The :class:`.Table` object is then populated
- with new attributes.
-
- """
- if connection is None:
- conn = self.contextual_connect()
- else:
- conn = connection
- try:
- self.dialect.reflecttable(conn, table, include_columns)
- finally:
- if connection is None:
- conn.close()
-
def has_table(self, table_name, schema=None):
return self.run_callable(self.dialect.has_table, table_name, schema)
return self.pool.unique_connection()
-# This reconstructor is necessary so that pickles with the C extension or
-# without use the same Binary format.
-try:
- # We need a different reconstructor on the C extension so that we can
- # add extra checks that fields have correctly been initialized by
- # __setstate__.
- from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor
-
- # The extra function embedding is needed so that the
- # reconstructor function has the same signature whether or not
- # the extension is present.
- def rowproxy_reconstructor(cls, state):
- return safe_rowproxy_reconstructor(cls, state)
-except ImportError:
- def rowproxy_reconstructor(cls, state):
- obj = cls.__new__(cls)
- obj.__setstate__(state)
- return obj
-
-try:
- from sqlalchemy.cresultproxy import BaseRowProxy
-except ImportError:
- class BaseRowProxy(object):
- __slots__ = ('_parent', '_row', '_processors', '_keymap')
-
- def __init__(self, parent, row, processors, keymap):
- """RowProxy objects are constructed by ResultProxy objects."""
-
- self._parent = parent
- self._row = row
- self._processors = processors
- self._keymap = keymap
-
- def __reduce__(self):
- return (rowproxy_reconstructor,
- (self.__class__, self.__getstate__()))
-
- def values(self):
- """Return the values represented by this RowProxy as a list."""
- return list(self)
-
- def __iter__(self):
- for processor, value in izip(self._processors, self._row):
- if processor is None:
- yield value
- else:
- yield processor(value)
-
- def __len__(self):
- return len(self._row)
-
- def __getitem__(self, key):
- try:
- processor, obj, index = self._keymap[key]
- except KeyError:
- processor, obj, index = self._parent._key_fallback(key)
- except TypeError:
- if isinstance(key, slice):
- l = []
- for processor, value in izip(self._processors[key],
- self._row[key]):
- if processor is None:
- l.append(value)
- else:
- l.append(processor(value))
- return tuple(l)
- else:
- raise
- if index is None:
- raise exc.InvalidRequestError(
- "Ambiguous column name '%s' in result set! "
- "try 'use_labels' option on select statement." % key)
- if processor is not None:
- return processor(self._row[index])
- else:
- return self._row[index]
-
- def __getattr__(self, name):
- try:
- return self[name]
- except KeyError, e:
- raise AttributeError(e.args[0])
-
-
-class RowProxy(BaseRowProxy):
- """Proxy values from a single cursor row.
-
- Mostly follows "ordered dictionary" behavior, mapping result
- values to the string-based column name, the integer position of
- the result in the row, as well as Column instances which can be
- mapped to the original Columns that produced this result set (for
- results that correspond to constructed SQL expressions).
- """
- __slots__ = ()
-
- def __contains__(self, key):
- return self._parent._has_key(self._row, key)
-
- def __getstate__(self):
- return {
- '_parent': self._parent,
- '_row': tuple(self)
- }
-
- def __setstate__(self, state):
- self._parent = parent = state['_parent']
- self._row = state['_row']
- self._processors = parent._processors
- self._keymap = parent._keymap
-
- __hash__ = None
-
- def __eq__(self, other):
- return other is self or other == tuple(self)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __repr__(self):
- return repr(tuple(self))
-
- def has_key(self, key):
- """Return True if this RowProxy contains the given key."""
-
- return self._parent._has_key(self._row, key)
-
- def items(self):
- """Return a list of tuples, each tuple containing a key/value pair."""
- # TODO: no coverage here
- return [(key, self[key]) for key in self.iterkeys()]
-
- def keys(self):
- """Return the list of keys as strings represented by this RowProxy."""
-
- return self._parent.keys
-
- def iterkeys(self):
- return iter(self._parent.keys)
-
- def itervalues(self):
- return iter(self)
-
-try:
- # Register RowProxy with Sequence,
- # so sequence protocol is implemented
- from collections import Sequence
- Sequence.register(RowProxy)
-except ImportError:
- pass
-
-
-class ResultMetaData(object):
- """Handle cursor.description, applying additional info from an execution
- context."""
-
- def __init__(self, parent, metadata):
- self._processors = processors = []
-
- # We do not strictly need to store the processor in the key mapping,
- # though it is faster in the Python version (probably because of the
- # saved attribute lookup self._processors)
- self._keymap = keymap = {}
- self.keys = []
- context = parent.context
- dialect = context.dialect
- typemap = dialect.dbapi_type_map
- translate_colname = context._translate_colname
- self.case_sensitive = dialect.case_sensitive
-
- # high precedence key values.
- primary_keymap = {}
-
- for i, rec in enumerate(metadata):
- colname = rec[0]
- coltype = rec[1]
-
- if dialect.description_encoding:
- colname = dialect._description_decoder(colname)
-
- if translate_colname:
- colname, untranslated = translate_colname(colname)
-
- if dialect.requires_name_normalize:
- colname = dialect.normalize_name(colname)
-
- if context.result_map:
- try:
- name, obj, type_ = context.result_map[colname
- if self.case_sensitive
- else colname.lower()]
- except KeyError:
- name, obj, type_ = \
- colname, None, typemap.get(coltype, types.NULLTYPE)
- else:
- name, obj, type_ = \
- colname, None, typemap.get(coltype, types.NULLTYPE)
-
- processor = type_._cached_result_processor(dialect, coltype)
-
- processors.append(processor)
- rec = (processor, obj, i)
-
- # indexes as keys. This is only needed for the Python version of
- # RowProxy (the C version uses a faster path for integer indexes).
- primary_keymap[i] = rec
-
- # populate primary keymap, looking for conflicts.
- if primary_keymap.setdefault(
- name if self.case_sensitive
- else name.lower(),
- rec) is not rec:
- # place a record that doesn't have the "index" - this
- # is interpreted later as an AmbiguousColumnError,
- # but only when actually accessed. Columns
- # colliding by name is not a problem if those names
- # aren't used; integer and ColumnElement access is always
- # unambiguous.
- primary_keymap[name
- if self.case_sensitive
- else name.lower()] = (processor, obj, None)
-
-
- self.keys.append(colname)
- if obj:
- for o in obj:
- keymap[o] = rec
-
- if translate_colname and \
- untranslated:
- keymap[untranslated] = rec
-
- # overwrite keymap values with those of the
- # high precedence keymap.
- keymap.update(primary_keymap)
-
- if parent._echo:
- context.engine.logger.debug(
- "Col %r", tuple(x[0] for x in metadata))
-
- @util.pending_deprecation("0.8", "sqlite dialect uses "
- "_translate_colname() now")
- def _set_keymap_synonym(self, name, origname):
- """Set a synonym for the given name.
-
- Some dialects (SQLite at the moment) may use this to
- adjust the column names that are significant within a
- row.
-
- """
- rec = (processor, obj, i) = self._keymap[origname if
- self.case_sensitive
- else origname.lower()]
- if self._keymap.setdefault(name, rec) is not rec:
- self._keymap[name] = (processor, obj, None)
-
- def _key_fallback(self, key, raiseerr=True):
- map = self._keymap
- result = None
- if isinstance(key, basestring):
- result = map.get(key if self.case_sensitive else key.lower())
- # fallback for targeting a ColumnElement to a textual expression
- # this is a rare use case which only occurs when matching text()
- # or colummn('name') constructs to ColumnElements, or after a
- # pickle/unpickle roundtrip
- elif isinstance(key, expression.ColumnElement):
- if key._label and (
- key._label
- if self.case_sensitive
- else key._label.lower()) in map:
- result = map[key._label
- if self.case_sensitive
- else key._label.lower()]
- elif hasattr(key, 'name') and (
- key.name
- if self.case_sensitive
- else key.name.lower()) in map:
- # match is only on name.
- result = map[key.name
- if self.case_sensitive
- else key.name.lower()]
- # search extra hard to make sure this
- # isn't a column/label name overlap.
- # this check isn't currently available if the row
- # was unpickled.
- if result is not None and \
- result[1] is not None:
- for obj in result[1]:
- if key._compare_name_for_result(obj):
- break
- else:
- result = None
- if result is None:
- if raiseerr:
- raise exc.NoSuchColumnError(
- "Could not locate column in row for column '%s'" %
- expression._string_or_unprintable(key))
- else:
- return None
- else:
- map[key] = result
- return result
-
- def _has_key(self, row, key):
- if key in self._keymap:
- return True
- else:
- return self._key_fallback(key, False) is not None
-
- def __getstate__(self):
- return {
- '_pickled_keymap': dict(
- (key, index)
- for key, (processor, obj, index) in self._keymap.iteritems()
- if isinstance(key, (basestring, int))
- ),
- 'keys': self.keys,
- "case_sensitive":self.case_sensitive,
- }
-
- def __setstate__(self, state):
- # the row has been processed at pickling time so we don't need any
- # processor anymore
- self._processors = [None for _ in xrange(len(state['keys']))]
- self._keymap = keymap = {}
- for key, index in state['_pickled_keymap'].iteritems():
- # not preserving "obj" here, unfortunately our
- # proxy comparison fails with the unpickle
- keymap[key] = (None, None, index)
- self.keys = state['keys']
- self.case_sensitive = state['case_sensitive']
- self._echo = False
-
-
-class ResultProxy(object):
- """Wraps a DB-API cursor object to provide easier access to row columns.
-
- Individual columns may be accessed by their integer position,
- case-insensitive column name, or by ``schema.Column``
- object. e.g.::
-
- row = fetchone()
-
- col1 = row[0] # access via integer position
-
- col2 = row['col2'] # access via name
-
- col3 = row[mytable.c.mycol] # access via Column object.
-
- ``ResultProxy`` also handles post-processing of result column
- data using ``TypeEngine`` objects, which are referenced from
- the originating SQL statement that produced this result set.
-
- """
-
- _process_row = RowProxy
- out_parameters = None
- _can_close_connection = False
-
- def __init__(self, context):
- self.context = context
- self.dialect = context.dialect
- self.closed = False
- self.cursor = self._saved_cursor = context.cursor
- self.connection = context.root_connection
- self._echo = self.connection._echo and \
- context.engine._should_log_debug()
- self._init_metadata()
-
- def _init_metadata(self):
- metadata = self._cursor_description()
- if metadata is None:
- self._metadata = None
- else:
- self._metadata = ResultMetaData(self, metadata)
-
- def keys(self):
- """Return the current set of string keys for rows."""
- if self._metadata:
- return self._metadata.keys
- else:
- return []
-
- @util.memoized_property
- def rowcount(self):
- """Return the 'rowcount' for this result.
-
- The 'rowcount' reports the number of rows *matched*
- by the WHERE criterion of an UPDATE or DELETE statement.
-
- .. note::
-
- Notes regarding :attr:`.ResultProxy.rowcount`:
-
-
- * This attribute returns the number of rows *matched*,
- which is not necessarily the same as the number of rows
- that were actually *modified* - an UPDATE statement, for example,
- may have no net change on a given row if the SET values
- given are the same as those present in the row already.
- Such a row would be matched but not modified.
- On backends that feature both styles, such as MySQL,
- rowcount is configured by default to return the match
- count in all cases.
-
- * :attr:`.ResultProxy.rowcount` is *only* useful in conjunction
- with an UPDATE or DELETE statement. Contrary to what the Python
- DBAPI says, it does *not* return the
- number of rows available from the results of a SELECT statement
- as DBAPIs cannot support this functionality when rows are
- unbuffered.
-
- * :attr:`.ResultProxy.rowcount` may not be fully implemented by
- all dialects. In particular, most DBAPIs do not support an
- aggregate rowcount result from an executemany call.
- The :meth:`.ResultProxy.supports_sane_rowcount` and
- :meth:`.ResultProxy.supports_sane_multi_rowcount` methods
- will report from the dialect if each usage is known to be
- supported.
-
- * Statements that use RETURNING may not return a correct
- rowcount.
-
- """
- try:
- return self.context.rowcount
- except Exception, e:
- self.connection._handle_dbapi_exception(
- e, None, None, self.cursor, self.context)
- raise
-
- @property
- def lastrowid(self):
- """return the 'lastrowid' accessor on the DBAPI cursor.
-
- This is a DBAPI specific method and is only functional
- for those backends which support it, for statements
- where it is appropriate. It's behavior is not
- consistent across backends.
-
- Usage of this method is normally unnecessary when
- using insert() expression constructs; the
- :attr:`~ResultProxy.inserted_primary_key` attribute provides a
- tuple of primary key values for a newly inserted row,
- regardless of database backend.
-
- """
- try:
- return self._saved_cursor.lastrowid
- except Exception, e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self._saved_cursor, self.context)
- raise
-
- @property
- def returns_rows(self):
- """True if this :class:`.ResultProxy` returns rows.
-
- I.e. if it is legal to call the methods
- :meth:`~.ResultProxy.fetchone`,
- :meth:`~.ResultProxy.fetchmany`
- :meth:`~.ResultProxy.fetchall`.
-
- """
- return self._metadata is not None
-
- @property
- def is_insert(self):
- """True if this :class:`.ResultProxy` is the result
- of a executing an expression language compiled
- :func:`.expression.insert` construct.
-
- When True, this implies that the
- :attr:`inserted_primary_key` attribute is accessible,
- assuming the statement did not include
- a user defined "returning" construct.
-
- """
- return self.context.isinsert
-
- def _cursor_description(self):
- """May be overridden by subclasses."""
-
- return self._saved_cursor.description
-
- def close(self, _autoclose_connection=True):
- """Close this ResultProxy.
-
- Closes the underlying DBAPI cursor corresponding to the execution.
-
- Note that any data cached within this ResultProxy is still available.
- For some types of results, this may include buffered rows.
-
- If this ResultProxy was generated from an implicit execution,
- the underlying Connection will also be closed (returns the
- underlying DBAPI connection to the connection pool.)
-
- This method is called automatically when:
-
- * all result rows are exhausted using the fetchXXX() methods.
- * cursor.description is None.
-
- """
-
- if not self.closed:
- self.closed = True
- self.connection._safe_close_cursor(self.cursor)
- if _autoclose_connection and \
- self.connection.should_close_with_result:
- self.connection.close()
- # allow consistent errors
- self.cursor = None
-
- def __iter__(self):
- while True:
- row = self.fetchone()
- if row is None:
- raise StopIteration
- else:
- yield row
-
- @util.memoized_property
- def inserted_primary_key(self):
- """Return the primary key for the row just inserted.
-
- The return value is a list of scalar values
- corresponding to the list of primary key columns
- in the target table.
-
- This only applies to single row :func:`.insert`
- constructs which did not explicitly specify
- :meth:`.Insert.returning`.
-
- Note that primary key columns which specify a
- server_default clause,
- or otherwise do not qualify as "autoincrement"
- columns (see the notes at :class:`.Column`), and were
- generated using the database-side default, will
- appear in this list as ``None`` unless the backend
- supports "returning" and the insert statement executed
- with the "implicit returning" enabled.
-
- Raises :class:`.InvalidRequestError` if the executed
- statement is not a compiled expression construct
- or is not an insert() construct.
-
- """
-
- if not self.context.compiled:
- raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
- elif not self.context.isinsert:
- raise exc.InvalidRequestError(
- "Statement is not an insert() "
- "expression construct.")
- elif self.context._is_explicit_returning:
- raise exc.InvalidRequestError(
- "Can't call inserted_primary_key "
- "when returning() "
- "is used.")
-
- return self.context.inserted_primary_key
-
- def last_updated_params(self):
- """Return the collection of updated parameters from this
- execution.
-
- Raises :class:`.InvalidRequestError` if the executed
- statement is not a compiled expression construct
- or is not an update() construct.
-
- """
- if not self.context.compiled:
- raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
- elif not self.context.isupdate:
- raise exc.InvalidRequestError(
- "Statement is not an update() "
- "expression construct.")
- elif self.context.executemany:
- return self.context.compiled_parameters
- else:
- return self.context.compiled_parameters[0]
-
- def last_inserted_params(self):
- """Return the collection of inserted parameters from this
- execution.
-
- Raises :class:`.InvalidRequestError` if the executed
- statement is not a compiled expression construct
- or is not an insert() construct.
-
- """
- if not self.context.compiled:
- raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
- elif not self.context.isinsert:
- raise exc.InvalidRequestError(
- "Statement is not an insert() "
- "expression construct.")
- elif self.context.executemany:
- return self.context.compiled_parameters
- else:
- return self.context.compiled_parameters[0]
-
- def lastrow_has_defaults(self):
- """Return ``lastrow_has_defaults()`` from the underlying
- :class:`.ExecutionContext`.
-
- See :class:`.ExecutionContext` for details.
-
- """
-
- return self.context.lastrow_has_defaults()
-
- def postfetch_cols(self):
- """Return ``postfetch_cols()`` from the underlying :class:`.ExecutionContext`.
-
- See :class:`.ExecutionContext` for details.
-
- Raises :class:`.InvalidRequestError` if the executed
- statement is not a compiled expression construct
- or is not an insert() or update() construct.
-
- """
-
- if not self.context.compiled:
- raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
- elif not self.context.isinsert and not self.context.isupdate:
- raise exc.InvalidRequestError(
- "Statement is not an insert() or update() "
- "expression construct.")
- return self.context.postfetch_cols
-
- def prefetch_cols(self):
- """Return ``prefetch_cols()`` from the underlying :class:`.ExecutionContext`.
-
- See :class:`.ExecutionContext` for details.
-
- Raises :class:`.InvalidRequestError` if the executed
- statement is not a compiled expression construct
- or is not an insert() or update() construct.
-
- """
-
- if not self.context.compiled:
- raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
- elif not self.context.isinsert and not self.context.isupdate:
- raise exc.InvalidRequestError(
- "Statement is not an insert() or update() "
- "expression construct.")
- return self.context.prefetch_cols
-
- def supports_sane_rowcount(self):
- """Return ``supports_sane_rowcount`` from the dialect.
-
- See :attr:`.ResultProxy.rowcount` for background.
-
- """
-
- return self.dialect.supports_sane_rowcount
-
- def supports_sane_multi_rowcount(self):
- """Return ``supports_sane_multi_rowcount`` from the dialect.
-
- See :attr:`.ResultProxy.rowcount` for background.
-
- """
-
- return self.dialect.supports_sane_multi_rowcount
-
- def _fetchone_impl(self):
- try:
- return self.cursor.fetchone()
- except AttributeError:
- self._non_result()
-
- def _fetchmany_impl(self, size=None):
- try:
- if size is None:
- return self.cursor.fetchmany()
- else:
- return self.cursor.fetchmany(size)
- except AttributeError:
- self._non_result()
-
- def _fetchall_impl(self):
- try:
- return self.cursor.fetchall()
- except AttributeError:
- self._non_result()
-
- def _non_result(self):
- if self._metadata is None:
- raise exc.ResourceClosedError(
- "This result object does not return rows. "
- "It has been closed automatically.",
- )
- else:
- raise exc.ResourceClosedError("This result object is closed.")
-
- def process_rows(self, rows):
- process_row = self._process_row
- metadata = self._metadata
- keymap = metadata._keymap
- processors = metadata._processors
- if self._echo:
- log = self.context.engine.logger.debug
- l = []
- for row in rows:
- log("Row %r", row)
- l.append(process_row(metadata, row, processors, keymap))
- return l
- else:
- return [process_row(metadata, row, processors, keymap)
- for row in rows]
-
- def fetchall(self):
- """Fetch all rows, just like DB-API ``cursor.fetchall()``."""
-
- try:
- l = self.process_rows(self._fetchall_impl())
- self.close()
- return l
- except Exception, e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
- raise
-
- def fetchmany(self, size=None):
- """Fetch many rows, just like DB-API
- ``cursor.fetchmany(size=cursor.arraysize)``.
-
- If rows are present, the cursor remains open after this is called.
- Else the cursor is automatically closed and an empty list is returned.
-
- """
-
- try:
- l = self.process_rows(self._fetchmany_impl(size))
- if len(l) == 0:
- self.close()
- return l
- except Exception, e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
- raise
-
- def fetchone(self):
- """Fetch one row, just like DB-API ``cursor.fetchone()``.
-
- If a row is present, the cursor remains open after this is called.
- Else the cursor is automatically closed and None is returned.
-
- """
- try:
- row = self._fetchone_impl()
- if row is not None:
- return self.process_rows([row])[0]
- else:
- self.close()
- return None
- except Exception, e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
- raise
-
- def first(self):
- """Fetch the first row and then close the result set unconditionally.
-
- Returns None if no row is present.
-
- """
- if self._metadata is None:
- self._non_result()
-
- try:
- row = self._fetchone_impl()
- except Exception, e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
- raise
-
- try:
- if row is not None:
- return self.process_rows([row])[0]
- else:
- return None
- finally:
- self.close()
-
- def scalar(self):
- """Fetch the first column of the first row, and close the result set.
-
- Returns None if no row is present.
-
- """
- row = self.first()
- if row is not None:
- return row[0]
- else:
- return None
-
-class BufferedRowResultProxy(ResultProxy):
- """A ResultProxy with row buffering behavior.
-
- ``ResultProxy`` that buffers the contents of a selection of rows
- before ``fetchone()`` is called. This is to allow the results of
- ``cursor.description`` to be available immediately, when
- interfacing with a DB-API that requires rows to be consumed before
- this information is available (currently psycopg2, when used with
- server-side cursors).
-
- The pre-fetching behavior fetches only one row initially, and then
- grows its buffer size by a fixed amount with each successive need
- for additional rows up to a size of 100.
- """
-
- def _init_metadata(self):
- self.__buffer_rows()
- super(BufferedRowResultProxy, self)._init_metadata()
-
- # this is a "growth chart" for the buffering of rows.
- # each successive __buffer_rows call will use the next
- # value in the list for the buffer size until the max
- # is reached
- size_growth = {
- 1 : 5,
- 5 : 10,
- 10 : 20,
- 20 : 50,
- 50 : 100,
- 100 : 250,
- 250 : 500,
- 500 : 1000
- }
-
- def __buffer_rows(self):
- size = getattr(self, '_bufsize', 1)
- self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
- self._bufsize = self.size_growth.get(size, size)
-
- def _fetchone_impl(self):
- if self.closed:
- return None
- if not self.__rowbuffer:
- self.__buffer_rows()
- if not self.__rowbuffer:
- return None
- return self.__rowbuffer.popleft()
-
- def _fetchmany_impl(self, size=None):
- if size is None:
- return self._fetchall_impl()
- result = []
- for x in range(0, size):
- row = self._fetchone_impl()
- if row is None:
- break
- result.append(row)
- return result
-
- def _fetchall_impl(self):
- self.__rowbuffer.extend(self.cursor.fetchall())
- ret = self.__rowbuffer
- self.__rowbuffer = collections.deque()
- return ret
-
-class FullyBufferedResultProxy(ResultProxy):
- """A result proxy that buffers rows fully upon creation.
-
- Used for operations where a result is to be delivered
- after the database conversation can not be continued,
- such as MSSQL INSERT...OUTPUT after an autocommit.
-
- """
- def _init_metadata(self):
- super(FullyBufferedResultProxy, self)._init_metadata()
- self.__rowbuffer = self._buffer_rows()
-
- def _buffer_rows(self):
- return collections.deque(self.cursor.fetchall())
-
- def _fetchone_impl(self):
- if self.__rowbuffer:
- return self.__rowbuffer.popleft()
- else:
- return None
-
- def _fetchmany_impl(self, size=None):
- if size is None:
- return self._fetchall_impl()
- result = []
- for x in range(0, size):
- row = self._fetchone_impl()
- if row is None:
- break
- result.append(row)
- return result
-
- def _fetchall_impl(self):
- ret = self.__rowbuffer
- self.__rowbuffer = collections.deque()
- return ret
-
-class BufferedColumnRow(RowProxy):
- def __init__(self, parent, row, processors, keymap):
- # preprocess row
- row = list(row)
- # this is a tad faster than using enumerate
- index = 0
- for processor in parent._orig_processors:
- if processor is not None:
- row[index] = processor(row[index])
- index += 1
- row = tuple(row)
- super(BufferedColumnRow, self).__init__(parent, row,
- processors, keymap)
-
-class BufferedColumnResultProxy(ResultProxy):
- """A ResultProxy with column buffering behavior.
-
- ``ResultProxy`` that loads all columns into memory each time
- fetchone() is called. If fetchmany() or fetchall() are called,
- the full grid of results is fetched. This is to operate with
- databases where result rows contain "live" results that fall out
- of scope unless explicitly fetched. Currently this includes
- cx_Oracle LOB objects.
-
- """
-
- _process_row = BufferedColumnRow
-
- def _init_metadata(self):
- super(BufferedColumnResultProxy, self)._init_metadata()
- metadata = self._metadata
- # orig_processors will be used to preprocess each row when they are
- # constructed.
- metadata._orig_processors = metadata._processors
- # replace the all type processors by None processors.
- metadata._processors = [None for _ in xrange(len(metadata.keys))]
- keymap = {}
- for k, (func, obj, index) in metadata._keymap.iteritems():
- keymap[k] = (None, obj, index)
- self._metadata._keymap = keymap
-
- def fetchall(self):
- # can't call cursor.fetchall(), since rows must be
- # fully processed before requesting more from the DBAPI.
- l = []
- while True:
- row = self.fetchone()
- if row is None:
- break
- l.append(row)
- return l
-
- def fetchmany(self, size=None):
- # can't call cursor.fetchmany(), since rows must be
- # fully processed before requesting more from the DBAPI.
- if size is None:
- return self.fetchall()
- l = []
- for i in xrange(size):
- row = self.fetchone()
- if row is None:
- break
- l.append(row)
- return l
-
-def connection_memoize(key):
- """Decorator, memoize a function in a connection.info stash.
-
- Only applicable to functions which take no arguments other than a
- connection. The memo will be stored in ``connection.info[key]``.
- """
-
- @util.decorator
- def decorated(fn, self, connection):
- connection = connection.connect()
- try:
- return connection.info[key]
- except KeyError:
- connection.info[key] = val = fn(self, connection)
- return val
-
- return decorated
import re
import random
-from . import base, reflection
+from . import reflection, interfaces, result
from ..sql import compiler, expression
from .. import exc, types as sqltypes, util, pool, processors
import codecs
re.I | re.UNICODE)
-class DefaultDialect(base.Dialect):
+class DefaultDialect(interfaces.Dialect):
"""Default implementation of Dialect"""
statement_compiler = compiler.SQLCompiler
# the configured default of this dialect.
self.set_isolation_level(dbapi_conn, self.default_isolation_level)
-class DefaultExecutionContext(base.ExecutionContext):
+class DefaultExecutionContext(interfaces.ExecutionContext):
isinsert = False
isupdate = False
isdelete = False
pass
def get_result_proxy(self):
- return base.ResultProxy(self)
+ return result.ResultProxy(self)
@property
def rowcount(self):
--- /dev/null
+# engine/interfaces.py
+# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Define core interfaces used by the engine system."""
+
+from .. import util, event, events
+
+class Dialect(object):
+ """Define the behavior of a specific database and DB-API combination.
+
+ Any aspect of metadata definition, SQL query generation,
+ execution, result-set handling, or anything else which varies
+ between databases is defined under the general category of the
+ Dialect. The Dialect acts as a factory for other
+ database-specific object implementations including
+ ExecutionContext, Compiled, DefaultGenerator, and TypeEngine.
+
+ All Dialects implement the following attributes:
+
+ name
+ identifying name for the dialect from a DBAPI-neutral point of view
+ (i.e. 'sqlite')
+
+ driver
+ identifying name for the dialect's DBAPI
+
+ positional
+ True if the paramstyle for this Dialect is positional.
+
+ paramstyle
+ the paramstyle to be used (some DB-APIs support multiple
+ paramstyles).
+
+ convert_unicode
+ True if Unicode conversion should be applied to all ``str``
+ types.
+
+ encoding
+ type of encoding to use for unicode, usually defaults to
+ 'utf-8'.
+
+ statement_compiler
+ a :class:`~Compiled` class used to compile SQL statements
+
+ ddl_compiler
+ a :class:`~Compiled` class used to compile DDL statements
+
+ server_version_info
+ a tuple containing a version number for the DB backend in use.
+ This value is only available for supporting dialects, and is
+ typically populated during the initial connection to the database.
+
+ default_schema_name
+ the name of the default schema. This value is only available for
+ supporting dialects, and is typically populated during the
+ initial connection to the database.
+
+ execution_ctx_cls
+ a :class:`.ExecutionContext` class used to handle statement execution
+
+ execute_sequence_format
+ either the 'tuple' or 'list' type, depending on what cursor.execute()
+ accepts for the second argument (they vary).
+
+ preparer
+ a :class:`~sqlalchemy.sql.compiler.IdentifierPreparer` class used to
+ quote identifiers.
+
+ supports_alter
+ ``True`` if the database supports ``ALTER TABLE``.
+
+ max_identifier_length
+ The maximum length of identifier names.
+
+ supports_unicode_statements
+ Indicate whether the DB-API can receive SQL statements as Python
+ unicode strings
+
+ supports_unicode_binds
+ Indicate whether the DB-API can receive string bind parameters
+ as Python unicode strings
+
+ supports_sane_rowcount
+ Indicate whether the dialect properly implements rowcount for
+ ``UPDATE`` and ``DELETE`` statements.
+
+ supports_sane_multi_rowcount
+ Indicate whether the dialect properly implements rowcount for
+ ``UPDATE`` and ``DELETE`` statements when executed via
+ executemany.
+
+ preexecute_autoincrement_sequences
+ True if 'implicit' primary key functions must be executed separately
+ in order to get their value. This is currently oriented towards
+ Postgresql.
+
+ implicit_returning
+ use RETURNING or equivalent during INSERT execution in order to load
+ newly generated primary keys and other column defaults in one execution,
+ which are then available via inserted_primary_key.
+ If an insert statement has returning() specified explicitly,
+ the "implicit" functionality is not used and inserted_primary_key
+ will not be available.
+
+ dbapi_type_map
+ A mapping of DB-API type objects present in this Dialect's
+ DB-API implementation mapped to TypeEngine implementations used
+ by the dialect.
+
+ This is used to apply types to result sets based on the DB-API
+ types present in cursor.description; it only takes effect for
+ result sets against textual statements where no explicit
+ typemap was present.
+
+ colspecs
+ A dictionary of TypeEngine classes from sqlalchemy.types mapped
+ to subclasses that are specific to the dialect class. This
+ dictionary is class-level only and is not accessed from the
+ dialect instance itself.
+
+ supports_default_values
+ Indicates if the construct ``INSERT INTO tablename DEFAULT
+ VALUES`` is supported
+
+ supports_sequences
+ Indicates if the dialect supports CREATE SEQUENCE or similar.
+
+ sequences_optional
+ If True, indicates if the "optional" flag on the Sequence() construct
+ should signal to not generate a CREATE SEQUENCE. Applies only to
+ dialects that support sequences. Currently used only to allow Postgresql
+ SERIAL to be used on a column that specifies Sequence() for usage on
+ other backends.
+
+ supports_native_enum
+ Indicates if the dialect supports a native ENUM construct.
+ This will prevent types.Enum from generating a CHECK
+ constraint when that type is used.
+
+ supports_native_boolean
+ Indicates if the dialect supports a native boolean construct.
+ This will prevent types.Boolean from generating a CHECK
+ constraint when that type is used.
+
+ """
+
+ def create_connect_args(self, url):
+ """Build DB-API compatible connection arguments.
+
+ Given a :class:`~sqlalchemy.engine.url.URL` object, returns a tuple
+ consisting of `*args`/`**kwargs` suitable to send directly
+ to the dbapi's connect function.
+
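+ A purely illustrative sketch, assuming a DBAPI whose ``connect()``
+ accepts keyword arguments only::
+
+     def create_connect_args(self, url):
+         # URL parts become keyword arguments; query string
+         # entries pass through as additional options.
+         opts = url.translate_connect_args()
+         opts.update(url.query)
+         return [], opts
+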
+ """
+
+ raise NotImplementedError()
+
+ @classmethod
+ def type_descriptor(cls, typeobj):
+ """Transform a generic type to a dialect-specific type.
+
+ Dialect classes will usually use the
+ :func:`~sqlalchemy.types.adapt_type` function in the types module to
+ make this job easy.
+
+ The returned result is cached *per dialect class*, so it must
+ contain no dialect-instance state.
+
+ """
+
+ raise NotImplementedError()
+
+ def initialize(self, connection):
+ """Called during strategized creation of the dialect with a
+ connection.
+
+ Allows dialects to configure options based on server version info or
+ other properties.
+
+ The connection passed here is a SQLAlchemy Connection object,
+ with full capabilities.
+
+ The initialize() method of the base dialect should be called via
+ super().
+
+ """
+
+ pass
+
+ def reflecttable(self, connection, table, include_columns=None):
+ """Load table description from the database.
+
+ Given a :class:`.Connection` and a
+ :class:`~sqlalchemy.schema.Table` object, reflect its columns and
+ properties from the database. If include_columns (a list or
+ set) is specified, limit the autoload to the given column
+ names.
+
+ The default implementation uses the
+ :class:`~sqlalchemy.engine.reflection.Inspector` interface to
+ provide the output, building upon the granular table/column/
+ constraint etc. methods of :class:`.Dialect`.
+
+ """
+
+ raise NotImplementedError()
+
+ def get_columns(self, connection, table_name, schema=None, **kw):
+ """Return information about columns in `table_name`.
+
+ Given a :class:`.Connection`, a string
+ `table_name`, and an optional string `schema`, return column
+ information as a list of dictionaries with these keys:
+
+ name
+ the column's name
+
+ type
+ [sqlalchemy.types#TypeEngine]
+
+ nullable
+ boolean
+
+ default
+ the column's default value
+
+ autoincrement
+ boolean
+
+ sequence
+ a dictionary of the form
+ {'name': str, 'start': int, 'increment': int}
+
+ Additional column attributes may be present.
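+
+ A hypothetical single-column table might, for example, yield::
+
+     [{'name': 'id',
+       'type': sqlalchemy.types.Integer(),
+       'nullable': False,
+       'default': None,
+       'autoincrement': True}]
+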
+ """
+
+ raise NotImplementedError()
+
+ def get_primary_keys(self, connection, table_name, schema=None, **kw):
+ """Return information about primary keys in `table_name`.
+
+ Deprecated. This method is only called by the default
+ implementation of :meth:`get_pk_constraint()`. Dialects should
+ instead implement :meth:`get_pk_constraint` directly.
+
+ """
+
+ raise NotImplementedError()
+
+ def get_pk_constraint(self, connection, table_name, schema=None, **kw):
+ """Return information about the primary key constraint on
+ `table_name`.
+
+ Given a :class:`.Connection`, a string
+ `table_name`, and an optional string `schema`, return primary
+ key information as a dictionary with these keys:
+
+ constrained_columns
+ a list of column names that make up the primary key
+
+ name
+ optional name of the primary key constraint.
+
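+ For example, a hypothetical composite primary key might yield::
+
+     {'constrained_columns': ['id', 'version'],
+      'name': 'pk_mytable'}
+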
+ """
+ raise NotImplementedError()
+
+ def get_foreign_keys(self, connection, table_name, schema=None, **kw):
+ """Return information about foreign_keys in `table_name`.
+
+ Given a :class:`.Connection`, a string
+ `table_name`, and an optional string `schema`, return foreign
+ key information as a list of dicts with these keys:
+
+ name
+ the constraint's name
+
+ constrained_columns
+ a list of column names that make up the foreign key
+
+ referred_schema
+ the name of the referred schema
+
+ referred_table
+ the name of the referred table
+
+ referred_columns
+ a list of column names in the referred table that correspond to
+ constrained_columns
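+
+ A single hypothetical foreign key might be represented as::
+
+     {'name': 'fk_address_user_id',
+      'constrained_columns': ['user_id'],
+      'referred_schema': None,
+      'referred_table': 'user',
+      'referred_columns': ['id']}
+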
+ """
+
+ raise NotImplementedError()
+
+ def get_table_names(self, connection, schema=None, **kw):
+ """Return a list of table names for `schema`."""
+
+ raise NotImplementedError()
+
+ def get_view_names(self, connection, schema=None, **kw):
+ """Return a list of all view names available in the database.
+
+ schema:
+ Optional, retrieve names from a non-default schema.
+ """
+
+ raise NotImplementedError()
+
+ def get_view_definition(self, connection, view_name, schema=None, **kw):
+ """Return view definition.
+
+ Given a :class:`.Connection`, a string
+ `view_name`, and an optional string `schema`, return the view
+ definition.
+ """
+
+ raise NotImplementedError()
+
+ def get_indexes(self, connection, table_name, schema=None, **kw):
+ """Return information about indexes in `table_name`.
+
+ Given a :class:`.Connection`, a string
+ `table_name` and an optional string `schema`, return index
+ information as a list of dictionaries with these keys:
+
+ name
+ the index's name
+
+ column_names
+ list of column names in order
+
+ unique
+ boolean
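+
+ For example, a hypothetical non-unique index might appear as::
+
+     [{'name': 'ix_user_email',
+       'column_names': ['email'],
+       'unique': False}]
+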
+ """
+
+ raise NotImplementedError()
+
+ def normalize_name(self, name):
+ """convert the given name to lowercase if it is detected as
+ case insensitive.
+
+ this method is only used if the dialect defines
+ requires_name_normalize=True.
+
+ """
+ raise NotImplementedError()
+
+ def denormalize_name(self, name):
+ """convert the given name to a case insensitive identifier
+ for the backend if it is an all-lowercase name.
+
+ this method is only used if the dialect defines
+ requires_name_normalize=True.
+
+ """
+ raise NotImplementedError()
+
+ def has_table(self, connection, table_name, schema=None):
+ """Check the existence of a particular table in the database.
+
+ Given a :class:`.Connection` object and a string
+ `table_name`, return True if the given table (possibly within
+ the specified `schema`) exists in the database, False
+ otherwise.
+ """
+
+ raise NotImplementedError()
+
+ def has_sequence(self, connection, sequence_name, schema=None):
+ """Check the existence of a particular sequence in the database.
+
+ Given a :class:`.Connection` object and a string
+ `sequence_name`, return True if the given sequence exists in
+ the database, False otherwise.
+ """
+
+ raise NotImplementedError()
+
+ def _get_server_version_info(self, connection):
+ """Retrieve the server version info from the given connection.
+
+ This is used by the default implementation to populate the
+ "server_version_info" attribute and is called exactly
+ once upon first connect.
+
+ """
+
+ raise NotImplementedError()
+
+ def _get_default_schema_name(self, connection):
+ """Return the string name of the currently selected schema from
+ the given connection.
+
+ This is used by the default implementation to populate the
+ "default_schema_name" attribute and is called exactly
+ once upon first connect.
+
+ """
+
+ raise NotImplementedError()
+
+ def do_begin(self, connection):
+ """Provide an implementation of *connection.begin()*, given a
+ DB-API connection."""
+
+ raise NotImplementedError()
+
+ def do_rollback(self, connection):
+ """Provide an implementation of *connection.rollback()*, given
+ a DB-API connection."""
+
+ raise NotImplementedError()
+
+ def create_xid(self):
+ """Create a two-phase transaction ID.
+
+ This id will be passed to do_begin_twophase(),
+ do_rollback_twophase(), do_commit_twophase(). Its format is
+ unspecified.
+ """
+
+ raise NotImplementedError()
+
+ def do_commit(self, connection):
+ """Provide an implementation of *connection.commit()*, given a
+ DB-API connection."""
+
+ raise NotImplementedError()
+
+ def do_savepoint(self, connection, name):
+ """Create a savepoint with the given name on a SQLAlchemy
+ connection."""
+
+ raise NotImplementedError()
+
+ def do_rollback_to_savepoint(self, connection, name):
+ """Rollback a SQL Alchemy connection to the named savepoint."""
+
+ raise NotImplementedError()
+
+ def do_release_savepoint(self, connection, name):
+ """Release the named savepoint on a SQL Alchemy connection."""
+
+ raise NotImplementedError()
+
+ def do_begin_twophase(self, connection, xid):
+ """Begin a two phase transaction on the given connection."""
+
+ raise NotImplementedError()
+
+ def do_prepare_twophase(self, connection, xid):
+ """Prepare a two phase transaction on the given connection."""
+
+ raise NotImplementedError()
+
+ def do_rollback_twophase(self, connection, xid, is_prepared=True,
+ recover=False):
+ """Rollback a two phase transaction on the given connection."""
+
+ raise NotImplementedError()
+
+ def do_commit_twophase(self, connection, xid, is_prepared=True,
+ recover=False):
+ """Commit a two phase transaction on the given connection."""
+
+ raise NotImplementedError()
+
+ def do_recover_twophase(self, connection):
+ """Recover list of uncommited prepared two phase transaction
+ identifiers on the given connection."""
+
+ raise NotImplementedError()
+
+ def do_executemany(self, cursor, statement, parameters, context=None):
+ """Provide an implementation of ``cursor.executemany(statement,
+ parameters)``."""
+
+ raise NotImplementedError()
+
+ def do_execute(self, cursor, statement, parameters, context=None):
+ """Provide an implementation of ``cursor.execute(statement,
+ parameters)``."""
+
+ raise NotImplementedError()
+
+ def do_execute_no_params(self, cursor, statement, parameters, context=None):
+ """Provide an implementation of ``cursor.execute(statement)``.
+
+ The parameter collection should not be sent.
+
+ """
+
+ raise NotImplementedError()
+
+ def is_disconnect(self, e, connection, cursor):
+ """Return True if the given DB-API error indicates an invalid
+ connection"""
+
+ raise NotImplementedError()
+
+ def connect(self):
+ """return a callable which sets up a newly created DBAPI connection.
+
+ The callable accepts a single argument "conn" which is the
+ DBAPI connection itself. It has no return value.
+
+ This is used to set dialect-wide per-connection options such as
+ isolation modes, unicode modes, etc.
+
+ If a callable is returned, it will be assembled into a pool listener
+ that receives the direct DBAPI connection, with all wrappers removed.
+
+ If None is returned, no listener will be generated.
+
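+ A hypothetical sketch, applying an imaginary option to each
+ new connection::
+
+     def connect(self):
+         def setup(dbapi_conn):
+             cursor = dbapi_conn.cursor()
+             cursor.execute("SET some_option = 1")  # illustrative
+             cursor.close()
+         return setup
+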
+ """
+ return None
+
+ def reset_isolation_level(self, dbapi_conn):
+ """Given a DBAPI connection, revert its isolation to the default."""
+
+ raise NotImplementedError()
+
+ def set_isolation_level(self, dbapi_conn, level):
+ """Given a DBAPI connection, set its isolation level."""
+
+ raise NotImplementedError()
+
+ def get_isolation_level(self, dbapi_conn):
+ """Given a DBAPI connection, return its isolation level."""
+
+ raise NotImplementedError()
+
+
+class ExecutionContext(object):
+ """A messenger object for a Dialect that corresponds to a single
+ execution.
+
+ ExecutionContext should have these data members:
+
+ connection
+ Connection object which can be freely used by default value
+ generators to execute SQL. This Connection should reference the
+ same underlying connection/transactional resources of
+ root_connection.
+
+ root_connection
+ Connection object which is the source of this ExecutionContext. This
+ Connection may have close_with_result=True set, in which case it can
+ only be used once.
+
+ dialect
+ dialect which created this ExecutionContext.
+
+ cursor
+ DB-API cursor procured from the connection.
+
+ compiled
+ if passed to the constructor, the :class:`.Compiled` object
+ being executed.
+
+ statement
+ string version of the statement to be executed. Is either
+ passed to the constructor, or must be created from the
+ sql.Compiled object by the time pre_exec() has completed.
+
+ parameters
+ bind parameters passed to the execute() method. For compiled
+ statements, this is a dictionary or list of dictionaries. For
+ textual statements, it should be in a format suitable for the
+ dialect's paramstyle (i.e. dict or list of dicts for non
+ positional, list or list of lists/tuples for positional).
+
+ isinsert
+ True if the statement is an INSERT.
+
+ isupdate
+ True if the statement is an UPDATE.
+
+ should_autocommit
+ True if the statement is a "committable" statement.
+
+ prefetch_cols
+ a list of Column objects for which a client-side default
+ was fired off. Applies to inserts and updates.
+
+ postfetch_cols
+ a list of Column objects for which a server-side default or
+ inline SQL expression value was fired off. Applies to inserts
+ and updates.
+ """
+
+ def create_cursor(self):
+ """Return a new cursor generated from this ExecutionContext's
+ connection.
+
+ Some dialects may wish to change the behavior of
+ connection.cursor(), such as postgresql which may return a PG
+ "server side" cursor.
+ """
+
+ raise NotImplementedError()
+
+ def pre_exec(self):
+ """Called before an execution of a compiled statement.
+
+ If a compiled statement was passed to this ExecutionContext,
+ the `statement` and `parameters` datamembers must be
+ initialized after this method completes.
+ """
+
+ raise NotImplementedError()
+
+ def post_exec(self):
+ """Called after the execution of a compiled statement.
+
+ If a compiled statement was passed to this ExecutionContext,
+ the `last_insert_ids`, `last_inserted_params`, etc.
+ datamembers should be available after this method completes.
+ """
+
+ raise NotImplementedError()
+
+ def result(self):
+ """Return a result object corresponding to this ExecutionContext.
+
+ Returns a ResultProxy.
+ """
+
+ raise NotImplementedError()
+
+ def handle_dbapi_exception(self, e):
+ """Receive a DBAPI exception which occurred upon execute, result
+ fetch, etc."""
+
+ raise NotImplementedError()
+
+ def should_autocommit_text(self, statement):
+ """Parse the given textual statement and return True if it refers to
+ a "committable" statement"""
+
+ raise NotImplementedError()
+
+ def lastrow_has_defaults(self):
+ """Return True if the last INSERT or UPDATE row contained
+ inlined or database-side defaults.
+ """
+
+ raise NotImplementedError()
+
+ def get_rowcount(self):
+ """Return the DBAPI ``cursor.rowcount`` value, or in some
+ cases an interpreted value.
+
+ See :attr:`.ResultProxy.rowcount` for details on this.
+
+ """
+
+ raise NotImplementedError()
+
+
+class Compiled(object):
+ """Represent a compiled SQL or DDL expression.
+
+ The ``__str__`` method of the ``Compiled`` object should produce
+ the actual text of the statement. ``Compiled`` objects are
+ specific to their underlying database dialect, and also may
+ or may not be specific to the columns referenced within a
+ particular set of bind parameters. In no case should the
+ ``Compiled`` object be dependent on the actual values of those
+ bind parameters, even though it may reference those values as
+ defaults.
+ """
+
+ def __init__(self, dialect, statement, bind=None):
+ """Construct a new ``Compiled`` object.
+
+ :param dialect: ``Dialect`` to compile against.
+
+ :param statement: ``ClauseElement`` to be compiled.
+
+ :param bind: Optional Engine or Connection to compile this
+ statement against.
+ """
+
+ self.dialect = dialect
+ self.bind = bind
+ if statement is not None:
+ self.statement = statement
+ self.can_execute = statement.supports_execution
+ self.string = self.process(self.statement)
+
+ @util.deprecated("0.7", ":class:`.Compiled` objects now compile "
+ "within the constructor.")
+ def compile(self):
+ """Produce the internal string representation of this element."""
+ pass
+
+ @property
+ def sql_compiler(self):
+ """Return a Compiled that is capable of processing SQL expressions.
+
+ If this compiler is already such a Compiled, it would likely
+ just return 'self'.
+
+ """
+
+ raise NotImplementedError()
+
+ def process(self, obj, **kwargs):
+ return obj._compiler_dispatch(self, **kwargs)
+
+ def __str__(self):
+ """Return the string text of the generated SQL or DDL."""
+
+ return self.string or ''
+
+ def construct_params(self, params=None):
+ """Return the bind params for this compiled object.
+
+ :param params: a dict of string/object pairs whose values will
+ override bind values compiled in to the
+ statement.
+ """
+
+ raise NotImplementedError()
+
+ @property
+ def params(self):
+ """Return the bind params for this compiled object."""
+ return self.construct_params()
+
+ def execute(self, *multiparams, **params):
+ """Execute this compiled object."""
+
+ e = self.bind
+ if e is None:
+ raise exc.UnboundExecutionError(
+ "This Compiled object is not bound to any Engine "
+ "or Connection.")
+ return e._execute_compiled(self, multiparams, params)
+
+ def scalar(self, *multiparams, **params):
+ """Execute this compiled object and return the result's
+ scalar value."""
+
+ return self.execute(*multiparams, **params).scalar()
+
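+# Usage sketch (illustrative only): a Compiled object is normally
+# obtained from a ClauseElement rather than constructed directly,
+# e.g.
+#
+#     compiled = table.select().compile(dialect=some_dialect)
+#     print compiled          # the generated SQL string
+#     print compiled.params   # the bind parameter dictionary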
+
+class TypeCompiler(object):
+ """Produces DDL specification for TypeEngine objects."""
+
+ def __init__(self, dialect):
+ self.dialect = dialect
+
+ def process(self, type_):
+ return type_._compiler_dispatch(self)
+
+
+class Connectable(object):
+ """Interface for an object which supports execution of SQL constructs.
+
+ The two implementations of :class:`.Connectable` are :class:`.Connection` and
+ :class:`.Engine`.
+
+ Connectable must also implement the 'dialect' member which references a
+ :class:`.Dialect` instance.
+
+ """
+
+ dispatch = event.dispatcher(events.ConnectionEvents)
+
+ def connect(self, **kwargs):
+ """Return a :class:`.Connection` object.
+
+ Depending on context, this may be ``self`` if this object
+ is already an instance of :class:`.Connection`, or a newly
+ procured :class:`.Connection` if this object is an instance
+ of :class:`.Engine`.
+
+ """
+
+ def contextual_connect(self):
+ """Return a :class:`.Connection` object which may be part of an ongoing
+ context.
+
+ Depending on context, this may be ``self`` if this object
+ is already an instance of :class:`.Connection`, or a newly
+ procured :class:`.Connection` if this object is an instance
+ of :class:`.Engine`.
+
+ """
+
+ raise NotImplementedError()
+
+ @util.deprecated("0.7", "Use the create() method on the given schema "
+ "object directly, i.e. :meth:`.Table.create`, "
+ ":meth:`.Index.create`, :meth:`.MetaData.create_all`")
+ def create(self, entity, **kwargs):
+ """Emit CREATE statements for the given schema entity."""
+
+ raise NotImplementedError()
+
+ @util.deprecated("0.7", "Use the drop() method on the given schema "
+ "object directly, i.e. :meth:`.Table.drop`, "
+ ":meth:`.Index.drop`, :meth:`.MetaData.drop_all`")
+ def drop(self, entity, **kwargs):
+ """Emit DROP statements for the given schema entity."""
+
+ raise NotImplementedError()
+
+ def execute(self, object, *multiparams, **params):
+ """Executes the given construct and returns a :class:`.ResultProxy`."""
+ raise NotImplementedError()
+
+ def scalar(self, object, *multiparams, **params):
+ """Executes and returns the first column of the first row.
+
+ The underlying cursor is closed after execution.
+ """
+ raise NotImplementedError()
+
+ def _run_visitor(self, visitorcallable, element,
+ **kwargs):
+ raise NotImplementedError()
+
+ def _execute_clauseelement(self, elem, multiparams=None, params=None):
+ raise NotImplementedError()
+
--- /dev/null
+# engine/result.py
+# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Define result set constructs including :class:`.ResultProxy`
+and :class:`.RowProxy`."""
+
+
+from itertools import izip
+from .. import exc, types, util
+from ..sql import expression
+import collections
+
+# This reconstructor is necessary so that pickles with the C extension or
+# without use the same binary format.
+try:
+ # We need a different reconstructor on the C extension so that we can
+ # add extra checks that fields have correctly been initialized by
+ # __setstate__.
+ from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor
+
+ # The extra function embedding is needed so that the
+ # reconstructor function has the same signature whether or not
+ # the extension is present.
+ def rowproxy_reconstructor(cls, state):
+ return safe_rowproxy_reconstructor(cls, state)
+except ImportError:
+ def rowproxy_reconstructor(cls, state):
+ obj = cls.__new__(cls)
+ obj.__setstate__(state)
+ return obj
+
+try:
+ from sqlalchemy.cresultproxy import BaseRowProxy
+except ImportError:
+ class BaseRowProxy(object):
+ __slots__ = ('_parent', '_row', '_processors', '_keymap')
+
+ def __init__(self, parent, row, processors, keymap):
+ """RowProxy objects are constructed by ResultProxy objects."""
+
+ self._parent = parent
+ self._row = row
+ self._processors = processors
+ self._keymap = keymap
+
+ def __reduce__(self):
+ return (rowproxy_reconstructor,
+ (self.__class__, self.__getstate__()))
+
+ def values(self):
+ """Return the values represented by this RowProxy as a list."""
+ return list(self)
+
+ def __iter__(self):
+ for processor, value in izip(self._processors, self._row):
+ if processor is None:
+ yield value
+ else:
+ yield processor(value)
+
+ def __len__(self):
+ return len(self._row)
+
+ def __getitem__(self, key):
+ try:
+ processor, obj, index = self._keymap[key]
+ except KeyError:
+ processor, obj, index = self._parent._key_fallback(key)
+ except TypeError:
+ if isinstance(key, slice):
+ l = []
+ for processor, value in izip(self._processors[key],
+ self._row[key]):
+ if processor is None:
+ l.append(value)
+ else:
+ l.append(processor(value))
+ return tuple(l)
+ else:
+ raise
+ if index is None:
+ raise exc.InvalidRequestError(
+ "Ambiguous column name '%s' in result set! "
+ "try 'use_labels' option on select statement." % key)
+ if processor is not None:
+ return processor(self._row[index])
+ else:
+ return self._row[index]
+
+ def __getattr__(self, name):
+ try:
+ return self[name]
+ except KeyError, e:
+ raise AttributeError(e.args[0])
+
+
+class RowProxy(BaseRowProxy):
+ """Proxy values from a single cursor row.
+
+ Mostly follows "ordered dictionary" behavior, mapping result
+ values to the string-based column name, the integer position of
+ the result in the row, as well as Column instances which can be
+ mapped to the original Columns that produced this result set (for
+ results that correspond to constructed SQL expressions).
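+
+ An illustrative sketch, assuming a hypothetical ``users`` table::
+
+     row = result.fetchone()
+     row[0] == row['user_id'] == row[users.c.user_id]
+     'user_id' in row    # True when the column is present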
+ """
+ __slots__ = ()
+
+ def __contains__(self, key):
+ return self._parent._has_key(self._row, key)
+
+ def __getstate__(self):
+ return {
+ '_parent': self._parent,
+ '_row': tuple(self)
+ }
+
+ def __setstate__(self, state):
+ self._parent = parent = state['_parent']
+ self._row = state['_row']
+ self._processors = parent._processors
+ self._keymap = parent._keymap
+
+ __hash__ = None
+
+ def __eq__(self, other):
+ return other is self or other == tuple(self)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __repr__(self):
+ return repr(tuple(self))
+
+ def has_key(self, key):
+ """Return True if this RowProxy contains the given key."""
+
+ return self._parent._has_key(self._row, key)
+
+ def items(self):
+ """Return a list of tuples, each tuple containing a key/value pair."""
+ # TODO: no coverage here
+ return [(key, self[key]) for key in self.iterkeys()]
+
+ def keys(self):
+ """Return the list of keys as strings represented by this RowProxy."""
+
+ return self._parent.keys
+
+ def iterkeys(self):
+ return iter(self._parent.keys)
+
+ def itervalues(self):
+ return iter(self)
+
+try:
+ # Register RowProxy with Sequence,
+ # so sequence protocol is implemented
+ from collections import Sequence
+ Sequence.register(RowProxy)
+except ImportError:
+ pass
+
+
+class ResultMetaData(object):
+ """Handle cursor.description, applying additional info from an execution
+ context."""
+
+ def __init__(self, parent, metadata):
+ self._processors = processors = []
+
+ # We do not strictly need to store the processor in the key mapping,
+ # though it is faster in the Python version (probably because of the
+ # saved attribute lookup self._processors)
+ self._keymap = keymap = {}
+ self.keys = []
+ context = parent.context
+ dialect = context.dialect
+ typemap = dialect.dbapi_type_map
+ translate_colname = context._translate_colname
+ self.case_sensitive = dialect.case_sensitive
+
+ # high precedence key values.
+ primary_keymap = {}
+
+ for i, rec in enumerate(metadata):
+ colname = rec[0]
+ coltype = rec[1]
+
+ if dialect.description_encoding:
+ colname = dialect._description_decoder(colname)
+
+ if translate_colname:
+ colname, untranslated = translate_colname(colname)
+
+ if dialect.requires_name_normalize:
+ colname = dialect.normalize_name(colname)
+
+ if context.result_map:
+ try:
+ name, obj, type_ = context.result_map[colname
+ if self.case_sensitive
+ else colname.lower()]
+ except KeyError:
+ name, obj, type_ = \
+ colname, None, typemap.get(coltype, types.NULLTYPE)
+ else:
+ name, obj, type_ = \
+ colname, None, typemap.get(coltype, types.NULLTYPE)
+
+ processor = type_._cached_result_processor(dialect, coltype)
+
+ processors.append(processor)
+ rec = (processor, obj, i)
+
+ # indexes as keys. This is only needed for the Python version of
+ # RowProxy (the C version uses a faster path for integer indexes).
+ primary_keymap[i] = rec
+
+ # populate primary keymap, looking for conflicts.
+ if primary_keymap.setdefault(
+ name if self.case_sensitive
+ else name.lower(),
+ rec) is not rec:
+ # place a record that doesn't have the "index" - this
+ # is interpreted later as an AmbiguousColumnError,
+ # but only when actually accessed. Columns
+ # colliding by name is not a problem if those names
+ # aren't used; integer and ColumnElement access is always
+ # unambiguous.
+ primary_keymap[name
+ if self.case_sensitive
+ else name.lower()] = (processor, obj, None)
+
+ self.keys.append(colname)
+ if obj:
+ for o in obj:
+ keymap[o] = rec
+
+ if translate_colname and \
+ untranslated:
+ keymap[untranslated] = rec
+
+ # overwrite keymap values with those of the
+ # high precedence keymap.
+ keymap.update(primary_keymap)
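+
+ # At this point keymap addresses each column by integer position,
+ # by (possibly lower-cased) string name, and by any matching
+ # ColumnElement objects, e.g. for "SELECT a, b FROM t" the keys
+ # include 0, 1, 'a' and 'b'.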
+
+ if parent._echo:
+ context.engine.logger.debug(
+ "Col %r", tuple(x[0] for x in metadata))
+
+ @util.pending_deprecation("0.8", "sqlite dialect uses "
+ "_translate_colname() now")
+ def _set_keymap_synonym(self, name, origname):
+ """Set a synonym for the given name.
+
+ Some dialects (SQLite at the moment) may use this to
+ adjust the column names that are significant within a
+ row.
+
+ """
+ rec = (processor, obj, i) = self._keymap[origname if
+ self.case_sensitive
+ else origname.lower()]
+ if self._keymap.setdefault(name, rec) is not rec:
+ self._keymap[name] = (processor, obj, None)
+
+ def _key_fallback(self, key, raiseerr=True):
+ map = self._keymap
+ result = None
+ if isinstance(key, basestring):
+ result = map.get(key if self.case_sensitive else key.lower())
+ # fallback for targeting a ColumnElement to a textual expression
+ # this is a rare use case which only occurs when matching text()
+ # or column('name') constructs to ColumnElements, or after a
+ # pickle/unpickle roundtrip
+ elif isinstance(key, expression.ColumnElement):
+ if key._label and (
+ key._label
+ if self.case_sensitive
+ else key._label.lower()) in map:
+ result = map[key._label
+ if self.case_sensitive
+ else key._label.lower()]
+ elif hasattr(key, 'name') and (
+ key.name
+ if self.case_sensitive
+ else key.name.lower()) in map:
+ # match is only on name.
+ result = map[key.name
+ if self.case_sensitive
+ else key.name.lower()]
+ # search extra hard to make sure this
+ # isn't a column/label name overlap.
+ # this check isn't currently available if the row
+ # was unpickled.
+ if result is not None and \
+ result[1] is not None:
+ for obj in result[1]:
+ if key._compare_name_for_result(obj):
+ break
+ else:
+ result = None
+ if result is None:
+ if raiseerr:
+ raise exc.NoSuchColumnError(
+ "Could not locate column in row for column '%s'" %
+ expression._string_or_unprintable(key))
+ else:
+ return None
+ else:
+ map[key] = result
+ return result
+
+ def _has_key(self, row, key):
+ if key in self._keymap:
+ return True
+ else:
+ return self._key_fallback(key, False) is not None
+
+ def __getstate__(self):
+ return {
+ '_pickled_keymap': dict(
+ (key, index)
+ for key, (processor, obj, index) in self._keymap.iteritems()
+ if isinstance(key, (basestring, int))
+ ),
+ 'keys': self.keys,
+ "case_sensitive": self.case_sensitive,
+ }
+
+ def __setstate__(self, state):
+ # the row has been processed at pickling time so we don't need any
+ # processor anymore
+ self._processors = [None for _ in xrange(len(state['keys']))]
+ self._keymap = keymap = {}
+ for key, index in state['_pickled_keymap'].iteritems():
+ # not preserving "obj" here, unfortunately our
+ # proxy comparison fails with the unpickle
+ keymap[key] = (None, None, index)
+ self.keys = state['keys']
+ self.case_sensitive = state['case_sensitive']
+ self._echo = False
+
+
+class ResultProxy(object):
+ """Wraps a DB-API cursor object to provide easier access to row columns.
+
+ Individual columns may be accessed by their integer position,
+ case-insensitive column name, or by ``schema.Column``
+ object. e.g.::
+
+ row = fetchone()
+
+ col1 = row[0] # access via integer position
+
+ col2 = row['col2'] # access via name
+
+ col3 = row[mytable.c.mycol] # access via Column object.
+
+ ``ResultProxy`` also handles post-processing of result column
+ data using ``TypeEngine`` objects, which are referenced from
+ the originating SQL statement that produced this result set.
+
+ """
+
+ _process_row = RowProxy
+ out_parameters = None
+ _can_close_connection = False
+
+ def __init__(self, context):
+ self.context = context
+ self.dialect = context.dialect
+ self.closed = False
+ self.cursor = self._saved_cursor = context.cursor
+ self.connection = context.root_connection
+ self._echo = self.connection._echo and \
+ context.engine._should_log_debug()
+ self._init_metadata()
+
+ def _init_metadata(self):
+ metadata = self._cursor_description()
+ if metadata is None:
+ self._metadata = None
+ else:
+ self._metadata = ResultMetaData(self, metadata)
+
+ def keys(self):
+ """Return the current set of string keys for rows."""
+ if self._metadata:
+ return self._metadata.keys
+ else:
+ return []
+
+ @util.memoized_property
+ def rowcount(self):
+ """Return the 'rowcount' for this result.
+
+ The 'rowcount' reports the number of rows *matched*
+ by the WHERE criterion of an UPDATE or DELETE statement.
+
+ .. note::
+
+ Notes regarding :attr:`.ResultProxy.rowcount`:
+
+ * This attribute returns the number of rows *matched*,
+ which is not necessarily the same as the number of rows
+ that were actually *modified* - an UPDATE statement, for example,
+ may have no net change on a given row if the SET values
+ given are the same as those present in the row already.
+ Such a row would be matched but not modified.
+ On backends that feature both styles, such as MySQL,
+ rowcount is configured by default to return the match
+ count in all cases.
+
+ * :attr:`.ResultProxy.rowcount` is *only* useful in conjunction
+ with an UPDATE or DELETE statement. Contrary to what the Python
+ DBAPI says, it does *not* return the
+ number of rows available from the results of a SELECT statement
+ as DBAPIs cannot support this functionality when rows are
+ unbuffered.
+
+ * :attr:`.ResultProxy.rowcount` may not be fully implemented by
+ all dialects. In particular, most DBAPIs do not support an
+ aggregate rowcount result from an executemany call.
+ The :meth:`.ResultProxy.supports_sane_rowcount` and
+ :meth:`.ResultProxy.supports_sane_multi_rowcount` methods
+ will report from the dialect if each usage is known to be
+ supported.
+
+ * Statements that use RETURNING may not return a correct
+ rowcount.
+
+ """
+ try:
+ return self.context.rowcount
+ except Exception, e:
+ self.connection._handle_dbapi_exception(
+ e, None, None, self.cursor, self.context)
+ raise
+
+ @property
+ def lastrowid(self):
+ """return the 'lastrowid' accessor on the DBAPI cursor.
+
+ This is a DBAPI specific method and is only functional
+ for those backends which support it, for statements
+ where it is appropriate. Its behavior is not
+ consistent across backends.
+
+ Usage of this method is normally unnecessary when
+ using insert() expression constructs; the
+ :attr:`~ResultProxy.inserted_primary_key` attribute provides a
+ tuple of primary key values for a newly inserted row,
+ regardless of database backend.
+
+ """
+ try:
+ return self._saved_cursor.lastrowid
+ except Exception, e:
+ self.connection._handle_dbapi_exception(
+ e, None, None,
+ self._saved_cursor, self.context)
+ raise
+
+ @property
+ def returns_rows(self):
+ """True if this :class:`.ResultProxy` returns rows.
+
+ I.e. if it is legal to call the methods
+ :meth:`~.ResultProxy.fetchone`,
+ :meth:`~.ResultProxy.fetchmany`, or
+ :meth:`~.ResultProxy.fetchall`.
+
+ """
+ return self._metadata is not None
+
+ @property
+ def is_insert(self):
+ """True if this :class:`.ResultProxy` is the result
+ of executing an expression language compiled
+ :func:`.expression.insert` construct.
+
+ When True, this implies that the
+ :attr:`inserted_primary_key` attribute is accessible,
+ assuming the statement did not include
+ a user defined "returning" construct.
+
+ """
+ return self.context.isinsert
+
+ def _cursor_description(self):
+ """May be overridden by subclasses."""
+
+ return self._saved_cursor.description
+
+ def close(self, _autoclose_connection=True):
+ """Close this ResultProxy.
+
+ Closes the underlying DBAPI cursor corresponding to the execution.
+
+ Note that any data cached within this ResultProxy is still available.
+ For some types of results, this may include buffered rows.
+
+ If this ResultProxy was generated from an implicit execution,
+ the underlying Connection will also be closed (returns the
+ underlying DBAPI connection to the connection pool.)
+
+ This method is called automatically when:
+
+ * all result rows are exhausted using the fetchXXX() methods.
+ * cursor.description is None.
+
+ """
+
+ if not self.closed:
+ self.closed = True
+ self.connection._safe_close_cursor(self.cursor)
+ if _autoclose_connection and \
+ self.connection.should_close_with_result:
+ self.connection.close()
+ # allow consistent errors
+ self.cursor = None
+
+ def __iter__(self):
+ while True:
+ row = self.fetchone()
+ if row is None:
+ raise StopIteration
+ else:
+ yield row
+
+ @util.memoized_property
+ def inserted_primary_key(self):
+ """Return the primary key for the row just inserted.
+
+ The return value is a list of scalar values
+ corresponding to the list of primary key columns
+ in the target table.
+
+ This only applies to single row :func:`.insert`
+ constructs which did not explicitly specify
+ :meth:`.Insert.returning`.
+
+ Note that primary key columns which specify a
+ server_default clause,
+ or otherwise do not qualify as "autoincrement"
+ columns (see the notes at :class:`.Column`), and were
+ generated using the database-side default, will
+ appear in this list as ``None`` unless the backend
+ supports "returning" and the insert statement executed
+ with the "implicit returning" enabled.
+
+ Raises :class:`.InvalidRequestError` if the executed
+ statement is not a compiled expression construct
+ or is not an insert() construct.
+
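+ e.g., assuming a hypothetical ``users`` table::
+
+     result = conn.execute(users.insert().values(name='jack'))
+     result.inserted_primary_key   # e.g. [5]
+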
+ """
+
+ if not self.context.compiled:
+ raise exc.InvalidRequestError(
+ "Statement is not a compiled "
+ "expression construct.")
+ elif not self.context.isinsert:
+ raise exc.InvalidRequestError(
+ "Statement is not an insert() "
+ "expression construct.")
+ elif self.context._is_explicit_returning:
+ raise exc.InvalidRequestError(
+ "Can't call inserted_primary_key "
+ "when returning() "
+ "is used.")
+
+ return self.context.inserted_primary_key
+
+ def last_updated_params(self):
+ """Return the collection of updated parameters from this
+ execution.
+
+ Raises :class:`.InvalidRequestError` if the executed
+ statement is not a compiled expression construct
+ or is not an update() construct.
+
+ """
+ if not self.context.compiled:
+ raise exc.InvalidRequestError(
+ "Statement is not a compiled "
+ "expression construct.")
+ elif not self.context.isupdate:
+ raise exc.InvalidRequestError(
+ "Statement is not an update() "
+ "expression construct.")
+ elif self.context.executemany:
+ return self.context.compiled_parameters
+ else:
+ return self.context.compiled_parameters[0]
+
+ def last_inserted_params(self):
+ """Return the collection of inserted parameters from this
+ execution.
+
+ Raises :class:`.InvalidRequestError` if the executed
+ statement is not a compiled expression construct
+ or is not an insert() construct.
+
+ """
+ if not self.context.compiled:
+ raise exc.InvalidRequestError(
+ "Statement is not a compiled "
+ "expression construct.")
+ elif not self.context.isinsert:
+ raise exc.InvalidRequestError(
+ "Statement is not an insert() "
+ "expression construct.")
+ elif self.context.executemany:
+ return self.context.compiled_parameters
+ else:
+ return self.context.compiled_parameters[0]
+
+ def lastrow_has_defaults(self):
+ """Return ``lastrow_has_defaults()`` from the underlying
+ :class:`.ExecutionContext`.
+
+ See :class:`.ExecutionContext` for details.
+
+ """
+
+ return self.context.lastrow_has_defaults()
+
+ def postfetch_cols(self):
+ """Return ``postfetch_cols()`` from the underlying
+ :class:`.ExecutionContext`.
+
+ See :class:`.ExecutionContext` for details.
+
+ Raises :class:`.InvalidRequestError` if the executed
+ statement is not a compiled expression construct
+ or is not an insert() or update() construct.
+
+ """
+
+ if not self.context.compiled:
+ raise exc.InvalidRequestError(
+ "Statement is not a compiled "
+ "expression construct.")
+ elif not self.context.isinsert and not self.context.isupdate:
+ raise exc.InvalidRequestError(
+ "Statement is not an insert() or update() "
+ "expression construct.")
+ return self.context.postfetch_cols
+
+ def prefetch_cols(self):
+ """Return ``prefetch_cols()`` from the underlying
+ :class:`.ExecutionContext`.
+
+ See :class:`.ExecutionContext` for details.
+
+ Raises :class:`.InvalidRequestError` if the executed
+ statement is not a compiled expression construct
+ or is not an insert() or update() construct.
+
+ """
+
+ if not self.context.compiled:
+ raise exc.InvalidRequestError(
+ "Statement is not a compiled "
+ "expression construct.")
+ elif not self.context.isinsert and not self.context.isupdate:
+ raise exc.InvalidRequestError(
+ "Statement is not an insert() or update() "
+ "expression construct.")
+ return self.context.prefetch_cols
+
+ def supports_sane_rowcount(self):
+ """Return ``supports_sane_rowcount`` from the dialect.
+
+ See :attr:`.ResultProxy.rowcount` for background.
+
+ """
+
+ return self.dialect.supports_sane_rowcount
+
+ def supports_sane_multi_rowcount(self):
+ """Return ``supports_sane_multi_rowcount`` from the dialect.
+
+ See :attr:`.ResultProxy.rowcount` for background.
+
+ """
+
+ return self.dialect.supports_sane_multi_rowcount
+
+ def _fetchone_impl(self):
+ try:
+ return self.cursor.fetchone()
+ except AttributeError:
+ self._non_result()
+
+ def _fetchmany_impl(self, size=None):
+ try:
+ if size is None:
+ return self.cursor.fetchmany()
+ else:
+ return self.cursor.fetchmany(size)
+ except AttributeError:
+ self._non_result()
+
+ def _fetchall_impl(self):
+ try:
+ return self.cursor.fetchall()
+ except AttributeError:
+ self._non_result()
+
+ def _non_result(self):
+ if self._metadata is None:
+ raise exc.ResourceClosedError(
+ "This result object does not return rows. "
+ "It has been closed automatically.",
+ )
+ else:
+ raise exc.ResourceClosedError("This result object is closed.")
+
+ def process_rows(self, rows):
+ process_row = self._process_row
+ metadata = self._metadata
+ keymap = metadata._keymap
+ processors = metadata._processors
+ if self._echo:
+ log = self.context.engine.logger.debug
+ l = []
+ for row in rows:
+ log("Row %r", row)
+ l.append(process_row(metadata, row, processors, keymap))
+ return l
+ else:
+ return [process_row(metadata, row, processors, keymap)
+ for row in rows]
+
+ def fetchall(self):
+ """Fetch all rows, just like DB-API ``cursor.fetchall()``."""
+
+ try:
+ l = self.process_rows(self._fetchall_impl())
+ self.close()
+ return l
+ except Exception, e:
+ self.connection._handle_dbapi_exception(
+ e, None, None,
+ self.cursor, self.context)
+ raise
+
+ def fetchmany(self, size=None):
+ """Fetch many rows, just like DB-API
+ ``cursor.fetchmany(size=cursor.arraysize)``.
+
+ If rows are present, the cursor remains open after this is called.
+ Else the cursor is automatically closed and an empty list is returned.
+
+ """
+
+ try:
+ l = self.process_rows(self._fetchmany_impl(size))
+ if len(l) == 0:
+ self.close()
+ return l
+ except Exception, e:
+ self.connection._handle_dbapi_exception(
+ e, None, None,
+ self.cursor, self.context)
+ raise
+
+ def fetchone(self):
+ """Fetch one row, just like DB-API ``cursor.fetchone()``.
+
+ If a row is present, the cursor remains open after this is called.
+ Else the cursor is automatically closed and None is returned.
+
+ """
+ try:
+ row = self._fetchone_impl()
+ if row is not None:
+ return self.process_rows([row])[0]
+ else:
+ self.close()
+ return None
+ except Exception, e:
+ self.connection._handle_dbapi_exception(
+ e, None, None,
+ self.cursor, self.context)
+ raise
+
+ def first(self):
+ """Fetch the first row and then close the result set unconditionally.
+
+ Returns None if no row is present.
+
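+        E.g. (a sketch; ``users`` is a hypothetical :class:`.Table`)::
+
+            row = conn.execute(users.select()).first()
+            # the result set is closed here, whether or not
+            # a row was present
+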
+ """
+ if self._metadata is None:
+ self._non_result()
+
+ try:
+ row = self._fetchone_impl()
+ except Exception, e:
+ self.connection._handle_dbapi_exception(
+ e, None, None,
+ self.cursor, self.context)
+ raise
+
+ try:
+ if row is not None:
+ return self.process_rows([row])[0]
+ else:
+ return None
+ finally:
+ self.close()
+
+ def scalar(self):
+ """Fetch the first column of the first row, and close the result set.
+
+ Returns None if no row is present.
+
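+        E.g. (a sketch; ``users`` is a hypothetical :class:`.Table`
+        and ``conn`` a :class:`.Connection`)::
+
+            num = conn.execute(
+                        select([func.count(users.c.id)])).scalar()
+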
+ """
+ row = self.first()
+ if row is not None:
+ return row[0]
+ else:
+ return None
+
+class BufferedRowResultProxy(ResultProxy):
+ """A ResultProxy with row buffering behavior.
+
+ ``ResultProxy`` that buffers the contents of a selection of rows
+    before ``fetchone()`` is called. This is to allow the contents of
+ ``cursor.description`` to be available immediately, when
+ interfacing with a DB-API that requires rows to be consumed before
+ this information is available (currently psycopg2, when used with
+ server-side cursors).
+
+    The pre-fetching behavior fetches only one row initially, and then
+    grows its buffer size according to a fixed growth chart with each
+    successive need for additional rows, up to a maximum of 1000 rows.
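+
+    With the psycopg2 dialect, this proxy typically comes into play
+    when server-side cursors are enabled, e.g. (a sketch; the URL is
+    illustrative)::
+
+        engine = create_engine(
+                    "postgresql+psycopg2://scott:tiger@localhost/test",
+                    server_side_cursors=True)
+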
+ """
+
+ def _init_metadata(self):
+ self.__buffer_rows()
+ super(BufferedRowResultProxy, self)._init_metadata()
+
+ # this is a "growth chart" for the buffering of rows.
+ # each successive __buffer_rows call will use the next
+ # value in the list for the buffer size until the max
+ # is reached
+ size_growth = {
+ 1: 5,
+ 5: 10,
+ 10: 20,
+ 20: 50,
+ 50: 100,
+ 100: 250,
+ 250: 500,
+ 500: 1000
+ }
+
+ def __buffer_rows(self):
+ size = getattr(self, '_bufsize', 1)
+ self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
+ self._bufsize = self.size_growth.get(size, size)
+
+ def _fetchone_impl(self):
+ if self.closed:
+ return None
+ if not self.__rowbuffer:
+ self.__buffer_rows()
+ if not self.__rowbuffer:
+ return None
+ return self.__rowbuffer.popleft()
+
+ def _fetchmany_impl(self, size=None):
+ if size is None:
+ return self._fetchall_impl()
+ result = []
+ for x in range(0, size):
+ row = self._fetchone_impl()
+ if row is None:
+ break
+ result.append(row)
+ return result
+
+ def _fetchall_impl(self):
+ self.__rowbuffer.extend(self.cursor.fetchall())
+ ret = self.__rowbuffer
+ self.__rowbuffer = collections.deque()
+ return ret
+
+
+class FullyBufferedResultProxy(ResultProxy):
+ """A result proxy that buffers rows fully upon creation.
+
+ Used for operations where a result is to be delivered
+    after the database conversation cannot be continued,
+ such as MSSQL INSERT...OUTPUT after an autocommit.
+
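+    A dialect may also subclass this and override ``_buffer_rows()``
+    to deliver rows obtained somewhere other than
+    ``cursor.fetchall()``, e.g. (a hypothetical sketch;
+    ``_prefetched_rows`` is an illustrative attribute)::
+
+        class PrefetchedResultProxy(FullyBufferedResultProxy):
+            def _buffer_rows(self):
+                return collections.deque(self.context._prefetched_rows)
+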
+ """
+ def _init_metadata(self):
+ super(FullyBufferedResultProxy, self)._init_metadata()
+ self.__rowbuffer = self._buffer_rows()
+
+ def _buffer_rows(self):
+ return collections.deque(self.cursor.fetchall())
+
+ def _fetchone_impl(self):
+ if self.__rowbuffer:
+ return self.__rowbuffer.popleft()
+ else:
+ return None
+
+ def _fetchmany_impl(self, size=None):
+ if size is None:
+ return self._fetchall_impl()
+ result = []
+ for x in range(0, size):
+ row = self._fetchone_impl()
+ if row is None:
+ break
+ result.append(row)
+ return result
+
+ def _fetchall_impl(self):
+ ret = self.__rowbuffer
+ self.__rowbuffer = collections.deque()
+ return ret
+
+class BufferedColumnRow(RowProxy):
+ def __init__(self, parent, row, processors, keymap):
+ # preprocess row
+ row = list(row)
+ # this is a tad faster than using enumerate
+ index = 0
+ for processor in parent._orig_processors:
+ if processor is not None:
+ row[index] = processor(row[index])
+ index += 1
+ row = tuple(row)
+ super(BufferedColumnRow, self).__init__(parent, row,
+ processors, keymap)
+
+
+class BufferedColumnResultProxy(ResultProxy):
+ """A ResultProxy with column buffering behavior.
+
+ ``ResultProxy`` that loads all columns into memory each time
+ fetchone() is called. If fetchmany() or fetchall() are called,
+ the full grid of results is fetched. This is to operate with
+ databases where result rows contain "live" results that fall out
+ of scope unless explicitly fetched. Currently this includes
+ cx_Oracle LOB objects.
+
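+    A dialect's execution context would typically select this proxy
+    when LOB columns are present, e.g. (a sketch;
+    ``_has_lob_columns()`` is a hypothetical helper)::
+
+        def get_result_proxy(self):
+            if self._has_lob_columns():
+                return BufferedColumnResultProxy(self)
+            return ResultProxy(self)
+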
+ """
+
+ _process_row = BufferedColumnRow
+
+ def _init_metadata(self):
+ super(BufferedColumnResultProxy, self)._init_metadata()
+ metadata = self._metadata
+        # orig_processors will be used to preprocess each row as it
+        # is constructed
+        metadata._orig_processors = metadata._processors
+        # replace all of the type processors with None processors,
+        # since each row is fully processed up front by BufferedColumnRow
+ metadata._processors = [None for _ in xrange(len(metadata.keys))]
+ keymap = {}
+ for k, (func, obj, index) in metadata._keymap.iteritems():
+ keymap[k] = (None, obj, index)
+ self._metadata._keymap = keymap
+
+ def fetchall(self):
+ # can't call cursor.fetchall(), since rows must be
+ # fully processed before requesting more from the DBAPI.
+ l = []
+ while True:
+ row = self.fetchone()
+ if row is None:
+ break
+ l.append(row)
+ return l
+
+ def fetchmany(self, size=None):
+ # can't call cursor.fetchmany(), since rows must be
+ # fully processed before requesting more from the DBAPI.
+ if size is None:
+ return self.fetchall()
+ l = []
+ for i in xrange(size):
+ row = self.fetchone()
+ if row is None:
+ break
+ l.append(row)
+ return l
invoked automatically when the threadlocal engine strategy is used.
"""
-from sqlalchemy import util, event
-from sqlalchemy.engine import base
+from .. import util, event
+from . import base
import weakref
class TLConnection(base.Connection):
import re
import urllib
from .. import exc, util
-from . import base
+from . import Dialect
class URL(object):
# actual class
if hasattr(cls, 'dialect') and \
isinstance(cls.dialect, type) and \
- issubclass(cls.dialect, base.Dialect):
+ issubclass(cls.dialect, Dialect):
return cls.dialect
else:
return cls
--- /dev/null
+# engine/util.py
+# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from .. import util
+
+def _coerce_config(configuration, prefix):
+ """Convert configuration values to expected types."""
+
+ options = dict((key[len(prefix):], configuration[key])
+ for key in configuration
+ if key.startswith(prefix))
+ for option, type_ in (
+ ('convert_unicode', util.bool_or_str('force')),
+ ('pool_timeout', int),
+ ('echo', util.bool_or_str('debug')),
+ ('echo_pool', util.bool_or_str('debug')),
+ ('pool_recycle', int),
+ ('pool_size', int),
+ ('max_overflow', int),
+ ('pool_threadlocal', bool),
+ ('use_native_unicode', bool),
+ ):
+ util.coerce_kw_type(options, option, type_)
+ return options
+
+
+def _distill_params(multiparams, params):
+ """Given arguments from the calling form *multiparams, **params,
+ return a list of bind parameter structures, usually a list of
+ dictionaries.
+
+ In the case of 'raw' execution which accepts positional parameters,
+ it may be a list of tuples or lists.
+
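+    E.g. (illustrative inputs and their results)::
+
+        _distill_params((), {"x": 1})            # [{"x": 1}]
+        _distill_params(([{"a": 1}, {"a": 2}],), {})  # [{"a": 1}, {"a": 2}]
+        _distill_params(("v1", "v2"), {})        # [("v1", "v2")]
+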
+ """
+
+ if not multiparams:
+ if params:
+ return [params]
+ else:
+ return []
+ elif len(multiparams) == 1:
+ zero = multiparams[0]
+ if isinstance(zero, (list, tuple)):
+ if not zero or hasattr(zero[0], '__iter__') and \
+ not hasattr(zero[0], 'strip'):
+ # execute(stmt, [{}, {}, {}, ...])
+ # execute(stmt, [(), (), (), ...])
+ return zero
+ else:
+ # execute(stmt, ("value", "value"))
+ return [zero]
+ elif hasattr(zero, 'keys'):
+ # execute(stmt, {"key":"value"})
+ return [zero]
+ else:
+ # execute(stmt, "value")
+ return [[zero]]
+ else:
+ if hasattr(multiparams[0], '__iter__') and \
+ not hasattr(multiparams[0], 'strip'):
+ return multiparams
+ else:
+ return [multiparams]
+
+def connection_memoize(key):
+ """Decorator, memoize a function in a connection.info stash.
+
+ Only applicable to functions which take no arguments other than a
+ connection. The memo will be stored in ``connection.info[key]``.
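+
+    E.g., a hypothetical sketch (the key and the function are
+    illustrative)::
+
+        @connection_memoize('_server_version')
+        def server_version(self, connection):
+            return connection.scalar("SELECT version()")
+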
+ """
+
+ @util.decorator
+ def decorated(fn, self, connection):
+ connection = connection.connect()
+ try:
+ return connection.info[key]
+ except KeyError:
+ connection.info[key] = val = fn(self, connection)
+ return val
+
+ return decorated
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""Interfaces and abstract types.
+"""Deprecated core event interfaces.
This module is **deprecated** and is superseded by the
event system.
from test.lib.engines import testing_engine
import logging
from sqlalchemy.dialects.oracle.zxjdbc import ReturningParam
-from sqlalchemy.engine import base, default
+from sqlalchemy.engine import result as _result, default
from sqlalchemy.engine.base import Connection, Engine
from test.lib import fixtures
import StringIO
eq_(rows, [(i, "t_%d" % i) for i in xrange(1, 6)])
def test_plain(self):
- self._test_proxy(base.ResultProxy)
+ self._test_proxy(_result.ResultProxy)
def test_buffered_row_result_proxy(self):
- self._test_proxy(base.BufferedRowResultProxy)
+ self._test_proxy(_result.BufferedRowResultProxy)
def test_fully_buffered_result_proxy(self):
- self._test_proxy(base.FullyBufferedResultProxy)
+ self._test_proxy(_result.FullyBufferedResultProxy)
def test_buffered_column_result_proxy(self):
- self._test_proxy(base.BufferedColumnResultProxy)
+ self._test_proxy(_result.BufferedColumnResultProxy)
class EngineEventsTest(fixtures.TestBase):
__requires__ = 'ad_hoc_engines',
import StringIO
import sqlalchemy.engine.url as url
from sqlalchemy import create_engine, engine_from_config, exc, pool
-from sqlalchemy.engine import _coerce_config
+from sqlalchemy.engine.util import _coerce_config
from sqlalchemy.engine.default import DefaultDialect
import sqlalchemy as tsa
from test.lib import fixtures, testing
}
prefixed = dict(ini.items('prefixed'))
- self.assert_(tsa.engine._coerce_config(prefixed, 'sqlalchemy.')
+ self.assert_(_coerce_config(prefixed, 'sqlalchemy.')
== expected)
plain = dict(ini.items('plain'))
- self.assert_(tsa.engine._coerce_config(plain, '') == expected)
+ self.assert_(_coerce_config(plain, '') == expected)
def test_engine_from_config(self):
dbapi = mock_dbapi
import datetime
from sqlalchemy import *
from sqlalchemy import exc, sql, util
-from sqlalchemy.engine import default, base
+from sqlalchemy.engine import default, result as _result
from test.lib import *
from test.lib.schema import Table, Column
# these proxies don't work with no cursor.description present.
# so they don't apply to this test at the moment.
- # base.FullyBufferedResultProxy,
- # base.BufferedRowResultProxy,
- # base.BufferedColumnResultProxy
+ # result.FullyBufferedResultProxy,
+ # result.BufferedRowResultProxy,
+ # result.BufferedColumnResultProxy
conn = testing.db.connect()
for meth in ('fetchone', 'fetchall', 'first', 'scalar', 'fetchmany'):
)
result = users.outerjoin(addresses).select().execute()
- result = base.BufferedColumnResultProxy(result.context)
+ result = _result.BufferedColumnResultProxy(result.context)
r = result.first()
- assert isinstance(r, base.BufferedColumnRow)
+ assert isinstance(r, _result.BufferedColumnRow)
assert_raises_message(
exc.InvalidRequestError,
"Ambiguous column name",