self.__transaction = None
self.__savepoint_seq = 0
self.should_close_with_result = close_with_result
- if close_with_result:
- util.warn_deprecated_20(
- '"Connectionless" execution, which refers to running '
- "SQL commands using the Engine.execute() (or "
- "some_statement.execute()) method without "
- "calling .connect() or .begin() to get a Connection, is "
- "deprecated and will be removed SQLAlchemy 2.0"
- )
self.__invalid = False
self.__can_reconnect = True
"""
if self.__branch_from:
- util.warn_deprecated(
+ util.warn_deprecated_20(
"The .close() method on a so-called 'branched' connection is "
"deprecated as of 1.4, as are 'branched' connections overall, "
"and will be removed in a future release. If this is a "
"default-handling function, don't close the connection."
)
+
try:
del self.__connection
except AttributeError:
with self.connect() as conn:
conn._run_ddl_visitor(visitorcallable, element, **kwargs)
+ @util.deprecated_20(
+ ":meth:`.Engine.execute`",
+ alternative="All statement execution in SQLAlchemy 2.0 is performed "
+ "by the :meth:`.Connection.execute` method of :class:`.Connection`, "
+ "or in the ORM by the :meth:`.Session.execute` method of "
+ ":class:`.Session`.",
+ )
def execute(self, statement, *multiparams, **params):
"""Executes the given construct and returns a :class:`.ResultProxy`.
connection = self.connect(close_with_result=True)
return connection.execute(statement, *multiparams, **params)
+ @util.deprecated_20(
+ ":meth:`.Engine.scalar`",
+ alternative="All statement execution in SQLAlchemy 2.0 is performed "
+ "by the :meth:`.Connection.execute` method of :class:`.Connection`, "
+ "or in the ORM by the :meth:`.Session.execute` method of "
+ ":class:`.Session`; the :meth:`.Result.scalar` method can then be "
+ "used to return a scalar result.",
+ )
def scalar(self, statement, *multiparams, **params):
return self.execute(statement, *multiparams, **params).scalar()
pass
+class SingletonConstant(Immutable):
+    """Mixin for SQL constants that exist as one canonical instance
+    (used by Null, True_ and False_ below).
+
+    Constructing the class always returns the shared instance, so
+    identity comparison against ``cls._singleton`` is reliable.
+    """
+
+    def __new__(cls, *arg, **kw):
+        # always hand back the memoized canonical instance
+        return cls._singleton
+
+    @classmethod
+    def _create_singleton(cls):
+        # Build the canonical instance once and store it on the class;
+        # invoked immediately after each subclass definition.
+        obj = object.__new__(cls)
+        obj.__init__()
+        cls._singleton = obj
+
+
class HasMemoized(object):
def _reset_memoizations(self):
self._memoized_property.expire_instance(self)
def _from_objects(*elements):
- return itertools.chain(*[element._from_objects for element in elements])
+ return itertools.chain.from_iterable(
+ [element._from_objects for element in elements]
+ )
+
+
+def _select_iterables(elements):
+    """expand tables into individual columns in the
+    given list of column expressions.
+
+    Each element's ``_select_iterable`` is chained lazily and in order
+    via ``itertools.chain.from_iterable``; an iterator is returned.
+    """
+    return itertools.chain.from_iterable(
+        [c._select_iterable for c in elements]
+    )
def _generative(fn):
__slots__ = ("statement",)
+    @classmethod
+    def _create(cls, statement, compiler, **kw):
+        # factory construction; the default entry point referenced by
+        # ``_compile_state_factory`` attributes.
+
+        # specific CompileState classes here will look for
+        # "plugins" in the given statement. From there they will invoke
+        # the appropriate plugin constructor if one is found and return
+        # the alternate CompileState object.
+
+        c = cls.__new__(cls)
+        c.__init__(statement, compiler, **kw)
+        return c
+
def __init__(self, statement, compiler, **kw):
self.statement = statement
s.__dict__ = self.__dict__.copy()
return s
+ def options(self, *options):
+ """Apply options to this statement.
+
+ In the general sense, options are any kind of Python object
+ that can be interpreted by the SQL compiler for the statement.
+ These options can be consumed by specific dialects or specific kinds
+ of compilers.
+
+ The most commonly known kind of option are the ORM level options
+ that apply "eager load" and other loading behaviors to an ORM
+ query. However, options can theoretically be used for many other
+ purposes.
+
+ For background on specific kinds of options for specific kinds of
+ statements, refer to the documentation for those option objects.
+
+ .. versionchanged:: 1.4 - added :meth:`.Generative.options` to
+ Core statement objects towards the goal of allowing unified
+ Core / ORM querying capabilities.
+
+ .. seealso::
+
+ :ref:`deferred_options` - refers to options specific to the usage
+ of ORM queries
+
+ :ref:`relationship_loader_options` - refers to options specific
+ to the usage of ORM queries
+
+ """
+ self._options += options
+
class HasCompileState(Generative):
"""A class that has a :class:`.CompileState` associated with it."""
- _compile_state_cls = CompileState
+ _compile_state_factory = CompileState._create
+
+ _compile_state_plugin = None
class Executable(Generative):
"""
return self._execution_options
+ @util.deprecated_20(
+ ":meth:`.Executable.execute`",
+ alternative="All statement execution in SQLAlchemy 2.0 is performed "
+ "by the :meth:`.Connection.execute` method of :class:`.Connection`, "
+ "or in the ORM by the :meth:`.Session.execute` method of "
+ ":class:`.Session`.",
+ )
def execute(self, *multiparams, **params):
"""Compile and execute this :class:`.Executable`."""
e = self.bind
raise exc.UnboundExecutionError(msg)
return e._execute_clauseelement(self, multiparams, params)
+ @util.deprecated_20(
+ ":meth:`.Executable.scalar`",
+ alternative="All statement execution in SQLAlchemy 2.0 is performed "
+ "by the :meth:`.Connection.execute` method of :class:`.Connection`, "
+ "or in the ORM by the :meth:`.Session.execute` method of "
+ ":class:`.Session`; the :meth:`.Result.scalar` method can then be "
+ "used to return a scalar result.",
+ )
def scalar(self, *multiparams, **params):
"""Compile and execute this :class:`.Executable`, returning the
result's scalar representation.
import itertools
import re
+from . import base
from . import coercions
from . import crud
from . import elements
self, element, within_columns_clause=False, **kwargs
):
if self.stack and self.dialect.supports_simple_order_by_label:
- selectable = self.stack[-1]["selectable"]
+ compile_state = self.stack[-1]["compile_state"]
- with_cols, only_froms, only_cols = selectable._label_resolve_dict
+ (
+ with_cols,
+ only_froms,
+ only_cols,
+ ) = compile_state._label_resolve_dict
if within_columns_clause:
resolve_dict = only_froms
else:
# compiling the element outside of the context of a SELECT
return self.process(element._text_clause)
- selectable = self.stack[-1]["selectable"]
- with_cols, only_froms, only_cols = selectable._label_resolve_dict
+ compile_state = self.stack[-1]["compile_state"]
+ with_cols, only_froms, only_cols = compile_state._label_resolve_dict
try:
if within_columns_clause:
col = only_froms[element.element]
if s
)
+    def _generate_delimited_and_list(self, clauses, **kw):
+        # Render ``clauses`` joined by AND, applying the same
+        # True_/False_ short-circuit flattening that and_() itself uses.
+
+        lcc, clauses = elements.BooleanClauseList._process_clauses_for_boolean(
+            operators.and_,
+            elements.True_._singleton,
+            elements.False_._singleton,
+            clauses,
+        )
+        if lcc == 1:
+            # a single criterion remains; render it by itself
+            return clauses[0]._compiler_dispatch(self, **kw)
+        else:
+            separator = OPERATORS[operators.and_]
+            # join the rendered clauses, skipping any that render empty
+            return separator.join(
+                s
+                for s in (c._compiler_dispatch(self, **kw) for c in clauses)
+                if s
+            )
+
def visit_clauselist(self, clauselist, **kw):
sep = clauselist.operator
if sep is None:
self, cs, asfrom=False, compound_index=0, **kwargs
):
toplevel = not self.stack
+
+ compile_state = cs._compile_state_factory(cs, self, **kwargs)
+
+ if toplevel:
+ self.compile_state = compile_state
+
entry = self._default_stack_entry if toplevel else self.stack[-1]
need_result_map = toplevel or (
compound_index == 0
"correlate_froms": entry["correlate_froms"],
"asfrom_froms": entry["asfrom_froms"],
"selectable": cs,
+ "compile_state": compile_state,
"need_result_map_for_compound": need_result_map,
}
)
from_linter=None,
**kw
):
-
if from_linter and operators.is_comparison(binary.operator):
from_linter.edges.update(
itertools.product(
need_column_expressions=False,
):
"""produce labeled columns present in a select()."""
-
impl = column.type.dialect_impl(self.dialect)
if impl._has_column_expression and (
or isinstance(column, functions.FunctionElement)
)
):
- result_expr = _CompileLabel(col_expr, column.anon_label)
+ result_expr = _CompileLabel(
+ col_expr,
+ column.anon_label
+ if not column_is_repeated
+ else column._dedupe_label_anon_label,
+ )
elif col_expr is not column:
# TODO: are we sure "column" has a .name and .key here ?
# assert isinstance(column, elements.ColumnClause)
[("correlate_froms", frozenset()), ("asfrom_froms", frozenset())]
)
- def _display_froms_for_select(self, select, asfrom, lateral=False):
+ def _display_froms_for_select(
+ self, select_stmt, asfrom, lateral=False, **kw
+ ):
# utility method to help external dialects
# get the correct from list for a select.
# specifically the oracle dialect needs this feature
toplevel = not self.stack
entry = self._default_stack_entry if toplevel else self.stack[-1]
+ compile_state = select_stmt._compile_state_factory(select_stmt, self)
+
correlate_froms = entry["correlate_froms"]
asfrom_froms = entry["asfrom_froms"]
if asfrom and not lateral:
- froms = select._get_display_froms(
+ froms = compile_state._get_display_froms(
explicit_correlate_froms=correlate_froms.difference(
asfrom_froms
),
implicit_correlate_froms=(),
)
else:
- froms = select._get_display_froms(
+ froms = compile_state._get_display_froms(
explicit_correlate_froms=correlate_froms,
implicit_correlate_froms=asfrom_froms,
)
def visit_select(
self,
- select,
+ select_stmt,
asfrom=False,
fromhints=None,
compound_index=0,
**kwargs
):
+ compile_state = select_stmt._compile_state_factory(
+ select_stmt, self, **kwargs
+ )
+ select_stmt = compile_state.statement
+
toplevel = not self.stack
+
+ if toplevel:
+ self.compile_state = compile_state
+
entry = self._default_stack_entry if toplevel else self.stack[-1]
populate_result_map = need_column_expressions = (
del kwargs["add_to_result_map"]
froms = self._setup_select_stack(
- select, entry, asfrom, lateral, compound_index
+ select_stmt, compile_state, entry, asfrom, lateral, compound_index
)
column_clause_args = kwargs.copy()
text = "SELECT " # we're off to a good start !
- if select._hints:
- hint_text, byfrom = self._setup_select_hints(select)
+ if select_stmt._hints:
+ hint_text, byfrom = self._setup_select_hints(select_stmt)
if hint_text:
text += hint_text + " "
else:
byfrom = None
- if select._prefixes:
- text += self._generate_prefixes(select, select._prefixes, **kwargs)
+ if select_stmt._prefixes:
+ text += self._generate_prefixes(
+ select_stmt, select_stmt._prefixes, **kwargs
+ )
- text += self.get_select_precolumns(select, **kwargs)
+ text += self.get_select_precolumns(select_stmt, **kwargs)
# the actual list of columns to print in the SELECT column list.
inner_columns = [
c
for c in [
self._label_select_column(
- select,
+ select_stmt,
column,
populate_result_map,
asfrom,
column_is_repeated=repeated,
need_column_expressions=need_column_expressions,
)
- for name, column, repeated in select._columns_plus_names
+ for name, column, repeated in compile_state.columns_plus_names
]
if c is not None
]
# if this select is a compiler-generated wrapper,
# rewrite the targeted columns in the result map
+ compile_state_wraps_for = select_wraps_for._compile_state_factory(
+ select_wraps_for, self, **kwargs
+ )
+
translate = dict(
zip(
[
name
- for (key, name, repeated) in select._columns_plus_names
+ for (
+ key,
+ name,
+ repeated,
+ ) in compile_state.columns_plus_names
],
[
name
key,
name,
repeated,
- ) in select_wraps_for._columns_plus_names
+ ) in compile_state_wraps_for.columns_plus_names
],
)
)
]
text = self._compose_select_body(
- text, select, inner_columns, froms, byfrom, toplevel, kwargs
+ text,
+ select_stmt,
+ compile_state,
+ inner_columns,
+ froms,
+ byfrom,
+ toplevel,
+ kwargs,
)
- if select._statement_hints:
+ if select_stmt._statement_hints:
per_dialect = [
ht
- for (dialect_name, ht) in select._statement_hints
+ for (dialect_name, ht) in select_stmt._statement_hints
if dialect_name in ("*", self.dialect.name)
]
if per_dialect:
if self.ctes and toplevel:
text = self._render_cte_clause() + text
- if select._suffixes:
+ if select_stmt._suffixes:
text += " " + self._generate_prefixes(
- select, select._suffixes, **kwargs
+ select_stmt, select_stmt._suffixes, **kwargs
)
self.stack.pop(-1)
return hint_text, byfrom
def _setup_select_stack(
- self, select, entry, asfrom, lateral, compound_index
+ self, select, compile_state, entry, asfrom, lateral, compound_index
):
correlate_froms = entry["correlate_froms"]
asfrom_froms = entry["asfrom_froms"]
if select_0._is_select_container:
select_0 = select_0.element
numcols = len(select_0.selected_columns)
- # numcols = len(select_0._columns_plus_names)
- if len(select._columns_plus_names) != numcols:
+
+ if len(compile_state.columns_plus_names) != numcols:
raise exc.CompileError(
"All selectables passed to "
"CompoundSelect must have identical numbers of "
)
if asfrom and not lateral:
- froms = select._get_display_froms(
+ froms = compile_state._get_display_froms(
explicit_correlate_froms=correlate_froms.difference(
asfrom_froms
),
implicit_correlate_froms=(),
)
else:
- froms = select._get_display_froms(
+ froms = compile_state._get_display_froms(
explicit_correlate_froms=correlate_froms,
implicit_correlate_froms=asfrom_froms,
)
"asfrom_froms": new_correlate_froms,
"correlate_froms": all_correlate_froms,
"selectable": select,
+ "compile_state": compile_state,
}
self.stack.append(new_entry)
return froms
def _compose_select_body(
- self, text, select, inner_columns, froms, byfrom, toplevel, kwargs
+ self,
+ text,
+ select,
+ compile_state,
+ inner_columns,
+ froms,
+ byfrom,
+ toplevel,
+ kwargs,
):
text += ", ".join(inner_columns)
else:
text += self.default_from()
- if select._whereclause is not None:
- t = select._whereclause._compiler_dispatch(
- self, from_linter=from_linter, **kwargs
+ if select._where_criteria:
+ t = self._generate_delimited_and_list(
+ select._where_criteria, from_linter=from_linter, **kwargs
)
if t:
text += " \nWHERE " + t
):
from_linter.warn()
- if select._group_by_clause.clauses:
+ if select._group_by_clauses:
text += self.group_by_clause(select, **kwargs)
- if select._having is not None:
- t = select._having._compiler_dispatch(self, **kwargs)
+ if select._having_criteria:
+ t = self._generate_delimited_and_list(
+ select._having_criteria, **kwargs
+ )
if t:
text += " \nHAVING " + t
- if select._order_by_clause.clauses:
+ if select._order_by_clauses:
text += self.order_by_clause(select, **kwargs)
if (
def group_by_clause(self, select, **kw):
"""allow dialects to customize how GROUP BY is rendered."""
- group_by = select._group_by_clause._compiler_dispatch(self, **kw)
+ group_by = self._generate_delimited_list(
+ select._group_by_clauses, OPERATORS[operators.comma_op], **kw
+ )
if group_by:
return " GROUP BY " + group_by
else:
def order_by_clause(self, select, **kw):
"""allow dialects to customize how ORDER BY is rendered."""
- order_by = select._order_by_clause._compiler_dispatch(self, **kw)
+ order_by = self._generate_delimited_list(
+ select._order_by_clauses, OPERATORS[operators.comma_op], **kw
+ )
+
if order_by:
return " ORDER BY " + order_by
else:
def visit_insert(self, insert_stmt, **kw):
- compile_state = insert_stmt._compile_state_cls(
- insert_stmt, self, isinsert=True, **kw
+ compile_state = insert_stmt._compile_state_factory(
+ insert_stmt, self, **kw
)
insert_stmt = compile_state.statement
)
def visit_update(self, update_stmt, **kw):
- compile_state = update_stmt._compile_state_cls(
- update_stmt, self, isupdate=True, **kw
+ compile_state = update_stmt._compile_state_factory(
+ update_stmt, self, **kw
)
update_stmt = compile_state.statement
text += " " + extra_from_text
if update_stmt._where_criteria:
- t = self._generate_delimited_list(
- update_stmt._where_criteria, OPERATORS[operators.and_], **kw
+ t = self._generate_delimited_and_list(
+ update_stmt._where_criteria, **kw
)
if t:
text += " WHERE " + t
return from_table._compiler_dispatch(self, asfrom=True, iscrud=True)
def visit_delete(self, delete_stmt, **kw):
- compile_state = delete_stmt._compile_state_cls(
- delete_stmt, self, isdelete=True, **kw
+ compile_state = delete_stmt._compile_state_factory(
+ delete_stmt, self, **kw
)
delete_stmt = compile_state.statement
text += " " + extra_from_text
if delete_stmt._where_criteria:
- t = self._generate_delimited_list(
- delete_stmt._where_criteria, OPERATORS[operators.and_], **kw
+ t = self._generate_delimited_and_list(
+ delete_stmt._where_criteria, **kw
)
if t:
text += " WHERE " + t
def returning_clause(self, stmt, returning_cols):
columns = [
self._label_select_column(None, c, True, False, {})
- for c in elements._select_iterables(returning_cols)
+ for c in base._select_iterables(returning_cols)
]
return "RETURNING " + ", ".join(columns)
isdelete = False
isinsert = False
+    @classmethod
+    def _create_insert(cls, statement, compiler, **kw):
+        # factory assigned to Insert._compile_state_factory
+        return DMLState(statement, compiler, isinsert=True, **kw)
+
+    @classmethod
+    def _create_update(cls, statement, compiler, **kw):
+        # factory assigned to Update._compile_state_factory
+        return DMLState(statement, compiler, isupdate=True, **kw)
+
+    @classmethod
+    def _create_delete(cls, statement, compiler, **kw):
+        # factory assigned to Delete._compile_state_factory
+        return DMLState(statement, compiler, isdelete=True, **kw)
+
def __init__(
self,
statement,
_hints = util.immutabledict()
named_with_column = False
- _compile_state_cls = DMLState
-
@classmethod
def _constructor_20_deprecations(cls, fn_name, clsname, names):
_supports_multi_parameters = True
+ _compile_state_factory = DMLState._create_insert
+
select = None
include_insert_from_select_defaults = False
__visit_name__ = "update"
+ _compile_state_factory = DMLState._create_update
+
_traverse_internals = (
[
("table", InternalTraversal.dp_clauseelement),
__visit_name__ = "delete"
+ _compile_state_factory = DMLState._create_delete
+
_traverse_internals = (
[
("table", InternalTraversal.dp_clauseelement),
from .base import Immutable
from .base import NO_ARG
from .base import PARSE_AUTOCOMMIT
+from .base import SingletonConstant
from .coercions import _document_text_coercion
from .traversals import _copy_internals
from .traversals import _get_children
"""
return traversals.compare(self, other, **kw)
- def _copy_internals(self, **kw):
+ def _copy_internals(self, omit_attrs=(), **kw):
"""Reassign internal elements to be clones of themselves.
Called during a copy-and-traverse operation on newly
for attrname, obj, meth in _copy_internals.run_generated_dispatch(
self, traverse_internals, "_generated_copy_internals_traversal"
):
+ if attrname in omit_attrs:
+ continue
+
if obj is not None:
result = meth(self, obj, **kw)
if result is not None:
setattr(self, attrname, result)
- def get_children(self, omit_attrs=None, **kw):
+ def get_children(self, omit_attrs=(), **kw):
r"""Return immediate child :class:`.Traversible` elements of this
:class:`.Traversible`.
for attrname, obj, meth in _get_children.run_generated_dispatch(
self, traverse_internals, "_generated_get_children_traversal"
):
- if obj is None or omit_attrs and attrname in omit_attrs:
+ if obj is None or attrname in omit_attrs:
continue
result.extend(meth(obj, **kw))
return result
@util.memoized_property
def _dedupe_label_anon_label(self):
- return self._anon_label(getattr(self, "_label", "anon") + "_")
+ label = getattr(self, "_label", None) or "anon"
+ return self._anon_label(label + "_")
class WrapsColumnExpression(object):
)
_is_implicitly_boolean = False
+ _render_label_in_columns_clause = False
+
def __and__(self, other):
# support use in select.where(), query.filter()
return and_(self, other)
@classmethod
@util.deprecated_params(
- autocommit=(
- "0.6",
- "The :paramref:`.text.autocommit` parameter is deprecated and "
- "will be removed in a future release. Please use the "
- ":paramref:`.Connection.execution_options.autocommit` parameter "
- "in conjunction with the :meth:`.Executable.execution_options` "
- "method.",
- ),
bindparams=(
"0.9",
"The :paramref:`.text.bindparams` parameter "
)
@_document_text_coercion("text", ":func:`.text`", ":paramref:`.text.text`")
def _create_text(
- self, text, bind=None, bindparams=None, typemap=None, autocommit=None
+ self, text, bind=None, bindparams=None, typemap=None,
):
r"""Construct a new :class:`.TextClause` clause, representing
a textual SQL string directly.
to specify bind parameters; they will be compiled to their
engine-specific format.
- :param autocommit: whether or not to set the "autocommit" execution
- option for this :class:`.TextClause` object.
-
:param bind:
an optional connection or engine to be used for this text query.
stmt = stmt.bindparams(*bindparams)
if typemap:
stmt = stmt.columns(**typemap)
- if autocommit is not None:
- stmt = stmt.execution_options(autocommit=autocommit)
return stmt
return self
-class Null(roles.ConstExprRole, ColumnElement):
+class Null(SingletonConstant, roles.ConstExprRole, ColumnElement):
"""Represent the NULL keyword in a SQL statement.
:class:`.Null` is accessed as a constant via the
return Null()
-class False_(roles.ConstExprRole, ColumnElement):
+Null._create_singleton()
+
+
+class False_(SingletonConstant, roles.ConstExprRole, ColumnElement):
"""Represent the ``false`` keyword, or equivalent, in a SQL statement.
:class:`.False_` is accessed as a constant via the
return False_()
-class True_(roles.ConstExprRole, ColumnElement):
+False_._create_singleton()
+
+
+class True_(SingletonConstant, roles.ConstExprRole, ColumnElement):
"""Represent the ``true`` keyword, or equivalent, in a SQL statement.
:class:`.True_` is accessed as a constant via the
return True_()
+True_._create_singleton()
+
+
class ClauseList(
roles.InElementRole,
roles.OrderByRole,
]
self._is_implicitly_boolean = operators.is_boolean(self.operator)
+    @classmethod
+    def _construct_raw(cls, operator, clauses=None):
+        # Build an instance directly, bypassing __init__ and its coercion
+        # logic; ``clauses`` are stored as-is.  A falsy ``clauses``
+        # (None or empty) yields a fresh empty list.
+        self = cls.__new__(cls)
+        self.clauses = clauses if clauses else []
+        self.group = True
+        self.operator = operator
+        self.group_contents = True
+        self._tuple_values = False
+        self._is_implicitly_boolean = False
+        return self
+
def __iter__(self):
return iter(self.clauses)
)
@classmethod
- def _construct(cls, operator, continue_on, skip_on, *clauses, **kw):
-
+ def _process_clauses_for_boolean(
+ cls, operator, continue_on, skip_on, clauses
+ ):
has_continue_on = None
- special_elements = (continue_on, skip_on)
- convert_clauses = []
-
- for clause in util.coerce_generator_arg(clauses):
- clause = coercions.expect(roles.WhereHavingRole, clause)
- # elements that are not the continue/skip are the most
- # common, try to have only one isinstance() call for that case.
- if not isinstance(clause, special_elements):
- convert_clauses.append(clause)
- elif isinstance(clause, skip_on):
- # instance of skip_on, e.g. and_(x, y, False, z), cancels
- # the rest out
- return clause.self_group(against=operators._asbool)
- elif has_continue_on is None:
+ convert_clauses = []
+ for clause in clauses:
+ if clause is continue_on:
# instance of continue_on, like and_(x, y, True, z), store it
# if we didn't find one already, we will use it if there
# are no other expressions here.
has_continue_on = clause
+ elif clause is skip_on:
+ # instance of skip_on, e.g. and_(x, y, False, z), cancels
+ # the rest out
+ convert_clauses = [clause]
+ break
+ else:
+ convert_clauses.append(clause)
+
+ if not convert_clauses and has_continue_on is not None:
+ convert_clauses = [has_continue_on]
lcc = len(convert_clauses)
+ if lcc > 1:
+ against = operator
+ else:
+ against = operators._asbool
+ return lcc, [c.self_group(against=against) for c in convert_clauses]
+
+ @classmethod
+ def _construct(cls, operator, continue_on, skip_on, *clauses, **kw):
+
+ lcc, convert_clauses = cls._process_clauses_for_boolean(
+ operator,
+ continue_on,
+ skip_on,
+ [
+ coercions.expect(roles.WhereHavingRole, clause)
+ for clause in util.coerce_generator_arg(clauses)
+ ],
+ )
+
if lcc > 1:
# multiple elements. Return regular BooleanClauseList
# which will link elements against the operator.
- return cls._construct_raw(
- operator,
- [c.self_group(against=operator) for c in convert_clauses],
- )
+ return cls._construct_raw(operator, convert_clauses)
elif lcc == 1:
# just one element. return it as a single boolean element,
# not a list and discard the operator.
- return convert_clauses[0].self_group(against=operators._asbool)
- elif not lcc and has_continue_on is not None:
- # no elements but we had a "continue", just return the continue
- # as a boolean element, discard the operator.
- return has_continue_on.self_group(against=operators._asbool)
+ return convert_clauses[0]
else:
# no elements period. deprecated use case. return an empty
# ClauseList construct that generates nothing unless it has
"%(name)s() construct, use %(name)s(%(continue_on)s, *args)."
% {
"name": operator.__name__,
- "continue_on": "True" if continue_on is True_ else "False",
+ "continue_on": "True"
+ if continue_on is True_._singleton
+ else "False",
}
)
return cls._construct_raw(operator)
:func:`.or_`
"""
- return cls._construct(operators.and_, True_, False_, *clauses)
+ return cls._construct(
+ operators.and_, True_._singleton, False_._singleton, *clauses
+ )
@classmethod
def or_(cls, *clauses):
:func:`.and_`
"""
- return cls._construct(operators.or_, False_, True_, *clauses)
+ return cls._construct(
+ operators.or_, False_._singleton, True_._singleton, *clauses
+ )
@property
def _select_iterable(self):
return str.__repr__(self)
-def _select_iterables(elements):
- """expand tables into individual columns in the
- given list of column expressions.
-
- """
- return itertools.chain(*[c._select_iterable for c in elements])
-
-
def _find_columns(clause):
"""locate Column objects within the given expression."""
from .base import _from_objects # noqa
+from .base import _select_iterables # noqa
from .base import ColumnCollection # noqa
from .base import Executable # noqa
from .base import PARSE_AUTOCOMMIT # noqa
from .dml import Update # noqa
from .dml import UpdateBase # noqa
from .dml import ValuesBase # noqa
-from .elements import _select_iterables # noqa
from .elements import _truncated_label # noqa
from .elements import between # noqa
from .elements import BinaryExpression # noqa
from .base import _expand_cloned
from .base import _from_objects
from .base import _generative
+from .base import _select_iterables
from .base import ColumnCollection
from .base import ColumnSet
+from .base import CompileState
from .base import DedupeColumnCollection
from .base import Executable
from .base import Generative
+from .base import HasCompileState
from .base import HasMemoized
from .base import Immutable
from .coercions import _document_text_coercion
from .elements import _anonymous_label
-from .elements import _select_iterables
from .elements import and_
from .elements import BindParameter
from .elements import ClauseElement
from .elements import GroupedElement
from .elements import Grouping
from .elements import literal_column
-from .elements import True_
from .elements import UnaryExpression
from .visitors import InternalTraversal
from .. import exc
("skip_locked", InternalTraversal.dp_boolean),
]
- @classmethod
- def parse_legacy_select(self, arg):
- """Parse the for_update argument of :func:`.select`.
-
- :param mode: Defines the lockmode to use.
-
- ``None`` - translates to no lockmode
-
- ``'update'`` - translates to ``FOR UPDATE``
- (standard SQL, supported by most dialects)
-
- ``'nowait'`` - translates to ``FOR UPDATE NOWAIT``
- (supported by Oracle, PostgreSQL 8.1 upwards)
-
- ``'read'`` - translates to ``LOCK IN SHARE MODE`` (for MySQL),
- and ``FOR SHARE`` (for PostgreSQL)
-
- ``'read_nowait'`` - translates to ``FOR SHARE NOWAIT``
- (supported by PostgreSQL). ``FOR SHARE`` and
- ``FOR SHARE NOWAIT`` (PostgreSQL).
-
- """
- if arg in (None, False):
- return None
-
- nowait = read = False
- if arg == "nowait":
- nowait = True
- elif arg == "read":
- read = True
- elif arg == "read_nowait":
- read = nowait = True
- elif arg is not True:
- raise exc.ArgumentError("Unknown for_update argument: %r" % arg)
-
- return ForUpdateArg(read=read, nowait=nowait)
-
- @property
- def legacy_for_update_value(self):
- if self.read and not self.nowait:
- return "read"
- elif self.read and self.nowait:
- return "read_nowait"
- elif self.nowait:
- return "nowait"
- else:
- return True
-
def __eq__(self, other):
return (
isinstance(other, ForUpdateArg)
"""
return Lateral._factory(self, name)
- @_generative
- @util.deprecated(
- "0.6",
- message="The :meth:`.SelectBase.autocommit` method is deprecated, "
- "and will be removed in a future release. Please use the "
- "the :paramref:`.Connection.execution_options.autocommit` "
- "parameter in conjunction with the "
- ":meth:`.Executable.execution_options` method.",
- )
- def autocommit(self):
- """return a new selectable with the 'autocommit' flag set to
- True.
- """
-
- self._execution_options = self._execution_options.union(
- {"autocommit": True}
- )
-
def _generate(self):
"""Override the default _generate() method to also clear out
exported collections."""
"""
- _order_by_clause = ClauseList()
- _group_by_clause = ClauseList()
+ _order_by_clauses = ()
+ _group_by_clauses = ()
_limit_clause = None
_offset_clause = None
_for_update_arg = None
def __init__(
self,
use_labels=False,
- for_update=False,
limit=None,
offset=None,
order_by=None,
group_by=None,
bind=None,
- autocommit=None,
):
self.use_labels = use_labels
- if for_update is not False:
- self._for_update_arg = ForUpdateArg.parse_legacy_select(for_update)
-
- if autocommit is not None:
- util.warn_deprecated(
- "The select.autocommit parameter is deprecated and will be "
- "removed in a future release. Please refer to the "
- "Select.execution_options.autocommit` parameter."
- )
- self._execution_options = self._execution_options.union(
- {"autocommit": autocommit}
- )
if limit is not None:
- self._limit_clause = self._offset_or_limit_clause(limit)
+ self.limit.non_generative(self, limit)
if offset is not None:
- self._offset_clause = self._offset_or_limit_clause(offset)
- self._bind = bind
+ self.offset.non_generative(self, offset)
if order_by is not None:
- self._order_by_clause = ClauseList(
- *util.to_list(order_by),
- _literal_as_text_role=roles.OrderByRole
- )
+ self.order_by.non_generative(self, *util.to_list(order_by))
if group_by is not None:
- self._group_by_clause = ClauseList(
- *util.to_list(group_by), _literal_as_text_role=roles.ByOfRole
- )
+ self.group_by.non_generative(self, *util.to_list(group_by))
- @property
- def for_update(self):
- """Provide legacy dialect support for the ``for_update`` attribute.
- """
- if self._for_update_arg is not None:
- return self._for_update_arg.legacy_for_update_value
- else:
- return None
-
- @for_update.setter
- def for_update(self, value):
- self._for_update_arg = ForUpdateArg.parse_legacy_select(value)
+ self._bind = bind
@_generative
def with_for_update(
on Oracle and PostgreSQL dialects or ``FOR SHARE SKIP LOCKED`` if
``read=True`` is also specified.
- .. versionadded:: 1.1.0
-
:param key_share: boolean, will render ``FOR NO KEY UPDATE``,
or if combined with ``read=True`` will render ``FOR KEY SHARE``,
on the PostgreSQL dialect.
- .. versionadded:: 1.1.0
-
"""
self._for_update_arg = ForUpdateArg(
nowait=nowait,
"""
self.use_labels = True
+ @property
+ def _group_by_clause(self):
+ """ClauseList access to group_by_clauses for legacy dialects"""
+ return ClauseList._construct_raw(
+ operators.comma_op, self._group_by_clauses
+ )
+
+ @property
+ def _order_by_clause(self):
+ """ClauseList access to order_by_clauses for legacy dialects"""
+ return ClauseList._construct_raw(
+ operators.comma_op, self._order_by_clauses
+ )
+
def _offset_or_limit_clause(self, element, name=None, type_=None):
"""Convert the given value to an "offset or limit" clause.
"""
if len(clauses) == 1 and clauses[0] is None:
- self._order_by_clause = ClauseList()
+ self._order_by_clauses = ()
else:
- if getattr(self, "_order_by_clause", None) is not None:
- clauses = list(self._order_by_clause) + list(clauses)
- self._order_by_clause = ClauseList(
- *clauses, _literal_as_text_role=roles.OrderByRole
+ self._order_by_clauses += tuple(
+ coercions.expect(roles.OrderByRole, clause)
+ for clause in clauses
)
@_generative
"""
if len(clauses) == 1 and clauses[0] is None:
- self._group_by_clause = ClauseList()
+ self._group_by_clauses = ()
else:
- if getattr(self, "_group_by_clause", None) is not None:
- clauses = list(self._group_by_clause) + list(clauses)
- self._group_by_clause = ClauseList(
- *clauses, _literal_as_text_role=roles.ByOfRole
+ self._group_by_clauses += tuple(
+ coercions.expect(roles.ByOfRole, clause) for clause in clauses
)
- @property
+
+class CompoundSelectState(CompileState):
+ @util.memoized_property
def _label_resolve_dict(self):
- raise NotImplementedError()
+ # TODO: this is hacky and slow
+ hacky_subquery = self.statement.subquery()
+ hacky_subquery.named_with_column = False
+ d = dict((c.key, c) for c in hacky_subquery.c)
+ return d, d, d
-class CompoundSelect(GenerativeSelect):
+class CompoundSelect(HasCompileState, GenerativeSelect):
"""Forms the basis of ``UNION``, ``UNION ALL``, and other
SELECT-based set operations.
"""
__visit_name__ = "compound_select"
+ _compile_state_factory = CompoundSelectState._create
_traverse_internals = [
("selects", InternalTraversal.dp_clauseelement_list),
("_limit_clause", InternalTraversal.dp_clauseelement),
("_offset_clause", InternalTraversal.dp_clauseelement),
- ("_order_by_clause", InternalTraversal.dp_clauseelement),
- ("_group_by_clause", InternalTraversal.dp_clauseelement),
+ ("_order_by_clauses", InternalTraversal.dp_clauseelement_list),
+ ("_group_by_clauses", InternalTraversal.dp_clauseelement_list),
("_for_update_arg", InternalTraversal.dp_clauseelement),
("keyword", InternalTraversal.dp_string),
] + SupportsCloneAnnotations._clone_annotations_traverse_internals
GenerativeSelect.__init__(self, **kwargs)
- @SelectBase._memoized_property
- def _label_resolve_dict(self):
- # TODO: this is hacky and slow
- hacky_subquery = self.subquery()
- hacky_subquery.named_with_column = False
- d = dict((c.key, c) for c in hacky_subquery.c)
- return d, d, d
-
@classmethod
def _create_union(cls, *selects, **kwargs):
r"""Return a ``UNION`` of multiple selectables.
"""
return self.selects[0].selected_columns
+ @property
def bind(self):
if self._bind:
return self._bind
else:
return None
- def _set_bind(self, bind):
+ @bind.setter
+ def bind(self, bind):
self._bind = bind
- bind = property(bind, _set_bind)
-
class DeprecatedSelectGenerations(object):
@util.deprecated(
self.select_from.non_generative(self, fromclause)
+class SelectState(CompileState):
+    def __init__(self, statement, compiler, **kw):
+        self.statement = statement
+        self.froms = self._get_froms(statement)
+
+        self.columns_plus_names = statement._generate_columns_plus_names(True)
+
+    def _get_froms(self, statement):
+        froms = []
+        seen = set()
+
+        for item in statement._iterate_from_elements():
+            if item._is_subquery and item.element is statement:
+                raise exc.InvalidRequestError(
+                    "select() construct refers to itself as a FROM"
+                )
+            if not seen.intersection(item._cloned_set):
+                froms.append(item)
+                seen.update(item._cloned_set)
+
+        return froms
+
+    def _get_display_froms(
+        self, explicit_correlate_froms=None, implicit_correlate_froms=None
+    ):
+        """Return the full list of 'from' clauses to be displayed.
+
+        Takes into account a set of existing froms which may be
+        rendered in the FROM clause of enclosing selects; this Select
+        may want to leave those absent if it is automatically
+        correlating.
+
+        """
+        froms = self.froms
+
+        toremove = set(
+            itertools.chain.from_iterable(
+                [_expand_cloned(f._hide_froms) for f in froms]
+            )
+        )
+        if toremove:
+            # filter out to FROM clauses not in the list,
+            # using a list to maintain ordering
+            froms = [f for f in froms if f not in toremove]
+
+        if self.statement._correlate:
+            to_correlate = self.statement._correlate
+            if to_correlate:
+                froms = [
+                    f
+                    for f in froms
+                    if f
+                    not in _cloned_intersection(
+                        _cloned_intersection(
+                            froms, explicit_correlate_froms or ()
+                        ),
+                        to_correlate,
+                    )
+                ]
+
+        if self.statement._correlate_except is not None:
+
+            froms = [
+                f
+                for f in froms
+                if f
+                not in _cloned_difference(
+                    _cloned_intersection(
+                        froms, explicit_correlate_froms or ()
+                    ),
+                    self.statement._correlate_except,
+                )
+            ]
+
+        if (
+            self.statement._auto_correlate
+            and implicit_correlate_froms
+            and len(froms) > 1
+        ):
+
+            froms = [
+                f
+                for f in froms
+                if f
+                not in _cloned_intersection(froms, implicit_correlate_froms)
+            ]
+
+        if not len(froms):
+            # interpolate the SELECT statement itself into the message;
+            # "self" here is the compile state, not the statement
+            raise exc.InvalidRequestError(
+                "Select statement '%s"
+                "' returned no FROM clauses "
+                "due to auto-correlation; "
+                "specify correlate(<tables>) "
+                "to control correlation "
+                "manually." % self.statement
+            )
+
+        return froms
+
+    @util.memoized_property
+    def _label_resolve_dict(self):
+        with_cols = dict(
+            (c._resolve_label or c._label or c.key, c)
+            for c in _select_iterables(self.statement._raw_columns)
+            if c._allow_label_resolve
+        )
+        only_froms = dict(
+            (c.key, c)
+            for c in _select_iterables(self.froms)
+            if c._allow_label_resolve
+        )
+        only_cols = with_cols.copy()
+        for key, value in only_froms.items():
+            with_cols.setdefault(key, value)
+
+        return with_cols, only_froms, only_cols
+
+
class Select(
- HasPrefixes, HasSuffixes, DeprecatedSelectGenerations, GenerativeSelect
+ HasPrefixes,
+ HasSuffixes,
+ HasCompileState,
+ DeprecatedSelectGenerations,
+ GenerativeSelect,
):
"""Represents a ``SELECT`` statement.
__visit_name__ = "select"
+ _compile_state_factory = SelectState._create
+
_hints = util.immutabledict()
_statement_hints = ()
_distinct = False
_distinct_on = ()
_correlate = ()
_correlate_except = None
+ _where_criteria = ()
+ _having_criteria = ()
+ _from_obj = ()
+ _auto_correlate = True
+
_memoized_property = SelectBase._memoized_property
_traverse_internals = (
[
- ("_from_obj", InternalTraversal.dp_fromclause_ordered_set),
+ ("_from_obj", InternalTraversal.dp_clauseelement_list),
("_raw_columns", InternalTraversal.dp_clauseelement_list),
- ("_whereclause", InternalTraversal.dp_clauseelement),
- ("_having", InternalTraversal.dp_clauseelement),
- (
- "_order_by_clause.clauses",
- InternalTraversal.dp_clauseelement_list,
- ),
- (
- "_group_by_clause.clauses",
- InternalTraversal.dp_clauseelement_list,
- ),
+ ("_where_criteria", InternalTraversal.dp_clauseelement_list),
+ ("_having_criteria", InternalTraversal.dp_clauseelement_list),
+ ("_order_by_clauses", InternalTraversal.dp_clauseelement_list,),
+ ("_group_by_clauses", InternalTraversal.dp_clauseelement_list,),
("_correlate", InternalTraversal.dp_clauseelement_unordered_set),
(
"_correlate_except",
for ent in util.to_list(entities)
]
- # this should all go away once Select is converted to have
- # default state at the class level
- self._auto_correlate = True
- self._from_obj = util.OrderedSet()
- self._whereclause = None
- self._having = None
-
GenerativeSelect.__init__(self)
return self
else:
return Select._create_select(*entities)
- @util.deprecated_params(
- autocommit=(
- "0.6",
- "The :paramref:`.select.autocommit` parameter is deprecated "
- "and will be removed in a future release. Please refer to "
- "the :paramref:`.Connection.execution_options.autocommit` "
- "parameter in conjunction with the the "
- ":meth:`.Executable.execution_options` method in order to "
- "affect the autocommit behavior for a statement.",
- ),
- for_update=(
- "0.9",
- "The :paramref:`.select.for_update` parameter is deprecated and "
- "will be removed in a future release. Please refer to the "
- ":meth:`.Select.with_for_update` to specify the "
- "structure of the ``FOR UPDATE`` clause.",
- ),
- )
def __init__(
self,
columns=None,
:meth:`.Select.select_from` - full description of explicit
FROM clause specification.
- :param autocommit: legacy autocommit parameter.
-
:param bind=None:
an :class:`~.Engine` or :class:`~.Connection` instance
to which the
:meth:`.Select.distinct`
- :param for_update=False:
- when ``True``, applies ``FOR UPDATE`` to the end of the
- resulting statement.
-
- ``for_update`` accepts various string values interpreted by
- specific backends, including:
-
- * ``"read"`` - on MySQL, translates to ``LOCK IN SHARE MODE``;
- on PostgreSQL, translates to ``FOR SHARE``.
- * ``"nowait"`` - on PostgreSQL and Oracle, translates to
- ``FOR UPDATE NOWAIT``.
- * ``"read_nowait"`` - on PostgreSQL, translates to
- ``FOR SHARE NOWAIT``.
-
- .. seealso::
-
- :meth:`.Select.with_for_update` - improved API for
- specifying the ``FOR UPDATE`` clause.
-
:param group_by:
a list of :class:`.ClauseElement` objects which will comprise the
``GROUP BY`` clause of the resulting select. This parameter
)
self._auto_correlate = correlate
+
if distinct is not False:
- self._distinct = True
- if not isinstance(distinct, bool):
- self._distinct_on = tuple(
- [
- coercions.expect(roles.ByOfRole, e)
- for e in util.to_list(distinct)
- ]
- )
+ if distinct is True:
+ self.distinct.non_generative(self)
+ else:
+ self.distinct.non_generative(self, *util.to_list(distinct))
if from_obj is not None:
- self._from_obj = util.OrderedSet(
- coercions.expect(roles.FromClauseRole, f)
- for f in util.to_list(from_obj)
- )
- else:
- self._from_obj = util.OrderedSet()
+ self.select_from.non_generative(self, *util.to_list(from_obj))
try:
cols_present = bool(columns)
)
if cols_present:
- self._raw_columns = []
- for c in columns:
- c = coercions.expect(roles.ColumnsClauseRole, c)
- self._raw_columns.append(c)
+ self._raw_columns = [
+ coercions.expect(roles.ColumnsClauseRole, c,) for c in columns
+ ]
else:
self._raw_columns = []
if whereclause is not None:
- self._whereclause = coercions.expect(
- roles.WhereHavingRole, whereclause
- ).self_group(against=operators._asbool)
- else:
- self._whereclause = None
+ self.where.non_generative(self, whereclause)
if having is not None:
- self._having = coercions.expect(
- roles.WhereHavingRole, having
- ).self_group(against=operators._asbool)
- else:
- self._having = None
+ self.having.non_generative(self, having)
if prefixes:
self._setup_prefixes(prefixes)
GenerativeSelect.__init__(self, **kwargs)
- @property
- def _froms(self):
- # current roadblock to caching is two tests that test that the
- # SELECT can be compiled to a string, then a Table is created against
- # columns, then it can be compiled again and works. this is somewhat
- # valid as people make select() against declarative class where
- # columns don't have their Table yet and perhaps some operations
- # call upon _froms and cache it too soon.
- froms = []
- seen = set()
-
- for item in itertools.chain(
- _from_objects(*self._raw_columns),
- _from_objects(self._whereclause)
- if self._whereclause is not None
- else (),
- self._from_obj,
- ):
- if item._is_subquery and item.element is self:
- raise exc.InvalidRequestError(
- "select() construct refers to itself as a FROM"
- )
- if not seen.intersection(item._cloned_set):
- froms.append(item)
- seen.update(item._cloned_set)
-
- return froms
-
- def _get_display_froms(
- self, explicit_correlate_froms=None, implicit_correlate_froms=None
- ):
- """Return the full list of 'from' clauses to be displayed.
-
- Takes into account a set of existing froms which may be
- rendered in the FROM clause of enclosing selects; this Select
- may want to leave those absent if it is automatically
- correlating.
-
- """
- froms = self._froms
-
- toremove = set(
- itertools.chain(*[_expand_cloned(f._hide_froms) for f in froms])
- )
- if toremove:
- # filter out to FROM clauses not in the list,
- # using a list to maintain ordering
- froms = [f for f in froms if f not in toremove]
-
- if self._correlate:
- to_correlate = self._correlate
- if to_correlate:
- froms = [
- f
- for f in froms
- if f
- not in _cloned_intersection(
- _cloned_intersection(
- froms, explicit_correlate_froms or ()
- ),
- to_correlate,
- )
- ]
-
- if self._correlate_except is not None:
-
- froms = [
- f
- for f in froms
- if f
- not in _cloned_difference(
- _cloned_intersection(
- froms, explicit_correlate_froms or ()
- ),
- self._correlate_except,
- )
- ]
-
- if (
- self._auto_correlate
- and implicit_correlate_froms
- and len(froms) > 1
- ):
-
- froms = [
- f
- for f in froms
- if f
- not in _cloned_intersection(froms, implicit_correlate_froms)
- ]
-
- if not len(froms):
- raise exc.InvalidRequestError(
- "Select statement '%s"
- "' returned no FROM clauses "
- "due to auto-correlation; "
- "specify correlate(<tables>) "
- "to control correlation "
- "manually." % self
- )
-
- return froms
-
def _scalar_type(self):
elem = self._raw_columns[0]
cols = list(elem._select_iterable)
return cols[0].type
+ def _iterate_from_elements(self):
+ return itertools.chain(
+ itertools.chain.from_iterable(
+ [element._from_objects for element in self._raw_columns]
+ ),
+ itertools.chain.from_iterable(
+ [element._from_objects for element in self._where_criteria]
+ ),
+ self._from_obj,
+ )
+
@property
def froms(self):
"""Return the displayed list of FromClause elements."""
- return self._get_display_froms()
+ return self._compile_state_factory(self, None)._get_display_froms()
def with_statement_hint(self, text, dialect_name="*"):
"""add a statement hint to this :class:`.Select`.
else:
self._hints = self._hints.union({(selectable, dialect_name): text})
- @_memoized_property.method
- def locate_all_froms(self):
- """return a Set of all FromClause elements referenced by this Select.
-
- This set is a superset of that returned by the ``froms`` property,
- which is specifically for those FromClause elements that would
- actually be rendered.
-
- """
- froms = self._froms
- return froms + list(_from_objects(*froms))
-
@property
def inner_columns(self):
"""an iterator of all ColumnElement expressions which would
"""
return _select_iterables(self._raw_columns)
- @_memoized_property
- def _label_resolve_dict(self):
- with_cols = dict(
- (c._resolve_label or c._label or c.key, c)
- for c in _select_iterables(self._raw_columns)
- if c._allow_label_resolve
- )
- only_froms = dict(
- (c.key, c)
- for c in _select_iterables(self.froms)
- if c._allow_label_resolve
- )
- only_cols = with_cols.copy()
- for key, value in only_froms.items():
- with_cols.setdefault(key, value)
-
- return with_cols, only_froms, only_cols
-
def is_derived_from(self, fromclause):
if self in fromclause._cloned_set:
return True
- for f in self.locate_all_froms():
+ for f in self._iterate_from_elements():
if f.is_derived_from(fromclause):
return True
return False
# objects
# 1. keep a dictionary of the froms we've cloned, and what
- # they've become. This is consulted later when we derive
- # additional froms from "whereclause" and the columns clause,
- # which may still reference the uncloned parent table.
- # as of 0.7.4 we also put the current version of _froms, which
- # gets cleared on each generation. previously we were "baking"
- # _froms into self._from_obj.
+ # they've become. This allows us to ensure the same cloned from
+ # is used when other items such as columns are "cloned"
all_the_froms = list(
itertools.chain(
_from_objects(*self._raw_columns),
- _from_objects(self._whereclause)
- if self._whereclause is not None
- else (),
+ _from_objects(*self._where_criteria),
)
)
new_froms = {f: clone(f, **kw) for f in all_the_froms}
- # copy FROM collections
- self._from_obj = util.OrderedSet(
- clone(f, **kw) for f in self._from_obj
- ).union(f for f in new_froms.values() if isinstance(f, Join))
-
- self._correlate = set(clone(f, **kw) for f in self._correlate)
- if self._correlate_except:
- self._correlate_except = set(
- clone(f, **kw) for f in self._correlate_except
- )
+ # 2. copy FROM collections.
+ self._from_obj = tuple(clone(f, **kw) for f in self._from_obj) + tuple(
+ f for f in new_froms.values() if isinstance(f, Join)
+ )
- # 4. clone other things. The difficulty here is that Column
- # objects are usually not altered by a straight clone because they
- # are dependent on the FROM cloning we just did above in order to
- # be targeted correctly, or a new FROM we have might be a JOIN
- # object which doesn't have its own columns. so give the cloner a
- # hint.
+ # 3. clone everything else, making sure we use columns
+ # corresponding to the froms we just made.
def replace(obj, **kw):
if isinstance(obj, ColumnClause) and obj.table in new_froms:
newelem = new_froms[obj.table].corresponding_column(obj)
kw["replace"] = replace
- # TODO: I'd still like to try to leverage the traversal data
- self._raw_columns = [clone(c, **kw) for c in self._raw_columns]
- for attr in (
- "_limit_clause",
- "_offset_clause",
- "_whereclause",
- "_having",
- "_order_by_clause",
- "_group_by_clause",
- "_for_update_arg",
- ):
- if getattr(self, attr) is not None:
- setattr(self, attr, clone(getattr(self, attr), **kw))
+ super(Select, self)._copy_internals(
+ clone=clone, omit_attrs=("_from_obj",), **kw
+ )
self._reset_memoizations()
def get_children(self, **kwargs):
- # TODO: define "get_children" traversal items separately?
- return self._froms + super(Select, self).get_children(
+ return list(set(self._iterate_from_elements())) + super(
+ Select, self
+ ).get_children(
omit_attrs=["_from_obj", "_correlate", "_correlate_except"]
)
self._reset_memoizations()
self._raw_columns = self._raw_columns + [
- coercions.expect(roles.ColumnsClauseRole, column)
+ coercions.expect(roles.ColumnsClauseRole, column,)
for column in columns
]
sqlutil.reduce_columns(
self.inner_columns,
only_synonyms=only_synonyms,
- *(self._whereclause,) + tuple(self._from_obj)
+ *(self._where_criteria + self._from_obj)
)
)
self._reset_memoizations()
rc = []
for c in columns:
- c = coercions.expect(roles.ColumnsClauseRole, c)
+ c = coercions.expect(roles.ColumnsClauseRole, c,)
if isinstance(c, ScalarSelect):
c = c.self_group(against=operators.comma_op)
rc.append(c)
self._raw_columns = rc
+ @property
+ def _whereclause(self):
+ """Legacy, return the WHERE clause as a :class:`.BooleanClauseList`"""
+
+ return and_(*self._where_criteria)
+
@_generative
def where(self, whereclause):
"""return a new select() construct with the given expression added to
"""
- self._reset_memoizations()
- self._whereclause = and_(True_._ifnone(self._whereclause), whereclause)
+ self._where_criteria += (
+ coercions.expect(roles.WhereHavingRole, whereclause),
+ )
@_generative
def having(self, having):
its HAVING clause, joined to the existing clause via AND, if any.
"""
- self._reset_memoizations()
- self._having = and_(True_._ifnone(self._having), having)
+ self._having_criteria += (
+ coercions.expect(roles.WhereHavingRole, having),
+ )
@_generative
def distinct(self, *expr):
"""
if expr:
- expr = [coercions.expect(roles.ByOfRole, e) for e in expr]
self._distinct = True
- self._distinct_on = self._distinct_on + tuple(expr)
+ self._distinct_on = self._distinct_on + tuple(
+ coercions.expect(roles.ByOfRole, e) for e in expr
+ )
else:
self._distinct = True
@_generative
- def select_from(self, fromclause):
+ def select_from(self, *froms):
r"""return a new :func:`.select` construct with the
- given FROM expression
+ given FROM expression(s)
merged into its list of FROM objects.
E.g.::
select([func.count('*')]).select_from(table1)
"""
- self._reset_memoizations()
- fromclause = coercions.expect(roles.FromClauseRole, fromclause)
- self._from_obj = self._from_obj.union([fromclause])
+
+ self._from_obj += tuple(
+ coercions.expect(roles.FromClauseRole, fromclause)
+ for fromclause in froms
+ )
@_generative
def correlate(self, *fromclauses):
names = {}
def name_for_col(c):
- if c._label is None or not c._render_label_in_columns_clause:
+ if not c._render_label_in_columns_clause:
return (None, c, False)
+ elif c._label is None:
+ repeated = c.anon_label in names
+ names[c.anon_label] = c
+ return (None, c, repeated)
- repeated = False
name = c._label
+ repeated = False
+
if name in names:
# when looking to see if names[name] is the same column as
# c, use hash(), so that an annotated version of the column
return [name_for_col(c) for c in cols]
else:
# repeated name logic only for use labels at the moment
- return [(None, c, False) for c in cols]
-
- @_memoized_property
- def _columns_plus_names(self):
- """generate label names plus columns to render in a SELECT."""
+ same_cols = set()
- return self._generate_columns_plus_names(True)
+ return [
+ (None, c, c in same_cols or same_cols.add(c)) for c in cols
+ ]
def _generate_fromclause_column_proxies(self, subquery):
"""generate column proxies to place in the exported .c collection
"""
return CompoundSelect._create_intersect_all(self, other, **kwargs)
+ @property
def bind(self):
if self._bind:
return self._bind
- froms = self._froms
- if not froms:
- for c in self._raw_columns:
- e = c.bind
- if e:
- self._bind = e
- return e
- else:
- e = list(froms)[0].bind
+
+ for item in self._iterate_from_elements():
+ if item._is_subquery and item.element is self:
+ raise exc.InvalidRequestError(
+ "select() construct refers to itself as a FROM"
+ )
+
+ e = item.bind
if e:
self._bind = e
return e
+ else:
+ break
- return None
+ for c in self._raw_columns:
+ e = c.bind
+ if e:
+ self._bind = e
+ return e
- def _set_bind(self, bind):
+ @bind.setter
+ def bind(self, bind):
self._bind = bind
- bind = property(bind, _set_bind)
-
class ScalarSelect(roles.InElementRole, Generative, Grouping):
_from_objects = []
def visit_clauseelement_list(self, parent, element, clone=_clone, **kw):
return [clone(clause, **kw) for clause in element]
+ def visit_clauseelement_unordered_set(
+ self, parent, element, clone=_clone, **kw
+ ):
+ return {clone(clause, **kw) for clause in element}
+
def visit_clauseelement_tuples(self, parent, element, clone=_clone, **kw):
return [
tuple(clone(tup_elem, **kw) for tup_elem in elem)
exception = arg[0]
else:
exception = None
+
if not exception or not issubclass(exception, exc_cls):
return real_warn(msg, *arg, **kw)
@assert_cycles(6)
def go():
s = select([users]).select_from(users.join(addresses))
- s._froms
+ state = s._compile_state_factory(s, None)
+ state.froms
go()
for i in range(100)
]
- @profiling.function_call_count(variance=0.15, warmup=1)
+ @profiling.function_call_count(variance=0.15, warmup=2)
def test_statement_one(self, stmt_fixture_one):
current_key = None
for stmt in stmt_fixture_one:
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import testing
-from sqlalchemy import text
from sqlalchemy import TypeDecorator
from sqlalchemy.engine import reflection
from sqlalchemy.engine.base import Engine
e = testing.db
conn = e.connect()
- with testing.expect_deprecated(
- r"The .close\(\) method on a so-called 'branched' "
- "connection is deprecated"
+ with testing.expect_deprecated_20(
+ r"The Connection.connect\(\) function/method is considered",
+ r"The .close\(\) method on a so-called 'branched' connection is "
+ r"deprecated as of 1.4, as are 'branched' connections overall, "
+ r"and will be removed in a future release.",
):
with conn.connect() as c2:
assert not c2.closed
assert not conn.closed
assert c2.closed
+ @testing.provide_metadata
+ def test_explicit_connectionless_execute(self):
+ table = Table("t", self.metadata, Column("a", Integer))
+ table.create(testing.db)
+
+ stmt = table.insert().values(a=1)
+ with testing.expect_deprecated_20(
+ r"The Engine.execute\(\) function/method is considered legacy",
+ ):
+ testing.db.execute(stmt)
+
+ stmt = select([table])
+ with testing.expect_deprecated_20(
+ r"The Engine.execute\(\) function/method is considered legacy",
+ ):
+ eq_(testing.db.execute(stmt).fetchall(), [(1,)])
+
+ @testing.provide_metadata
+ def test_implicit_execute(self):
+ table = Table("t", self.metadata, Column("a", Integer))
+ table.create(testing.db)
+
+ stmt = table.insert().values(a=1)
+ with testing.expect_deprecated_20(
+ r"The Executable.execute\(\) function/method is considered legacy",
+ ):
+ stmt.execute()
+
+ stmt = select([table])
+ with testing.expect_deprecated_20(
+ r"The Executable.execute\(\) function/method is considered legacy",
+ ):
+ eq_(stmt.execute().fetchall(), [(1,)])
+
class CreateEngineTest(fixtures.TestBase):
def test_strategy_keyword_mock(self):
)
-class ExplicitAutoCommitDeprecatedTest(fixtures.TestBase):
-
- """test the 'autocommit' flag on select() and text() objects.
-
- Requires PostgreSQL so that we may define a custom function which
- modifies the database. """
-
- __only_on__ = "postgresql"
-
- @classmethod
- def setup_class(cls):
- global metadata, foo
- metadata = MetaData(testing.db)
- foo = Table(
- "foo",
- metadata,
- Column("id", Integer, primary_key=True),
- Column("data", String(100)),
- )
- metadata.create_all()
- testing.db.execute(
- "create function insert_foo(varchar) "
- "returns integer as 'insert into foo(data) "
- "values ($1);select 1;' language sql"
- )
-
- def teardown(self):
- foo.delete().execute().close()
-
- @classmethod
- def teardown_class(cls):
- testing.db.execute("drop function insert_foo(varchar)")
- metadata.drop_all()
-
- def test_explicit_compiled(self):
- conn1 = testing.db.connect()
- conn2 = testing.db.connect()
- with testing.expect_deprecated(
- "The select.autocommit parameter is deprecated"
- ):
- conn1.execute(select([func.insert_foo("data1")], autocommit=True))
- assert conn2.execute(select([foo.c.data])).fetchall() == [("data1",)]
- with testing.expect_deprecated(
- r"The SelectBase.autocommit\(\) method is deprecated,"
- ):
- conn1.execute(select([func.insert_foo("data2")]).autocommit())
- assert conn2.execute(select([foo.c.data])).fetchall() == [
- ("data1",),
- ("data2",),
- ]
- conn1.close()
- conn2.close()
-
- def test_explicit_text(self):
- conn1 = testing.db.connect()
- conn2 = testing.db.connect()
- with testing.expect_deprecated(
- "The text.autocommit parameter is deprecated"
- ):
- conn1.execute(
- text("select insert_foo('moredata')", autocommit=True)
- )
- assert conn2.execute(select([foo.c.data])).fetchall() == [
- ("moredata",)
- ]
- conn1.close()
- conn2.close()
-
-
class DeprecatedEngineFeatureTest(fixtures.TablesTest):
__backend__ = True
.scalar_subquery()
)
# test a different unary operator
+ # TODO: there is no test in Core that asserts what is happening
+ # here as far as the label generation for the ORDER BY
self.assert_compile(
create_session()
.query(A)
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mssql_pyodbc_dbapiunicode_cextensions 61
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mssql_pyodbc_dbapiunicode_nocextensions 61
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_dbapiunicode_cextensions 61
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_dbapiunicode_nocextensions 61
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_pymysql_dbapiunicode_cextensions 61
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_pymysql_dbapiunicode_nocextensions 61
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_oracle_cx_oracle_dbapiunicode_cextensions 61
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 61
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_dbapiunicode_cextensions 61
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 61
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_dbapiunicode_cextensions 61
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 61
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mssql_pyodbc_dbapiunicode_cextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mssql_pyodbc_dbapiunicode_nocextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_mysqldb_dbapiunicode_cextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_mysqldb_dbapiunicode_nocextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_pymysql_dbapiunicode_cextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_pymysql_dbapiunicode_nocextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_oracle_cx_oracle_dbapiunicode_cextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_postgresql_psycopg2_dbapiunicode_cextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_sqlite_pysqlite_dbapiunicode_cextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 66
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mssql_pyodbc_dbapiunicode_cextensions 62
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mssql_pyodbc_dbapiunicode_nocextensions 62
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_dbapiunicode_cextensions 62
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_dbapiunicode_nocextensions 62
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_pymysql_dbapiunicode_cextensions 62
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_pymysql_dbapiunicode_nocextensions 62
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_oracle_cx_oracle_dbapiunicode_cextensions 62
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 62
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_dbapiunicode_cextensions 62
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 62
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_dbapiunicode_cextensions 62
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 62
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mssql_pyodbc_dbapiunicode_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mssql_pyodbc_dbapiunicode_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_mysqldb_dbapiunicode_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_mysqldb_dbapiunicode_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_pymysql_dbapiunicode_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_pymysql_dbapiunicode_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_oracle_cx_oracle_dbapiunicode_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_postgresql_psycopg2_dbapiunicode_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_sqlite_pysqlite_dbapiunicode_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 67
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mssql_pyodbc_dbapiunicode_cextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mssql_pyodbc_dbapiunicode_nocextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_dbapiunicode_cextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_dbapiunicode_nocextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_pymysql_dbapiunicode_cextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_pymysql_dbapiunicode_nocextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_oracle_cx_oracle_dbapiunicode_cextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_dbapiunicode_cextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_dbapiunicode_cextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mssql_pyodbc_dbapiunicode_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mssql_pyodbc_dbapiunicode_nocextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_mysqldb_dbapiunicode_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_mysqldb_dbapiunicode_nocextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_pymysql_dbapiunicode_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_pymysql_dbapiunicode_nocextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_oracle_cx_oracle_dbapiunicode_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_postgresql_psycopg2_dbapiunicode_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_sqlite_pysqlite_dbapiunicode_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 166
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mssql_pyodbc_dbapiunicode_cextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mssql_pyodbc_dbapiunicode_nocextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_dbapiunicode_cextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_dbapiunicode_nocextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_pymysql_dbapiunicode_cextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_pymysql_dbapiunicode_nocextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_oracle_cx_oracle_dbapiunicode_cextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_dbapiunicode_cextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_dbapiunicode_cextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mssql_pyodbc_dbapiunicode_cextensions 174
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mssql_pyodbc_dbapiunicode_nocextensions 174
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_mysqldb_dbapiunicode_cextensions 174
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_mysqldb_dbapiunicode_nocextensions 174
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_pymysql_dbapiunicode_cextensions 174
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_pymysql_dbapiunicode_nocextensions 174
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_oracle_cx_oracle_dbapiunicode_cextensions 174
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 174
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_postgresql_psycopg2_dbapiunicode_cextensions 174
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 174
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_sqlite_pysqlite_dbapiunicode_cextensions 174
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 174
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 182
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_dbapiunicode_cextensions 182
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 182
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mssql_pyodbc_dbapiunicode_cextensions 195
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mssql_pyodbc_dbapiunicode_nocextensions 195
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_mysqldb_dbapiunicode_cextensions 195
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_mysqldb_dbapiunicode_nocextensions 195
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_pymysql_dbapiunicode_cextensions 195
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_pymysql_dbapiunicode_nocextensions 195
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_oracle_cx_oracle_dbapiunicode_cextensions 195
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 195
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_postgresql_psycopg2_dbapiunicode_cextensions 195
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 195
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_sqlite_pysqlite_dbapiunicode_cextensions 195
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 195
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mssql_pyodbc_dbapiunicode_cextensions 197
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mssql_pyodbc_dbapiunicode_nocextensions 197
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_mysqldb_dbapiunicode_cextensions 197
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_mysqldb_dbapiunicode_nocextensions 197
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_pymysql_dbapiunicode_cextensions 197
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_pymysql_dbapiunicode_nocextensions 197
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_oracle_cx_oracle_dbapiunicode_cextensions 197
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 197
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_postgresql_psycopg2_dbapiunicode_cextensions 197
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 197
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_sqlite_pysqlite_dbapiunicode_cextensions 197
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 197
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mssql_pyodbc_dbapiunicode_cextensions 68
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mssql_pyodbc_dbapiunicode_nocextensions 68
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_dbapiunicode_cextensions 68
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_dbapiunicode_nocextensions 68
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_pymysql_dbapiunicode_cextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_pymysql_dbapiunicode_nocextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_oracle_cx_oracle_dbapiunicode_cextensions 68
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 68
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_dbapiunicode_cextensions 68
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 68
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_dbapiunicode_cextensions 68
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 68
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mssql_pyodbc_dbapiunicode_cextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mssql_pyodbc_dbapiunicode_nocextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_mysqldb_dbapiunicode_cextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_mysqldb_dbapiunicode_nocextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_pymysql_dbapiunicode_cextensions 69
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_pymysql_dbapiunicode_nocextensions 69
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_oracle_cx_oracle_dbapiunicode_cextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_postgresql_psycopg2_dbapiunicode_cextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_sqlite_pysqlite_dbapiunicode_cextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 71
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mssql_pyodbc_dbapiunicode_cextensions 69
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mssql_pyodbc_dbapiunicode_nocextensions 69
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_dbapiunicode_cextensions 69
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_dbapiunicode_nocextensions 69
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_pymysql_dbapiunicode_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_pymysql_dbapiunicode_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_oracle_cx_oracle_dbapiunicode_cextensions 69
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 69
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_dbapiunicode_cextensions 69
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 69
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_dbapiunicode_cextensions 69
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 69
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mssql_pyodbc_dbapiunicode_cextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mssql_pyodbc_dbapiunicode_nocextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_mysqldb_dbapiunicode_cextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_mysqldb_dbapiunicode_nocextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_pymysql_dbapiunicode_cextensions 70
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_pymysql_dbapiunicode_nocextensions 70
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_oracle_cx_oracle_dbapiunicode_cextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_postgresql_psycopg2_dbapiunicode_cextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_sqlite_pysqlite_dbapiunicode_cextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 72
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mssql_pyodbc_dbapiunicode_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mssql_pyodbc_dbapiunicode_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_dbapiunicode_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_dbapiunicode_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_pymysql_dbapiunicode_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_pymysql_dbapiunicode_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_oracle_cx_oracle_dbapiunicode_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_dbapiunicode_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_dbapiunicode_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mssql_pyodbc_dbapiunicode_cextensions 155
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mssql_pyodbc_dbapiunicode_nocextensions 155
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_mysqldb_dbapiunicode_cextensions 155
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_mysqldb_dbapiunicode_nocextensions 155
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_pymysql_dbapiunicode_cextensions 155
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_pymysql_dbapiunicode_nocextensions 155
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_oracle_cx_oracle_dbapiunicode_cextensions 155
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 155
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_postgresql_psycopg2_dbapiunicode_cextensions 155
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 155
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_sqlite_pysqlite_dbapiunicode_cextensions 155
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 155
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mssql_pyodbc_dbapiunicode_cextensions 154
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mssql_pyodbc_dbapiunicode_nocextensions 154
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_dbapiunicode_cextensions 154
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_dbapiunicode_nocextensions 154
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_pymysql_dbapiunicode_cextensions 154
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_pymysql_dbapiunicode_nocextensions 154
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_oracle_cx_oracle_dbapiunicode_cextensions 154
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 154
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_dbapiunicode_cextensions 154
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 154
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_dbapiunicode_cextensions 154
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 154
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mssql_pyodbc_dbapiunicode_cextensions 162
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mssql_pyodbc_dbapiunicode_nocextensions 162
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_mysqldb_dbapiunicode_cextensions 162
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_mysqldb_dbapiunicode_nocextensions 162
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_pymysql_dbapiunicode_cextensions 162
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_pymysql_dbapiunicode_nocextensions 162
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_oracle_cx_oracle_dbapiunicode_cextensions 162
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 162
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_postgresql_psycopg2_dbapiunicode_cextensions 162
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 162
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_sqlite_pysqlite_dbapiunicode_cextensions 162
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 162
# TEST: test.aaa_profiling.test_misc.CacheKeyTest.test_statement_one
-test.aaa_profiling.test_misc.CacheKeyTest.test_statement_one 2.7_sqlite_pysqlite_dbapiunicode_cextensions 4702
-test.aaa_profiling.test_misc.CacheKeyTest.test_statement_one 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 4302
-test.aaa_profiling.test_misc.CacheKeyTest.test_statement_one 3.7_sqlite_pysqlite_dbapiunicode_cextensions 4903
-test.aaa_profiling.test_misc.CacheKeyTest.test_statement_one 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 4903
+test.aaa_profiling.test_misc.CacheKeyTest.test_statement_one 2.7_sqlite_pysqlite_dbapiunicode_cextensions 4002
+test.aaa_profiling.test_misc.CacheKeyTest.test_statement_one 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 4002
+test.aaa_profiling.test_misc.CacheKeyTest.test_statement_one 3.7_sqlite_pysqlite_dbapiunicode_cextensions 4703
+test.aaa_profiling.test_misc.CacheKeyTest.test_statement_one 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 4703
# TEST: test.aaa_profiling.test_misc.EnumTest.test_create_enum_from_pep_435_w_expensive_members
with self._nested_result() as nested:
contexts[stmt2.element] = nested
text = super(MyCompiler, self).visit_select(
- stmt2.element
+ stmt2.element,
)
self._add_to_result_map("k1", "k1", (1, 2, 3), int_)
else:
eq_(len(stmt.subquery().c), 7)
# will render 7 as well
- eq_(len(stmt._columns_plus_names), 7)
+ eq_(len(stmt._compile_state_factory(stmt, None).columns_plus_names), 7)
wrapped = stmt._generate()
wrapped = wrapped.add_columns(
assert t1t2.onclause.compare(join_cond)
- def test_select_autocommit(self):
- with testing.expect_deprecated(
- "The select.autocommit parameter is deprecated and "
- "will be removed in a future release."
- ):
- select([column("x")], autocommit=True)
-
- def test_select_for_update(self):
- with testing.expect_deprecated(
- "The select.for_update parameter is deprecated and "
- "will be removed in a future release."
- ):
- select([column("x")], for_update=True)
-
def test_empty_and_or(self):
with testing.expect_deprecated(
r"Invoking and_\(\) without arguments is deprecated, and "
eq_(proc(utfdata), unicodedata.encode("ascii", "ignore").decode())
-class ForUpdateTest(fixtures.TestBase, AssertsCompiledSQL):
- __dialect__ = "default"
-
- def _assert_legacy(self, leg, read=False, nowait=False):
- t = table("t", column("c"))
-
- with testing.expect_deprecated(
- "The select.for_update parameter is deprecated and "
- "will be removed in a future release."
- ):
- s1 = select([t], for_update=leg)
-
- if leg is False:
- assert s1._for_update_arg is None
- assert s1.for_update is None
- else:
- eq_(s1._for_update_arg.read, read)
- eq_(s1._for_update_arg.nowait, nowait)
- eq_(s1.for_update, leg)
-
- def test_false_legacy(self):
- self._assert_legacy(False)
-
- def test_plain_true_legacy(self):
- self._assert_legacy(True)
-
- def test_read_legacy(self):
- self._assert_legacy("read", read=True)
-
- def test_nowait_legacy(self):
- self._assert_legacy("nowait", nowait=True)
-
- def test_read_nowait_legacy(self):
- self._assert_legacy("read_nowait", read=True, nowait=True)
-
- def test_unknown_mode(self):
- t = table("t", column("c"))
-
- with testing.expect_deprecated(
- "The select.for_update parameter is deprecated and "
- "will be removed in a future release."
- ):
- assert_raises_message(
- exc.ArgumentError,
- "Unknown for_update argument: 'unknown_mode'",
- t.select,
- t.c.c == 7,
- for_update="unknown_mode",
- )
-
- def test_legacy_setter(self):
- t = table("t", column("c"))
- s = select([t])
- s.for_update = "nowait"
- eq_(s._for_update_arg.nowait, True)
-
-
class SubqueryCoercionsTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = "default"
},
)
- def test_autocommit(self):
- with testing.expect_deprecated(
- "The text.autocommit parameter is deprecated"
- ):
- text("select id, name from user", autocommit=True)
-
class SelectableTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = "default"
metadata.create_all(testing.db)
with testing.db.connect() as conn:
- with testing.expect_deprecated(
+ with testing.expect_deprecated_20(
r"The .close\(\) method on a so-called 'branched' "
r"connection is deprecated as of 1.4, as are "
r"'branched' connections overall"
s = select(
[literal_column("*")], from_obj=[t1alias, t2alias]
).scalar_subquery()
- assert t2alias in s._froms
- assert t1alias in s._froms
+
+ froms = list(s._iterate_from_elements())
+ assert t2alias in froms
+ assert t1alias in froms
self.assert_compile(
select([literal_column("*")], t2alias.c.col1 == s),
)
s = vis.traverse(s)
- assert t2alias in s._froms # present because it was not cloned
- assert t1alias in s._froms # present because the adapter placed
+ froms = list(s._iterate_from_elements())
+ assert t2alias in froms # present because it was not cloned
+ assert t1alias in froms # present because the adapter placed
# it there and was also not cloned
# correlate list on "s" needs to take into account the full
"SELECT table1.col1, table1.col2, "
"table1.col3 FROM table1, table2",
)
- assert s._froms is not select_copy._froms
+
self.assert_compile(
s, "SELECT table1.col1, table1.col2, " "table1.col3 FROM table1"
)
def test_add_kwarg(self):
i = t1.insert()
- compile_state = i._compile_state_cls(i, None, isinsert=True)
+ compile_state = i._compile_state_factory(i, None)
eq_(compile_state._dict_parameters, None)
i = i.values(col1=5)
- compile_state = i._compile_state_cls(i, None, isinsert=True)
+ compile_state = i._compile_state_factory(i, None)
self._compare_param_dict(compile_state._dict_parameters, {"col1": 5})
i = i.values(col2=7)
- compile_state = i._compile_state_cls(i, None, isinsert=True)
+ compile_state = i._compile_state_factory(i, None)
self._compare_param_dict(
compile_state._dict_parameters, {"col1": 5, "col2": 7}
)
def test_via_tuple_single(self):
i = t1.insert()
- compile_state = i._compile_state_cls(i, None, isinsert=True)
+ compile_state = i._compile_state_factory(i, None)
eq_(compile_state._dict_parameters, None)
i = i.values((5, 6, 7))
- compile_state = i._compile_state_cls(i, None, isinsert=True)
+ compile_state = i._compile_state_factory(i, None)
self._compare_param_dict(
compile_state._dict_parameters, {"col1": 5, "col2": 6, "col3": 7},
def test_via_tuple_multi(self):
i = t1.insert()
- compile_state = i._compile_state_cls(i, None, isinsert=True)
+ compile_state = i._compile_state_factory(i, None)
eq_(compile_state._dict_parameters, None)
i = i.values([(5, 6, 7), (8, 9, 10)])
- compile_state = i._compile_state_cls(i, None, isinsert=True)
+ compile_state = i._compile_state_factory(i, None)
eq_(
compile_state._dict_parameters, {"col1": 5, "col2": 6, "col3": 7},
)
def test_inline_values_single(self):
i = t1.insert(values={"col1": 5})
- compile_state = i._compile_state_cls(i, None, isinsert=True)
+ compile_state = i._compile_state_factory(i, None)
self._compare_param_dict(compile_state._dict_parameters, {"col1": 5})
is_(compile_state._has_multi_parameters, False)
def test_inline_values_multi(self):
i = t1.insert(values=[{"col1": 5}, {"col1": 6}])
- compile_state = i._compile_state_cls(i, None, isinsert=True)
+ compile_state = i._compile_state_factory(i, None)
# multiparams are not converted to bound parameters
eq_(compile_state._dict_parameters, {"col1": 5})
def test_add_dictionary(self):
    """Successive values() calls: same-key entries replace prior
    values, new keys are merged in; never multi-row."""
    stmt = t1.insert()
    state = stmt._compile_state_factory(stmt, None)
    # no values() yet -> no parameters collected
    eq_(state._dict_parameters, None)

    stmt = stmt.values({"col1": 5})
    state = stmt._compile_state_factory(stmt, None)
    self._compare_param_dict(state._dict_parameters, {"col1": 5})
    is_(state._has_multi_parameters, False)

    stmt = stmt.values({"col1": 6})
    # note: the later values() call replaces the earlier col1=5
    state = stmt._compile_state_factory(stmt, None)
    self._compare_param_dict(state._dict_parameters, {"col1": 6})
    is_(state._has_multi_parameters, False)

    stmt = stmt.values({"col2": 7})
    state = stmt._compile_state_factory(stmt, None)
    self._compare_param_dict(
        state._dict_parameters, {"col1": 6, "col2": 7}
    )
def test_empty_clauses(self, op, str_op, str_continue):
# these warning classes will change to ArgumentError when the
# deprecated behavior is disabled
+
assert_raises_message(
exc.SADeprecationWarning,
r"Invoking %(str_op)s\(\) without arguments is deprecated, and "
select([x]).where(~null()), "SELECT x WHERE NOT NULL"
)
def test_constants_are_singleton(self):
    """null(), false() and true() each return the same object on
    every call (identity, not just equality)."""
    is_(null(), null())
    is_(false(), false())
    is_(true(), true())
def test_constant_render_distinct(self):
    """Two occurrences of the same constant in one select() still
    render with distinct anonymous labels (anon_1 / anon__1)."""
    self.assert_compile(
        select([null(), null()]), "SELECT NULL AS anon_1, NULL AS anon__1"
    )
    self.assert_compile(
        select([true(), true()]), "SELECT true AS anon_1, true AS anon__1"
    )
    self.assert_compile(
        select([false(), false()]),
        "SELECT false AS anon_1, false AS anon__1",
    )
def test_constant_render_distinct_use_labels(self):
    """Same distinct-anonymous-label behavior as
    test_constant_render_distinct, with apply_labels() in effect."""
    self.assert_compile(
        select([null(), null()]).apply_labels(),
        "SELECT NULL AS anon_1, NULL AS anon__1",
    )
    self.assert_compile(
        select([true(), true()]).apply_labels(),
        "SELECT true AS anon_1, true AS anon__1",
    )
    self.assert_compile(
        select([false(), false()]).apply_labels(),
        "SELECT false AS anon_1, false AS anon__1",
    )
def test_is_true_literal(self):
s3 = sql_util.ClauseAdapter(ta).traverse(s2)
- assert s1 not in s3._froms
+ froms = list(s3._iterate_from_elements())
+
+ assert s1 not in froms
# these are new assumptions with the newer approach that
# actively swaps out whereclause and others
assert s3._whereclause.left.table is not s1
- assert s3._whereclause.left.table in s3._froms
+ assert s3._whereclause.left.table in froms
class RefreshForNewColTest(fixtures.TestBase):
):
# the columns clause isn't changed at all
assert sel._raw_columns[0].table is a1
- assert sel._froms[0].element is sel._froms[1].left.element
+ froms = list(sel._iterate_from_elements())
+ assert froms[0].element is froms[1].left.element
eq_(str(s), str(sel))
sql_util._deep_deannotate(s, {"foo": "bar"}),
sql_util._deep_annotate(s, {"foo": "bar"}),
):
- assert sel._froms[0] is not sel._froms[1].left
+ froms = list(sel._iterate_from_elements())
+ assert froms[0] is not froms[1].left
# but things still work out due to
# re49563072578
+from sqlalchemy import Column
+from sqlalchemy import Integer
+from sqlalchemy import MetaData
+from sqlalchemy import select
+from sqlalchemy import String
+from sqlalchemy import Table
from sqlalchemy.sql import util as sql_util
from sqlalchemy.sql.elements import ColumnElement
from sqlalchemy.testing import eq_
_traverse_internals = []
eq_(sql_util.find_tables(MyElement(), check_columns=True), [])
+
def test_find_tables_selectable(self):
    """find_tables() locates the underlying Table behind an aliased
    subquery that selects only a subset of its columns."""
    metadata = MetaData()
    common = Table(
        "common",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", Integer),
        Column("extra", String(45)),
    )

    subset = select([common.c.id, common.c.data]).alias()
    eq_(sql_util.find_tables(subset), [common])