import sys
+
from packaging import tags
to_check = "--"
That is, this will now raise::
- stmt1 = select([user.c.id, user.c.name])
- stmt2 = select([addresses, stmt1]).select_from(addresses.join(stmt1))
+ stmt1 = select(user.c.id, user.c.name)
+ stmt2 = select(addresses, stmt1).select_from(addresses.join(stmt1))
Raising::
that are in the columns clause of the SELECT statement. A common beginner mistake
is code such as the following::
- stmt = select([users])
+ stmt = select(users)
stmt = stmt.where(stmt.c.name == 'foo')
The above code appears intuitive, as though it would generate
the use case above, as in a case like the above it links directly to the columns
present in the ``users.c`` collection::
- stmt = select([users])
+ stmt = select(users)
stmt = stmt.where(stmt.selected_columns.name == 'foo')
it's now invoked automatically whenever a list of values is passed to
an IN expression::
- stmt = select([A.id, A.data]).where(A.id.in_([1, 2, 3]))
+ stmt = select(A.id, A.data).where(A.id.in_([1, 2, 3]))
The pre-execution string representation is::
>>> from sqlalchemy import column, select
>>> c1, c2, c3, c4 = column('c1'), column('c2'), column('c3'), column('c4')
- >>> stmt = select([c1, c2, c3.label('c2'), c2, c4])
+ >>> stmt = select(c1, c2, c3.label('c2'), c2, c4)
>>> print(stmt)
SELECT c1, c2, c3 AS c2, c2, c4
>>> from sqlalchemy import table
>>> user = table('user', column('id'), column('name'))
- >>> stmt = select([user.c.id, user.c.name, user.c.id]).apply_labels()
+ >>> stmt = select(user.c.id, user.c.name, user.c.id).apply_labels()
>>> print(stmt)
SELECT "user".id AS user_id, "user".name AS user_name, "user".id AS id_1
FROM "user"
of columns in a SELECT statement mirrors what was given, in a use case such
as::
- >>> s1 = select([user, user.c.id])
- >>> s2 = select([c1, c2, c3])
+ >>> s1 = select(user, user.c.id)
+ >>> s2 = select(c1, c2, c3)
>>> from sqlalchemy import union
>>> u = union(s1, s2)
>>> print(u)
the above, as SQLAlchemy applies auto-labeling to expressions like these, which
have up until now always been a so-called "anonymous" expression::
- >>> print(select([cast(foo.c.data, String)]))
+ >>> print(select(cast(foo.c.data, String)))
SELECT CAST(foo.data AS VARCHAR) AS anon_1 # old behavior
FROM foo
reasonable behavior for simple modifications to a single column, most
prominently with CAST::
- >>> print(select([cast(foo.c.data, String)]))
+ >>> print(select(cast(foo.c.data, String)))
SELECT CAST(foo.data AS VARCHAR) AS data
FROM foo
For CAST against expressions that don't have a name, the previous logic is used
to generate the usual "anonymous" labels::
- >>> print(select([cast('hi there,' + foo.c.data, String)]))
+ >>> print(select(cast('hi there,' + foo.c.data, String)))
SELECT CAST(:data_1 + foo.data AS VARCHAR) AS anon_1
FROM foo
expression as these don't render inside of a CAST, will nonetheless make use of
the given name::
- >>> print(select([cast(('hi there,' + foo.c.data).label('hello_data'), String)]))
+ >>> print(select(cast(('hi there,' + foo.c.data).label('hello_data'), String)))
SELECT CAST(:data_1 + foo.data AS VARCHAR) AS hello_data
FROM foo
And of course as was always the case, :class:`.Label` can be applied to the
expression on the outside to apply an "AS <name>" label directly::
- >>> print(select([cast(('hi there,' + foo.c.data), String).label('hello_data')]))
+ >>> print(select(cast(('hi there,' + foo.c.data), String).label('hello_data')))
SELECT CAST(:data_1 + foo.data AS VARCHAR) AS hello_data
FROM foo
from sqlalchemy import type_coerce, String
- stmt = select([my_table]).where(
+ stmt = select(my_table).where(
type_coerce(my_table.c.json_data, String).like('%foo%'))
:class:`.TypeDecorator` provides a built-in system for working up type
Column('geom_data', Geometry)
)
- print(select([geometry]).where(
+ print(select(geometry).where(
geometry.c.geom_data == 'LINESTRING(189412 252431,189631 259122)'))
The resulting SQL embeds both functions as appropriate. ``ST_AsText``
a :func:`_expression.select` against a :func:`.label` of our expression, the string
label is moved to the outside of the wrapped expression::
- print(select([geometry.c.geom_data.label('my_data')]))
+ print(select(geometry.c.geom_data.label('my_data')))
Output::
message="this is my message")
print(conn.scalar(
- select([message.c.message]).\
+ select(message.c.message).\
where(message.c.username == "some user")
))
Column('create_date', DateTime, default=func.now()),
# define 'key' to pull its default from the 'keyvalues' table
- Column('key', String(20), default=select([keyvalues.c.key]).where(keyvalues.c.type='type1')),
+ Column('key', String(20), default=select(keyvalues.c.key).where(keyvalues.c.type == 'type1')),
# define 'last_modified' to use the current_timestamp SQL function on update
Column('last_modified', DateTime, onupdate=func.utc_timestamp())
appropriate for the target backend::
>>> my_seq = Sequence('some_sequence')
- >>> stmt = select([my_seq.next_value()])
+ >>> stmt = select(my_seq.next_value())
>>> print(stmt.compile(dialect=postgresql.dialect()))
SELECT nextval('some_sequence') AS next_value_1
rendering, return types and argument behavior. Generic functions are invoked
like all SQL functions, using the :attr:`func` attribute::
- select([func.count()]).select_from(sometable)
+ select(func.count()).select_from(sometable)
Note that any name not known to :attr:`func` generates the function name as is
- there is no restriction on what SQL functions can be called, known or
# run a SELECT 1. use a core select() so that
# the SELECT of a scalar value without a table is
# appropriately formatted for the backend
- connection.scalar(select([1]))
+ connection.scalar(select(1))
except exc.DBAPIError as err:
# catch SQLAlchemy's DBAPIError, which is a wrapper
# for the DBAPI's exception. It includes a .connection_invalidated
# itself and establish a new connection. The disconnect detection
# here also causes the whole connection pool to be invalidated
# so that all stale connections are discarded.
- connection.scalar(select([1]))
+ connection.scalar(select(1))
else:
raise
finally:
metadata = MetaData()
table = Table('t', metadata, Column('q', Integer))
- stmt = select([table])
+ stmt = select(table)
result = stmt.execute() # <--- raises
What the logic is expecting is that the :class:`_schema.MetaData` object has
implicitly or explicitly and does not provide a value when the statement
is executed::
- stmt = select([table.c.column]).where(table.c.id == bindparam('my_param'))
+ stmt = select(table.c.column).where(table.c.id == bindparam('my_param'))
result = conn.execute(stmt)
Column('b', Integer),
Column('c', Integer)
)
- stmt = select([t])
+ stmt = select(t)
Above, ``stmt`` represents a SELECT statement. The error is produced when we want
to use ``stmt`` directly as a FROM clause in another SELECT, such as if we
attempted to select from it::
- new_stmt_1 = select([stmt])
+ new_stmt_1 = select(stmt)
Or if we wanted to use it in a FROM clause such as in a JOIN::
- new_stmt_2 = select([some_table]).select_from(some_table.join(stmt))
+ new_stmt_2 = select(some_table).select_from(some_table.join(stmt))
In previous versions of SQLAlchemy, using a SELECT inside of another SELECT
would produce a parenthesized, unnamed subquery. In most cases, this form of
subq = stmt.subquery()
- new_stmt_1 = select([subq])
+ new_stmt_1 = select(subq)
- new_stmt_2 = select([some_table]).select_from(some_table.join(subq))
+ new_stmt_2 = select(some_table).select_from(some_table.join(subq))
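Once wrapped this way, the columns of the subquery are addressed through its
``.c`` collection; a brief illustrative sketch, reusing the ``a`` and ``b``
columns of the table ``t`` defined earlier::

    new_stmt_3 = select(subq.c.a).where(subq.c.b > 5)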
.. seealso::
>>> from sqlalchemy import table, column, select
>>> t = table('my_table', column('x'))
- >>> statement = select([t])
+ >>> statement = select(t)
>>> print(str(statement))
SELECT my_table.x
FROM my_table
t = table('t', column('x'))
- s = select([t]).where(t.c.x == 5)
+ s = select(t).where(t.c.x == 5)
# **do not use** with untrusted input!!!
print(s.compile(compile_kwargs={"literal_binds": True}))
as an ORDER BY clause by calling upon the :meth:`_expression.Select.where`
and :meth:`_expression.Select.order_by` methods::
- stmt = select([user.c.name]).\
+ stmt = select(user.c.name).\
where(user.c.id > 5).\
where(user.c.name.like('e%')).\
order_by(user.c.name)
id = Column(Integer, primary_key=True)
a_id = Column(ForeignKey("a.id"))
- partition = select([
+ partition = select(
B,
func.row_number().over(
order_by=B.id, partition_by=B.a_id
).label('index')
- ]).alias()
+ ).alias()
partitioned_b = aliased(B, partition)
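As a usage sketch (the filter is illustrative, keyed to the ``'index'`` label
assigned above), the partitioned alias can then be queried like any other
mapped entity, limiting each group by the window value::

    q = session.query(partitioned_b).filter(partition.c.index < 10)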
to a string SQL statement::
# label the columns of the addresses table
- eager_columns = select([
+ eager_columns = select(
addresses.c.address_id.label('a1'),
addresses.c.email_address.label('a2'),
addresses.c.user_id.label('a3')
- ])
+ )
# select from a raw SQL statement which uses those label names for the
# addresses table. contains_eager() matches them up.
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
address_count = column_property(
- select([func.count(Address.id)]).\
+ select(func.count(Address.id)).\
where(Address.user_id==id).\
correlate_except(Address)
)
In the above example, we define a :func:`_expression.select` construct like the following::
- select([func.count(Address.id)]).\
+ select(func.count(Address.id)).\
where(Address.user_id==id).\
correlate_except(Address)
to add an additional property after the fact::
User.address_count = column_property(
- select([func.count(Address.id)]).\
+ select(func.count(Address.id)).\
where(Address.user_id==User.id)
)
# ...
book_count = column_property(
- select(
- [func.count(books.c.id)]
+ select(func.count(books.c.id)
).where(
and_(
book_authors.c.author_id==authors.c.id,
def address_count(self):
return object_session(self).\
scalar(
- select([func.count(Address.id)]).\
+ select(func.count(Address.id)).\
where(Address.user_id==self.id)
)
from sqlalchemy import select, func
- subq = select([
+ subq = select(
func.count(orders.c.id).label('order_count'),
func.max(orders.c.price).label('highest_order'),
orders.c.customer_id
- ]).group_by(orders.c.customer_id).alias()
+ ).group_by(orders.c.customer_id).alias()
- customer_select = select([customers, subq]).\
- select_from(
- join(customers, subq,
- customers.c.id == subq.c.customer_id)
- ).alias()
+ customer_select = select(customers, subq).select_from(
+ join(customers, subq, customers.c.id == subq.c.customer_id)
+ ).alias()
class Customer(Base):
__table__ = customer_select
session = Session(e)
- foo = Foo(pk=sql.select([sql.func.coalesce(sql.func.max(Foo.pk) + 1, 1)])
+ foo = Foo(pk=sql.select(sql.func.coalesce(sql.func.max(Foo.pk) + 1, 1)))
session.add(foo)
session.commit()
result = session.execute("select * from table where id=:id", {'id':7})
# execute a SQL expression construct
- result = session.execute(select([mytable]).where(mytable.c.id==7))
+ result = session.execute(select(mytable).where(mytable.c.id==7))
The current :class:`~sqlalchemy.engine.Connection` held by the
:class:`~sqlalchemy.orm.session.Session` is accessible using the
)
result = session.execute(
- select([mytable], mytable.c.id==7),
+ select(mytable).where(mytable.c.id==7),
bind_arguments={'mapper': MyMappedClass}
)
# Finding the ancestors is a little bit trickier. We need to create a fake
# secondary table since this behaves like a many-to-many join.
secondary = select(
- [
- id.label("id"),
- func.unnest(
- cast(
- func.string_to_array(
- func.regexp_replace(path, r"\.?\d+$", ""), "."
- ),
- ARRAY(Integer),
- )
- ).label("ancestor_id"),
- ]
+ id.label("id"),
+ func.unnest(
+ cast(
+ func.string_to_array(
+ func.regexp_replace(path, r"\.?\d+$", ""), "."
+ ),
+ ARRAY(Integer),
+ )
+ ).label("ancestor_id"),
).alias()
ancestors = relationship(
"Node",
else:
personnel = mapper.mapped_table
right_most_sibling = connection.scalar(
- select([personnel.c.rgt]).where(
+ select(personnel.c.rgt).where(
personnel.c.emp == instance.parent.emp
)
)
with engine.connect() as conn:
for id_ in random.sample(ids, n):
- stmt = select([Customer.__table__]).where(Customer.id == id_)
+ stmt = select(Customer.__table__).where(Customer.id == id_)
row = conn.execute(stmt).first()
tuple(row)
compiled_cache=compiled_cache
) as conn:
for id_ in random.sample(ids, n):
- stmt = select([Customer.__table__]).where(Customer.id == id_)
+ stmt = select(Customer.__table__).where(Customer.id == id_)
row = conn.execute(stmt).first()
tuple(row)
def test_core_reuse_stmt(n):
"""test core, reusing the same statement (but recompiling each time)."""
- stmt = select([Customer.__table__]).where(Customer.id == bindparam("id"))
+ stmt = select(Customer.__table__).where(Customer.id == bindparam("id"))
with engine.connect() as conn:
for id_ in random.sample(ids, n):
def test_core_reuse_stmt_compiled_cache(n):
"""test core, reusing the same statement + compiled cache."""
- stmt = select([Customer.__table__]).where(Customer.id == bindparam("id"))
+ stmt = select(Customer.__table__).where(Customer.id == bindparam("id"))
compiled_cache = {}
with engine.connect().execution_options(
compiled_cache=compiled_cache
for c in gis_cols:
bind.execute(
select(
- [
- func.DropGeometryColumn(
- "public", table.name, c.name
- )
- ],
- autocommit=True,
- )
+ func.DropGeometryColumn(
+ "public", table.name, c.name
+ )
+ ).execution_options(autocommit=True)
)
elif event == "after-create":
if isinstance(c.type, Geometry):
bind.execute(
select(
- [
- func.AddGeometryColumn(
- table.name,
- c.name,
- c.type.srid,
- c.type.name,
- c.type.dimension,
- )
- ],
- autocommit=True,
- )
+ func.AddGeometryColumn(
+ table.name,
+ c.name,
+ c.type.srid,
+ c.type.name,
+ c.type.dimension,
+ )
+ ).execution_options(autocommit=True)
)
elif event == "after-drop":
table.columns = table.info.pop("_saved_columns")
# core usage just fine:
road_table = Road.__table__
- stmt = select([road_table]).where(
+ stmt = select(road_table).where(
road_table.c.road_geom.intersects(r1.road_geom)
)
print(session.execute(stmt).fetchall())
# look up the hex binary version, using SQLAlchemy casts
as_binary = session.scalar(
- select([type_coerce(r.road_geom, Geometry(coerce_="binary"))])
+ select(type_coerce(r.road_geom, Geometry(coerce_="binary")))
)
assert as_binary.as_hex == (
"01020000000200000000000000b832084100000000"
# back again, same method !
as_text = session.scalar(
- select([type_coerce(as_binary, Geometry(coerce_="text"))])
+ select(type_coerce(as_binary, Geometry(coerce_="text")))
)
assert as_text.desc == "LINESTRING(198231 263418,198213 268322)"
sess.commit()
actual_changed_base = sess.scalar(
- select([BaseClass.__history_mapper__.local_table.c.changed])
+ select(BaseClass.__history_mapper__.local_table.c.changed)
)
actual_changed_sub = sess.scalar(
- select([SubClass.__history_mapper__.local_table.c.changed])
+ select(SubClass.__history_mapper__.local_table.c.changed)
)
h1 = sess.query(BaseClassHistory).first()
eq_(h1.changed, actual_changed_base)
def __declare_last__(cls):
alias = cls.__table__.alias()
cls.calc_is_current_version = column_property(
- select([func.max(alias.c.version_id) == cls.version_id]).where(
+ select(func.max(alias.c.version_id) == cls.version_id).where(
alias.c.id == cls.id
)
)
Server support the TOP keyword. This syntax is used for all SQL Server
versions when no OFFSET clause is present. A statement such as::
- select([some_table]).limit(5)
+ select(some_table).limit(5)
will render similarly to::
LIMIT and OFFSET, or just OFFSET alone, will be rendered using the
``ROW_NUMBER()`` window function. A statement such as::
- select([some_table]).order_by(some_table.c.col3).limit(5).offset(10)
+ select(some_table).order_by(some_table.c.col3).limit(5).offset(10)
will render similarly to::
from sqlalchemy import Numeric
from sqlalchemy.dialects.mssql import try_cast
- stmt = select([
+ stmt = select(
try_cast(product_table.c.unit_price, Numeric(10, 4))
- ])
+ )
The above would render::
mssql_rn = sql.column("mssql_rn")
limitselect = sql.select(
- [c for c in select.c if c.key != "mssql_rn"]
+ *[c for c in select.c if c.key != "mssql_rn"]
)
if offset_clause is not None:
limitselect = limitselect.where(mssql_rn > offset_clause)
@reflection.cache
def get_schema_names(self, connection, **kw):
- s = sql.select(
- [ischema.schemata.c.schema_name],
- order_by=[ischema.schemata.c.schema_name],
+ s = sql.select(ischema.schemata.c.schema_name).order_by(
+ ischema.schemata.c.schema_name
)
schema_names = [r[0] for r in connection.execute(s)]
return schema_names
# Wrap the middle select and add the hint
inner_subquery = select.alias()
limitselect = sql.select(
- [
+ *[
c
for c in inner_subquery.c
if orig_select.selected_columns.corresponding_column(c)
limit_subquery = limitselect.alias()
origselect_cols = orig_select.selected_columns
offsetselect = sql.select(
- [
+ *[
c
for c in limit_subquery.c
if origselect_cols.corresponding_column(c)
from sqlalchemy.dialects import postgresql
from sqlalchemy import select, func
- stmt = select([
- array([1,2]) + array([3,4,5])
- ])
+ stmt = select(array([1,2]) + array([3,4,5]))
print(stmt.compile(dialect=postgresql.dialect()))
recursively adding the dimensions of the inner :class:`_types.ARRAY`
type::
- stmt = select([
+ stmt = select(
array([
array([1, 2]), array([3, 4]), array([column('q'), column('x')])
])
- ])
+ )
print(stmt.compile(dialect=postgresql.dialect()))
Produces::
method on any textual column expression.
On a PostgreSQL dialect, an expression like the following::
- select([sometable.c.text.match("search string")])
+ select(sometable.c.text.match("search string"))
will emit to the database::
and ``to_tsvector()`` are available
explicitly using the standard :data:`.func` construct. For example::
- select([
- func.to_tsvector('fat cats ate rats').match('cat & rat')
- ])
+ select(func.to_tsvector('fat cats ate rats').match('cat & rat'))
Emits the equivalent of::
from sqlalchemy.dialects.postgresql import TSVECTOR
from sqlalchemy import select, cast
- select([cast("some text", TSVECTOR)])
+ select(cast("some text", TSVECTOR))
produces a statement equivalent to::
search strategies, the ``match`` method accepts a ``postgresql_regconfig``
keyword argument::
- select([mytable.c.id]).where(
+ select(mytable.c.id).where(
mytable.c.title.match('somestring', postgresql_regconfig='english')
)
One can also specifically pass in a ``'regconfig'`` value to the
``to_tsvector()`` command as the initial argument::
- select([mytable.c.id]).where(
+ select(mytable.c.id).where(
func.to_tsvector('english', mytable.c.title )\
.match('somestring', postgresql_regconfig='english')
)
:func:`_expression.literal_column` function with the name of the table may be
used in its place::
- select(['*']).select_from(func.my_function(literal_column('my_table')))
+ select('*').select_from(func.my_function(literal_column('my_table')))
Will generate the SQL::
from sqlalchemy.dialects.postgresql import aggregate_order_by
expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
- stmt = select([expr])
+ stmt = select(expr)
would represent the expression::
table.c.a,
aggregate_order_by(literal_column("','"), table.c.a)
)
- stmt = select([expr])
+ stmt = select(expr)
Would represent::
from sqlalchemy.dialects.postgresql import array, hstore
- select([hstore('key1', 'value1')])
-
- select([
- hstore(
- array(['key1', 'key2', 'key3']),
- array(['value1', 'value2', 'value3'])
- )
- ])
+ select(hstore('key1', 'value1'))
+
+ select(
+ hstore(
+ array(['key1', 'key2', 'key3']),
+ array(['value1', 'value2', 'value3'])
+ )
+ )
.. seealso::
E.g.::
- select([data_table.c.data['some key'].astext])
+ select(data_table.c.data['some key'].astext)
.. seealso::
@util.memoized_property
def _dialect_specific_select_one(self):
- return str(expression.select([1]).compile(dialect=self))
+ return str(expression.select(1).compile(dialect=self))
def do_ping(self, dbapi_connection):
cursor = None
default_arg = expression.type_coerce(default.arg, type_)
else:
default_arg = default.arg
- compiled = expression.select([default_arg]).compile(
- dialect=self.dialect
- )
+ compiled = expression.select(default_arg).compile(dialect=self.dialect)
compiled_params = compiled.construct_params()
processors = compiled._bind_processors
if compiled.positional:
from sqlalchemy import select
- s = select([MyColumn('x'), MyColumn('y')])
+ s = select(MyColumn('x'), MyColumn('y'))
print(str(s))
Produces::
compiler.process(element.select, **kw)
)
- insert = InsertFromSelect(t1, select([t1]).where(t1.c.x>5))
+ insert = InsertFromSelect(t1, select(t1).where(t1.c.x>5))
print(insert)
Produces::
from sqlalchemy import select, union_all
exp = union_all(
- select([users.c.name, sql_false().label("enrolled")]),
- select([customers.c.name, customers.c.enrolled])
+ select(users.c.name, sql_false().label("enrolled")),
+ select(customers.c.name, customers.c.enrolled)
)
"""
@balance.expression
def balance(cls):
- return select([func.sum(SavingsAccount.balance)]).\
+ return select(func.sum(SavingsAccount.balance)).\
where(SavingsAccount.user_id==cls.id).\
label('total_balance')
"""
self._auto_correlate = False
- if fromclauses and fromclauses[0] is None:
+ if fromclauses and fromclauses[0] in {None, False}:
self._correlate = ()
else:
self._correlate = set(self._correlate).union(
Given a case for :func:`.aliased` such as selecting ``User``
objects from a SELECT statement::
- select_stmt = select([User]).where(User.id == 7)
+ select_stmt = select(User).where(User.id == 7)
user_alias = aliased(User, select_stmt)
q = session.query(user_alias).\
result = []
for type_, table in table_map.items():
if typecolname is not None:
- result.append(
- sql.select(
- [col(name, table) for name in colnames]
- + [
- sql.literal_column(
- sql_util._quote_ddl_expr(type_)
- ).label(typecolname)
- ],
- from_obj=[table],
- )
+ cols = [col(name, table) for name in colnames]
+ cols.append(
+ sql.literal_column(sql_util._quote_ddl_expr(type_)).label(
+ typecolname
+ ),
)
+ result.append(sql.select(*cols).select_from(table))
else:
result.append(
sql.select(
- [col(name, table) for name in colnames], from_obj=[table]
- )
+ *[col(name, table) for name in colnames]
+ ).select_from(table)
)
return sql.union_all(*result).alias(aliasname)
The :meth:`execution_options` method is generative. A new
instance of this statement is returned that contains the options::
- statement = select([table.c.x, table.c.y])
+ statement = select(table.c.x, table.c.y)
statement = statement.execution_options(autocommit=True)
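Because the method is generative, the original statement is left unmodified;
a small sketch (``get_execution_options()`` reads the options back)::

    stmt2 = statement.execution_options(stream_results=True)
    assert stmt2 is not statement
    assert stmt2.get_execution_options()["stream_results"] is True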
Note that only a subset of possible execution options can be applied
e.g.::
- sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
+ sel = select(table1.c.a, table1.c.b).where(table1.c.c > 5)
ins = table2.insert().from_select(['a', 'b'], sel)
:param names: a sequence of string column names or
subquery::
users.update().values(name='ed').where(
- users.c.name==select([addresses.c.email_address]).\
+ users.c.name==select(addresses.c.email_address).\
where(addresses.c.user_id==users.c.id).\
scalar_subquery()
)
the subquery to the outer table being updated::
users.update().values(
- name=select([addresses.c.email_address]).\
+ name=select(addresses.c.email_address).\
where(addresses.c.user_id==users.c.id).\
scalar_subquery()
)
subquery::
users.delete().where(
- users.c.name==select([addresses.c.email_address]).\
+ users.c.name==select(addresses.c.email_address).\
where(addresses.c.user_id==users.c.id).\
scalar_subquery()
)
E.g.::
from sqlalchemy import between
- stmt = select([users_table]).where(between(users_table.c.id, 5, 7))
+ stmt = select(users_table).where(between(users_table.c.id, 5, 7))
Would produce SQL resembling::
:meth:`_expression.ColumnElement.between` method available on all
SQL expressions, as in::
- stmt = select([users_table]).where(users_table.c.id.between(5, 7))
+ stmt = select(users_table).where(users_table.c.id.between(5, 7))
All arguments passed to :func:`.between`, including the left side
column expression, are coerced from Python scalar values if a
t = table('t', column('x'))
- s = select([t]).where(t.c.x == 5)
+ s = select(t).where(t.c.x == 5)
print(s.compile(compile_kwargs={"literal_binds": True}))
from sqlalchemy import bindparam
- stmt = select([users_table]).\
+ stmt = select(users_table).\
where(users_table.c.name == bindparam('username'))
Detailed discussion of how :class:`.BindParameter` is used is
from sqlalchemy import bindparam
- stmt = select([users_table]).\
+ stmt = select(users_table).\
where(users_table.c.name == bindparam('username'))
The above statement, when rendered, will produce SQL similar to::
along where it is later used within statement execution. If we
invoke a statement like the following::
- stmt = select([users_table]).where(users_table.c.name == 'Wendy')
+ stmt = select(users_table).where(users_table.c.name == 'Wendy')
result = connection.execute(stmt)
We would see SQL logging output as::
a literal string SQL fragment is specified as part of a larger query,
such as for the WHERE clause of a SELECT statement::
- s = select([users.c.id, users.c.name]).where(text("id=:user_id"))
+ s = select(users.c.id, users.c.name).where(text("id=:user_id"))
result = connection.execute(s, user_id=12)
:func:`_expression.text` is also used for the construction
stmt = text("SELECT id, name FROM some_table")
stmt = stmt.columns(column('id'), column('name')).subquery('st')
- stmt = select([mytable]).\
+ stmt = select(mytable).\
select_from(
mytable.join(stmt, mytable.c.name == stmt.c.name)
).where(stmt.c.id > 5)
stmt = stmt.columns(id=Integer, name=String).cte('st')
- stmt = select([sometable]).where(sometable.c.id == stmt.c.id)
+ stmt = select(sometable).where(sometable.c.id == stmt.c.id)
:param \*cols: A series of :class:`_expression.ColumnElement` objects,
typically
E.g.::
>>> from sqlalchemy import false
- >>> print(select([t.c.x]).where(false()))
+ >>> print(select(t.c.x).where(false()))
SELECT x FROM t WHERE false
A backend which does not support true/false constants will render as
an expression against 1 or 0::
- >>> print(select([t.c.x]).where(false()))
+ >>> print(select(t.c.x).where(false()))
SELECT x FROM t WHERE 0 = 1
The :func:`.true` and :func:`.false` constants also feature
"short circuit" operation within an :func:`.and_` or :func:`.or_`
conjunction::
- >>> print(select([t.c.x]).where(or_(t.c.x > 5, true())))
+ >>> print(select(t.c.x).where(or_(t.c.x > 5, true())))
SELECT x FROM t WHERE true
- >>> print(select([t.c.x]).where(and_(t.c.x > 5, false())))
+ >>> print(select(t.c.x).where(and_(t.c.x > 5, false())))
SELECT x FROM t WHERE false
.. versionchanged:: 0.9 :func:`.true` and :func:`.false` feature
E.g.::
>>> from sqlalchemy import true
- >>> print(select([t.c.x]).where(true()))
+ >>> print(select(t.c.x).where(true()))
SELECT x FROM t WHERE true
A backend which does not support true/false constants will render as
an expression against 1 or 0::
- >>> print(select([t.c.x]).where(true()))
+ >>> print(select(t.c.x).where(true()))
SELECT x FROM t WHERE 1 = 1
The :func:`.true` and :func:`.false` constants also feature
"short circuit" operation within an :func:`.and_` or :func:`.or_`
conjunction::
- >>> print(select([t.c.x]).where(or_(t.c.x > 5, true())))
+ >>> print(select(t.c.x).where(or_(t.c.x > 5, true())))
SELECT x FROM t WHERE true
- >>> print(select([t.c.x]).where(and_(t.c.x > 5, false())))
+ >>> print(select(t.c.x).where(and_(t.c.x > 5, false())))
SELECT x FROM t WHERE false
.. versionchanged:: 0.9 :func:`.true` and :func:`.false` feature
from sqlalchemy import and_
- stmt = select([users_table]).where(
+ stmt = select(users_table).where(
and_(
users_table.c.name == 'wendy',
users_table.c.enrolled == True
need to be parenthesized in order to function with Python
operator precedence behavior)::
- stmt = select([users_table]).where(
+ stmt = select(users_table).where(
(users_table.c.name == 'wendy') &
(users_table.c.enrolled == True)
)
times against a statement, which will have the effect of each
clause being combined using :func:`.and_`::
- stmt = select([users_table]).\
+ stmt = select(users_table).\
where(users_table.c.name == 'wendy').\
where(users_table.c.enrolled == True)
from sqlalchemy import or_
- stmt = select([users_table]).where(
+ stmt = select(users_table).where(
or_(
users_table.c.name == 'wendy',
users_table.c.name == 'jack'
need to be parenthesized in order to function with Python
operator precedence behavior)::
- stmt = select([users_table]).where(
+ stmt = select(users_table).where(
(users_table.c.name == 'wendy') |
(users_table.c.name == 'jack')
)
from sqlalchemy import case
- stmt = select([users_table]).\
+ stmt = select(users_table).\
where(
case(
(users_table.c.name == 'wendy', 'W'),
from sqlalchemy import case
- stmt = select([users_table]).\
+ stmt = select(users_table).\
where(
case(
(users_table.c.name == 'wendy', 'W'),
compared against keyed to result expressions. The statement below is
equivalent to the preceding statement::
- stmt = select([users_table]).\
+ stmt = select(users_table).\
where(
case(
{"wendy": "W", "jack": "J"},
from sqlalchemy import cast, Numeric
- stmt = select([
- cast(product_table.c.unit_price, Numeric(10, 4))
- ])
+ stmt = select(cast(product_table.c.unit_price, Numeric(10, 4)))
Details on :class:`.Cast` usage is at :func:`.cast`.
from sqlalchemy import cast, Numeric
- stmt = select([
- cast(product_table.c.unit_price, Numeric(10, 4))
- ])
+ stmt = select(cast(product_table.c.unit_price, Numeric(10, 4)))
The above statement will produce SQL resembling::
from sqlalchemy import type_coerce
- stmt = select([type_coerce(log_table.date_string, StringDateTime())])
+ stmt = select(type_coerce(log_table.date_string, StringDateTime()))
The above construct will produce a :class:`.TypeCoerce` object, which
does not modify the rendering in any way on the SQL side, with the
In order to provide a named label for the expression, use
:meth:`_expression.ColumnElement.label`::
- stmt = select([
+ stmt = select(
type_coerce(log_table.date_string, StringDateTime()).label('date')
- ])
+ )
A type that features bound-value handling will also have that behavior
# bound-value handling of MyStringType will be applied to the
# literal value "some string"
- stmt = select([type_coerce("some string", MyStringType)])
+ stmt = select(type_coerce("some string", MyStringType))
When using :func:`.type_coerce` with composed expressions, note that
**parenthesis are not applied**. If :func:`.type_coerce` is being
from sqlalchemy import desc, nullsfirst
- stmt = select([users_table]).order_by(
+ stmt = select(users_table).order_by(
nullsfirst(desc(users_table.c.name)))
The SQL expression from the above would resemble::
rather than as its standalone
function version, as in::
- stmt = select([users_table]).order_by(
+ stmt = select(users_table).order_by(
users_table.c.name.desc().nullsfirst())
.. seealso::
from sqlalchemy import desc, nullslast
- stmt = select([users_table]).order_by(
+ stmt = select(users_table).order_by(
nullslast(desc(users_table.c.name)))
The SQL expression from the above would resemble::
rather than as its standalone
function version, as in::
- stmt = select([users_table]).order_by(
+ stmt = select(users_table).order_by(
users_table.c.name.desc().nullslast())
.. seealso::
from sqlalchemy import desc
- stmt = select([users_table]).order_by(desc(users_table.c.name))
+ stmt = select(users_table).order_by(desc(users_table.c.name))
will produce SQL as::
e.g.::
- stmt = select([users_table]).order_by(users_table.c.name.desc())
+ stmt = select(users_table).order_by(users_table.c.name.desc())
:param column: A :class:`_expression.ColumnElement` (e.g.
scalar SQL expression)
e.g.::
from sqlalchemy import asc
- stmt = select([users_table]).order_by(asc(users_table.c.name))
+ stmt = select(users_table).order_by(asc(users_table.c.name))
will produce SQL as::
e.g.::
- stmt = select([users_table]).order_by(users_table.c.name.asc())
+ stmt = select(users_table).order_by(users_table.c.name.asc())
:param column: A :class:`_expression.ColumnElement` (e.g.
scalar SQL expression)
as in::
from sqlalchemy import distinct, func
- stmt = select([func.count(distinct(users_table.c.name))])
+ stmt = select(func.count(distinct(users_table.c.name)))
The above would produce an expression resembling::
The :func:`.distinct` function is also available as a column-level
method, e.g. :meth:`_expression.ColumnElement.distinct`, as in::
- stmt = select([func.count(users_table.c.name.distinct())])
+ stmt = select(func.count(users_table.c.name.distinct()))
The :func:`.distinct` operator is different from the
:meth:`_expression.Select.distinct` method of
expr = 5 == any_(mytable.c.somearray)
# mysql '5 = ANY (SELECT value FROM table)'
- expr = 5 == any_(select([table.c.value]))
+ expr = 5 == any_(select(table.c.value))
.. versionadded:: 1.1
expr = 5 == all_(mytable.c.somearray)
# mysql '5 = ALL (SELECT value FROM table)'
- expr = 5 == all_(select([table.c.value]))
+ expr = 5 == all_(select(table.c.value))
.. versionadded:: 1.1
the :meth:`.FunctionElement.within_group` method, e.g.::
from sqlalchemy import within_group
- stmt = select([
+ stmt = select(
department.c.id,
func.percentile_cont(0.5).within_group(
department.c.salary.desc()
)
- ])
+ )
The above statement would produce SQL similar to
``SELECT department.id, percentile_cont(0.5)
from sqlalchemy import column
id, name = column("id"), column("name")
- stmt = select([id, name]).select_from("user")
+ stmt = select(id, name).select_from("user")
The above statement would produce SQL like::
from sqlalchemy import column
id, name = column("id"), column("name")
- stmt = select([id, name]).select_from("user")
+ stmt = select(id, name).select_from("user")
The above statement would produce SQL like::
from sqlalchemy.sql import column
id, name = column("id"), column("name")
- stmt = select([id, name]).select_from("user")
+ stmt = select(id, name).select_from("user")
The text handled by :func:`_expression.column`
is assumed to be handled
column("description"),
)
- stmt = select([user.c.description]).where(user.c.name == 'wendy')
+ stmt = select(user.c.description).where(user.c.name == 'wendy')
A :func:`_expression.column` / :func:`.table`
construct like that illustrated
from sqlalchemy.sql import column
- stmt = select([column('x'), column('y')]).\
+ stmt = select(column('x'), column('y')).\
select_from(func.myfunction())
from sqlalchemy.sql import column
- stmt = select([column('data_view')]).\
+ stmt = select(column('data_view')).\
select_from(SomeTable).\
select_from(func.unnest(SomeTable.data).alias('data_view')
)
This is shorthand for::
- s = select([function_element])
+ s = select(function_element)
"""
s = Select._create_select(self)
The returned object is an instance of :class:`.Function`, and is a
column-oriented SQL element like any other, and is used in that way::
- >>> print(select([func.count(table.c.id)]))
+ >>> print(select(func.count(table.c.id)))
SELECT count(sometable.id) FROM sometable
Any name can be given to :data:`.func`. If the function name is unknown to
class as_utc(GenericFunction):
type = DateTime
- print(select([func.as_utc()]))
+ print(select(func.as_utc()))
User-defined generic functions can be organized into
packages by specifying the "package" attribute when defining
The above function would be available from :data:`.func`
using the package name ``time``::
- print(select([func.time.as_utc()]))
+ print(select(func.time.as_utc()))
A final option is to allow the function to be accessed
from one name in :data:`.func` but to render as a different name.
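A sketch of that pattern, following the :class:`.GenericFunction` examples
above (the ``Geometry`` type is assumed to be defined elsewhere): the
``identifier`` attribute names the :data:`.func` accessor, while ``name``
controls the rendered SQL::

    class GeoBuffer(GenericFunction):
        type = Geometry
        package = "geo"
        name = "ST_Buffer"
        identifier = "buffer"

    # invoked as func.geo.buffer(...), renders as ST_Buffer(...)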
my_table = table('some_table', column('id'))
- stmt = select([func.count()]).select_from(my_table)
+ stmt = select(func.count()).select_from(my_table)
Executing ``stmt`` would emit::
e.g.::
- stmt = select([func.array_agg(table.c.values)[2:5]])
+ stmt = select(func.array_agg(table.c.values)[2:5])
.. versionadded:: 1.1
e.g. :meth:`_expression.Select.group_by`::
stmt = select(
- [func.sum(table.c.value), table.c.col_1, table.c.col_2]
- ).group_by(func.cube(table.c.col_1, table.c.col_2))
+ func.sum(table.c.value), table.c.col_1, table.c.col_2
+ ).group_by(func.cube(table.c.col_1, table.c.col_2))
.. versionadded:: 1.2
e.g. :meth:`_expression.Select.group_by`::
stmt = select(
- [func.sum(table.c.value), table.c.col_1, table.c.col_2]
+ func.sum(table.c.value), table.c.col_1, table.c.col_2
).group_by(func.rollup(table.c.col_1, table.c.col_2))
.. versionadded:: 1.2
e.g. :meth:`_expression.Select.group_by`::
stmt = select(
- [func.sum(table.c.value), table.c.col_1, table.c.col_2]
+ func.sum(table.c.value), table.c.col_1, table.c.col_2
).group_by(func.grouping_sets(table.c.col_1, table.c.col_2))
In order to group by multiple sets, use the :func:`.tuple_` construct::
from sqlalchemy import tuple_
stmt = select(
- [
- func.sum(table.c.value),
- table.c.col_1, table.c.col_2,
- table.c.col_3]
+ func.sum(table.c.value),
+ table.c.col_1, table.c.col_2,
+ table.c.col_3
).group_by(
func.grouping_sets(
tuple_(table.c.col_1, table.c.col_2),
E.g.::
- stmt = select([sometable]).\
+ stmt = select(sometable).\
where(sometable.c.column.like("%foobar%"))
:param other: expression to be compared
E.g.::
- stmt = select([sometable]).\
+ stmt = select(sometable).\
where(sometable.c.column.ilike("%foobar%"))
:param other: expression to be compared
stmt.where(
column.in_(
- select([othertable.c.y]).
+ select(othertable.c.y).
where(table.c.x == othertable.c.x)
)
)
E.g.::
- stmt = select([sometable]).\
+ stmt = select(sometable).\
where(sometable.c.column.startswith("foobar"))
Since the operator uses ``LIKE``, wildcard characters
E.g.::
- stmt = select([sometable]).\
+ stmt = select(sometable).\
where(sometable.c.column.endswith("foobar"))
Since the operator uses ``LIKE``, wildcard characters
E.g.::
- stmt = select([sometable]).\
+ stmt = select(sometable).\
where(sometable.c.column.contains("foobar"))
Since the operator uses ``LIKE``, wildcard characters
expr = 5 == mytable.c.somearray.any_()
# mysql '5 = ANY (SELECT value FROM table)'
- expr = 5 == select([table.c.value]).scalar_subquery().any_()
+ expr = 5 == select(table.c.value).scalar_subquery().any_()
.. seealso::
expr = 5 == mytable.c.somearray.all_()
# mysql '5 = ALL (SELECT value FROM table)'
- expr = 5 == select([table.c.value]).scalar_subquery().all_()
+ expr = 5 == select(table.c.value).scalar_subquery().all_()
.. seealso::
stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql")
# MySQL 5.7 optimizer hints
- stmt = select([table]).prefix_with(
+ stmt = select(table).prefix_with(
"/*+ BKA(t1) */", dialect="mysql")
Multiple prefixes can be specified by multiple calls
E.g.::
- stmt = select([col1, col2]).cte().suffix_with(
+ stmt = select(col1, col2).cte().suffix_with(
"cycle empno set y_cycle to 1 default 0", dialect="oracle")
Multiple suffixes can be specified by multiple calls
the table or alias. E.g. when using Oracle, the
following::
- select([mytable]).\
+ select(mytable).\
with_hint(mytable, "index(%(name)s ix_mytable)")
Would render SQL as::
hint to a particular backend. Such as, to add hints for both Oracle
and Sybase simultaneously::
- select([mytable]).\
+ select(mytable).\
with_hint(mytable, "index(%(name)s ix_mytable)", 'oracle').\
with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
j = user_table.join(address_table,
user_table.c.id == address_table.c.user_id)
- stmt = select([user_table]).select_from(j)
+ stmt = select(user_table).select_from(j)
would emit SQL along the lines of::
to the construction of SQL expressions using table-bound or
other selectable-bound columns::
- select([mytable]).where(mytable.c.somecolumn == 5)
+ select(mytable).where(mytable.c.somecolumn == 5)
"""
j = join(user_table, address_table,
user_table.c.id == address_table.c.user_id)
- stmt = select([user_table]).select_from(j)
+ stmt = select(user_table).select_from(j)
would emit SQL along the lines of::
from sqlalchemy import select, alias
j = alias(
- select([j.left, j.right]).\
+ select(j.left, j.right).\
select_from(j).\
- with_labels(True).\
+ apply_labels().\
correlate(False),
name=name
)
func.bernoulli(1),
name='alias',
seed=func.random())
- stmt = select([selectable.c.people_id])
+ stmt = select(selectable.c.people_id)
Assuming ``people`` with a column ``people_id``, the above
statement would render as::
Column('quantity', Integer)
)
- regional_sales = select([
+ regional_sales = select(
orders.c.region,
func.sum(orders.c.amount).label('total_sales')
- ]).group_by(orders.c.region).cte("regional_sales")
+ ).group_by(orders.c.region).cte("regional_sales")
- top_regions = select([regional_sales.c.region]).\
+ top_regions = select(regional_sales.c.region).\
where(
regional_sales.c.total_sales >
- select([
- func.sum(regional_sales.c.total_sales)/10
- ])
+ select(
+ func.sum(regional_sales.c.total_sales) / 10
+ )
).cte("top_regions")
- statement = select([
+ statement = select(
orders.c.region,
orders.c.product,
func.sum(orders.c.quantity).label("product_units"),
func.sum(orders.c.amount).label("product_sales")
- ]).where(orders.c.region.in_(
- select([top_regions.c.region])
+ ).where(orders.c.region.in_(
+ select(top_regions.c.region)
)).group_by(orders.c.region, orders.c.product)
result = conn.execute(statement).fetchall()
Column('quantity', Integer),
)
- included_parts = select([
- parts.c.sub_part,
- parts.c.part,
- parts.c.quantity]).\
- where(parts.c.part=='our part').\
- cte(recursive=True)
+ included_parts = select(
+     parts.c.sub_part, parts.c.part, parts.c.quantity
+ ).\
+ where(parts.c.part=='our part').\
+ cte(recursive=True)
incl_alias = included_parts.alias()
parts_alias = parts.alias()
included_parts = included_parts.union_all(
- select([
+ select(
parts_alias.c.sub_part,
parts_alias.c.part,
parts_alias.c.quantity
- ]).
- where(parts_alias.c.part==incl_alias.c.sub_part)
+ ).\
+ where(parts_alias.c.part==incl_alias.c.sub_part)
)
- statement = select([
+ statement = select(
included_parts.c.sub_part,
func.sum(included_parts.c.quantity).
label('total_quantity')
- ]).\
+ ).\
group_by(included_parts.c.sub_part)
result = conn.execute(statement).fetchall()
upsert = visitors.insert().from_select(
[visitors.c.product_id, visitors.c.date, visitors.c.count],
- select([literal(product_id), literal(day), literal(count)])
+ select(literal(product_id), literal(day), literal(count))
.where(~exists(update_cte.select()))
)
Given a SELECT statement such as::
- stmt = select([table.c.id, table.c.name])
+ stmt = select(table.c.id, table.c.name)
The above statement might look like::
a named sub-element::
subq = stmt.subquery()
- new_stmt = select([subq])
+ new_stmt = select(subq)
The above renders as::
E.g.::
- stmt = select([table]).with_for_update(nowait=True)
+ stmt = select(table).with_for_update(nowait=True)
On a database like PostgreSQL or Oracle, the above would render a
statement like::
e.g.::
- stmt = select([table]).order_by(table.c.id, table.c.name)
+ stmt = select(table).order_by(table.c.id, table.c.name)
:param \*clauses: a series of :class:`_expression.ColumnElement`
constructs
e.g.::
- stmt = select([table.c.name, func.max(table.c.stat)]).\
+ stmt = select(table.c.name, func.max(table.c.stat)).\
group_by(table.c.name)
:param \*clauses: a series of :class:`_expression.ColumnElement`
table1 = table('t1', column('a'))
table2 = table('t2', column('b'))
- s = select([table1.c.a]).\
+ s = select(table1.c.a).\
select_from(
table1.join(table2, table1.c.a==table2.c.b)
)
if desired, in the case that the FROM clause cannot be fully
derived from the columns clause::
- select([func.count('*')]).select_from(table1)
+ select(func.count('*')).select_from(table1)
"""
"""
self._auto_correlate = False
- if fromclauses and fromclauses[0] is None:
+ if fromclauses and fromclauses[0] in {None, False}:
self._correlate = ()
else:
self._correlate = self._correlate + tuple(
"""
self._auto_correlate = False
- if fromclauses and fromclauses[0] is None:
+ if fromclauses and fromclauses[0] in {None, False}:
self._correlate_except = ()
else:
self._correlate_except = (self._correlate_except or ()) + tuple(
col1 = column('q', Integer)
col2 = column('p', Integer)
- stmt = select([col1, col2])
+ stmt = select(col1, col2)
Above, ``stmt.selected_columns`` would be a collection that contains
the ``col1`` and ``col2`` objects directly. For a statement that is
E.g.::
>>> from sqlalchemy import cast, select, String
- >>> print(select([cast('some string', String(collation='utf8'))]))
+ >>> print(select(cast('some string', String(collation='utf8'))))
SELECT CAST(:param_1 AS VARCHAR COLLATE utf8) AS anon_1
:param convert_unicode: When set to ``True``, the
e.g.::
- stmt = select([
+ stmt = select(
mytable.c.json_column['some_data'].as_boolean()
- ]).where(
+ ).where(
mytable.c.json_column['some_data'].as_boolean() == True
)
e.g.::
- stmt = select([
+ stmt = select(
mytable.c.json_column['some_data'].as_string()
- ]).where(
+ ).where(
mytable.c.json_column['some_data'].as_string() ==
'some string'
)
e.g.::
- stmt = select([
+ stmt = select(
mytable.c.json_column['some_data'].as_integer()
- ]).where(
+ ).where(
mytable.c.json_column['some_data'].as_integer() == 5
)
e.g.::
- stmt = select([
+ stmt = select(
mytable.c.json_column['some_data'].as_float()
- ]).where(
+ ).where(
mytable.c.json_column['some_data'].as_float() == 29.75
)
e.g.::
- stmt = select([
- mytable.c.json_column['some_data'].as_json()
- ])
+ stmt = select(mytable.c.json_column['some_data'].as_json())
This is typically the default behavior of indexed elements in any
case.
constructs which will produce the appropriate SQL, both for
SELECT statements::
- select([mytable.c.data[5], mytable.c.data[2:7]])
+ select(mytable.c.data[5], mytable.c.data[2:7])
as well as UPDATE statements when the :meth:`_expression.Update.values`
method
from sqlalchemy.sql import operators
conn.execute(
- select([table.c.data]).where(
+ select(table.c.data).where(
table.c.data.any(7, operator=operators.lt)
)
)
from sqlalchemy.sql import operators
conn.execute(
- select([table.c.data]).where(
+ select(table.c.data).where(
table.c.data.all(7, operator=operators.lt)
)
)
from sqlalchemy.sql import visitors
- stmt = select([some_table]).where(some_table.c.foo == 'bar')
+ stmt = select(some_table).where(some_table.c.foo == 'bar')
def visit_bindparam(bind_param):
print("found bound value: %s" % bind_param.value)
expr = decimal.Decimal("15.7563")
value = e.scalar(
- select([literal(expr)])
+ select(literal(expr))
)
assert value == expr
with config.db.connect() as conn:
cte = (
- select([some_table])
+ select(some_table)
.where(some_table.c.data.in_(["d2", "d3", "d4"]))
.cte("some_cte")
)
result = conn.execute(
- select([cte.c.data]).where(cte.c.data.in_(["d4", "d5"]))
+ select(cte.c.data).where(cte.c.data.in_(["d4", "d5"]))
)
eq_(result.fetchall(), [("d4",)])
with config.db.connect() as conn:
cte = (
- select([some_table])
+ select(some_table)
.where(some_table.c.data.in_(["d2", "d3", "d4"]))
.cte("some_cte", recursive=True)
)
# note that SQL Server requires this to be UNION ALL,
# can't be UNION
cte = cte.union_all(
- select([st1]).where(st1.c.id == cte_alias.c.parent_id)
+ select(st1).where(st1.c.id == cte_alias.c.parent_id)
)
result = conn.execute(
- select([cte.c.data])
+ select(cte.c.data)
.where(cte.c.data != "d2")
.order_by(cte.c.data.desc())
)
with config.db.connect() as conn:
cte = (
- select([some_table])
+ select(some_table)
.where(some_table.c.data.in_(["d2", "d3", "d4"]))
.cte("some_cte")
)
conn.execute(
some_other_table.insert().from_select(
- ["id", "data", "parent_id"], select([cte])
+ ["id", "data", "parent_id"], select(cte)
)
)
eq_(
conn.execute(
- select([some_other_table]).order_by(some_other_table.c.id)
+ select(some_other_table).order_by(some_other_table.c.id)
).fetchall(),
[(2, "d2", 1), (3, "d3", 1), (4, "d4", 3)],
)
with config.db.connect() as conn:
conn.execute(
some_other_table.insert().from_select(
- ["id", "data", "parent_id"], select([some_table])
+ ["id", "data", "parent_id"], select(some_table)
)
)
cte = (
- select([some_table])
+ select(some_table)
.where(some_table.c.data.in_(["d2", "d3", "d4"]))
.cte("some_cte")
)
)
eq_(
conn.execute(
- select([some_other_table]).order_by(some_other_table.c.id)
+ select(some_other_table).order_by(some_other_table.c.id)
).fetchall(),
[
(1, "d1", None),
with config.db.connect() as conn:
conn.execute(
some_other_table.insert().from_select(
- ["id", "data", "parent_id"], select([some_table])
+ ["id", "data", "parent_id"], select(some_table)
)
)
cte = (
- select([some_table])
+ select(some_table)
.where(some_table.c.data.in_(["d2", "d3", "d4"]))
.cte("some_cte")
)
)
eq_(
conn.execute(
- select([some_other_table]).order_by(some_other_table.c.id)
+ select(some_other_table).order_by(some_other_table.c.id)
).fetchall(),
[(1, "d1", None), (5, "d5", 3)],
)
with config.db.connect() as conn:
conn.execute(
some_other_table.insert().from_select(
- ["id", "data", "parent_id"], select([some_table])
+ ["id", "data", "parent_id"], select(some_table)
)
)
cte = (
- select([some_table])
+ select(some_table)
.where(some_table.c.data.in_(["d2", "d3", "d4"]))
.cte("some_cte")
)
conn.execute(
some_other_table.delete().where(
some_other_table.c.data
- == select([cte.c.data])
+ == select(cte.c.data)
.where(cte.c.id == some_other_table.c.id)
.scalar_subquery()
)
)
eq_(
conn.execute(
- select([some_other_table]).order_by(some_other_table.c.id)
+ select(some_other_table).order_by(some_other_table.c.id)
).fetchall(),
[(1, "d1", None), (5, "d5", 3)],
)
def test_plain_union(self, connection):
table = self.tables.some_table
- s1 = select([table]).where(table.c.id == 2)
- s2 = select([table]).where(table.c.id == 3)
+ s1 = select(table).where(table.c.id == 2)
+ s2 = select(table).where(table.c.id == 3)
u1 = union(s1, s2)
with testing.expect_deprecated(
# it before.
def _dont_test_select_from_plain_union(self, connection):
table = self.tables.some_table
- s1 = select([table]).where(table.c.id == 2)
- s2 = select([table]).where(table.c.id == 3)
+ s1 = select(table).where(table.c.id == 2)
+ s2 = select(table).where(table.c.id == 3)
u1 = union(s1, s2).alias().select()
with testing.expect_deprecated(
@testing.requires.parens_in_union_contained_select_w_limit_offset
def test_limit_offset_selectable_in_unions(self, connection):
table = self.tables.some_table
- s1 = (
- select([table])
- .where(table.c.id == 2)
- .limit(1)
- .order_by(table.c.id)
- )
- s2 = (
- select([table])
- .where(table.c.id == 3)
- .limit(1)
- .order_by(table.c.id)
- )
+ s1 = select(table).where(table.c.id == 2).limit(1).order_by(table.c.id)
+ s2 = select(table).where(table.c.id == 3).limit(1).order_by(table.c.id)
u1 = union(s1, s2).limit(2)
with testing.expect_deprecated(
@testing.requires.parens_in_union_contained_select_wo_limit_offset
def test_order_by_selectable_in_unions(self, connection):
table = self.tables.some_table
- s1 = select([table]).where(table.c.id == 2).order_by(table.c.id)
- s2 = select([table]).where(table.c.id == 3).order_by(table.c.id)
+ s1 = select(table).where(table.c.id == 2).order_by(table.c.id)
+ s2 = select(table).where(table.c.id == 3).order_by(table.c.id)
u1 = union(s1, s2).limit(2)
with testing.expect_deprecated(
def test_distinct_selectable_in_unions(self, connection):
table = self.tables.some_table
- s1 = select([table]).where(table.c.id == 2).distinct()
- s2 = select([table]).where(table.c.id == 3).distinct()
+ s1 = select(table).where(table.c.id == 2).distinct()
+ s2 = select(table).where(table.c.id == 3).distinct()
u1 = union(s1, s2).limit(2)
with testing.expect_deprecated(
def test_limit_offset_aliased_selectable_in_unions(self, connection):
table = self.tables.some_table
s1 = (
- select([table])
+ select(table)
.where(table.c.id == 2)
.limit(1)
.order_by(table.c.id)
.select()
)
s2 = (
- select([table])
+ select(table)
.where(table.c.id == 3)
.limit(1)
.order_by(table.c.id)
# there's no way to make this happen with some drivers like
# mysqlclient, pymysql. this at least does produce a non-
# ascii error message for cx_oracle, psycopg2
- conn.execute(select([literal_column(u"méil")]))
+ conn.execute(select(literal_column(u"méil")))
assert False
except exc.DBAPIError as err:
err_str = str(err)
trans.rollback()
eq_(
- conn.scalar(select([self.tables.some_table.c.id])),
+ conn.scalar(select(self.tables.some_table.c.id)),
1 if autocommit else None,
)
eq_(
conn.scalar(
- select([t.c.data]).where(
+ select(t.c.data).where(
t.c.data == literal_column("'some % value'")
)
),
eq_(
conn.scalar(
- select([t.c.data]).where(
+ select(t.c.data).where(
t.c.data == literal_column("'some %% other value'")
)
),
r = connection.execute(
self.tables.autoinc_pk.insert(), data="some data"
)
- pk = connection.scalar(select([self.tables.autoinc_pk.c.id]))
+ pk = connection.scalar(select(self.tables.autoinc_pk.c.id))
eq_(r.inserted_primary_key, (pk,))
@requirements.dbapi_lastrowid
self.tables.autoinc_pk.insert(), data="some data"
)
lastrowid = r.lastrowid
- pk = connection.scalar(select([self.tables.autoinc_pk.c.id]))
+ pk = connection.scalar(select(self.tables.autoinc_pk.c.id))
eq_(lastrowid, pk)
result = connection.execute(
dest_table.insert().from_select(
("data",),
- select([src_table.c.data]).where(
+ select(src_table.c.data).where(
src_table.c.data.in_(["data2", "data3"])
),
)
eq_(result.inserted_primary_key, (None,))
result = connection.execute(
- select([dest_table.c.data]).order_by(dest_table.c.data)
+ select(dest_table.c.data).order_by(dest_table.c.data)
)
eq_(result.fetchall(), [("data2",), ("data3",)])
result = connection.execute(
dest_table.insert().from_select(
("data",),
- select([src_table.c.data]).where(
+ select(src_table.c.data).where(
src_table.c.data.in_(["data2", "data3"])
),
)
eq_(result.inserted_primary_key, (None,))
result = connection.execute(
- select([dest_table.c.data]).order_by(dest_table.c.data)
+ select(dest_table.c.data).order_by(dest_table.c.data)
)
eq_(result.fetchall(), [])
connection.execute(
table.insert(inline=True).from_select(
("id", "data"),
- select([table.c.id + 5, table.c.data]).where(
+ select(table.c.id + 5, table.c.data).where(
table.c.data.in_(["data2", "data3"])
),
)
eq_(
connection.execute(
- select([table.c.data]).order_by(table.c.data)
+ select(table.c.data).order_by(table.c.data)
).fetchall(),
[("data1",), ("data2",), ("data2",), ("data3",), ("data3",)],
)
connection.execute(
table.insert(inline=True).from_select(
("id", "data"),
- select([table.c.id + 5, table.c.data]).where(
+ select(table.c.id + 5, table.c.data).where(
table.c.data.in_(["data2", "data3"])
),
)
eq_(
connection.execute(
- select([table]).order_by(table.c.data, table.c.id)
+ select(table).order_by(table.c.data, table.c.id)
).fetchall(),
[
(1, "data1", 5, 4),
table.insert().returning(table.c.id), data="some data"
)
pk = r.first()[0]
- fetched_pk = connection.scalar(select([table.c.id]))
+ fetched_pk = connection.scalar(select(table.c.id))
eq_(fetched_pk, pk)
def test_explicit_returning_pk_no_autocommit(self, connection):
table.insert().returning(table.c.id), data="some data"
)
pk = r.first()[0]
- fetched_pk = connection.scalar(select([table.c.id]))
+ fetched_pk = connection.scalar(select(table.c.id))
eq_(fetched_pk, pk)
def test_autoincrement_on_insert_implicit_returning(self, connection):
r = connection.execute(
self.tables.autoinc_pk.insert(), data="some data"
)
- pk = connection.scalar(select([self.tables.autoinc_pk.c.id]))
+ pk = connection.scalar(select(self.tables.autoinc_pk.c.id))
eq_(r.inserted_primary_key, (pk,))
def test_row_with_dupe_names(self, connection):
result = connection.execute(
select(
- [
- self.tables.plain_pk.c.data,
- self.tables.plain_pk.c.data.label("data"),
- ]
+ self.tables.plain_pk.c.data,
+ self.tables.plain_pk.c.data.label("data"),
).order_by(self.tables.plain_pk.c.id)
)
row = result.first()
"""
datetable = self.tables.has_dates
- s = select([datetable.alias("x").c.today]).scalar_subquery()
- s2 = select([datetable.c.id, s.label("somelabel")])
+ s = select(datetable.alias("x").c.today).scalar_subquery()
+ s2 = select(datetable.c.id, s.label("somelabel"))
row = connection.execute(s2).first()
eq_(row.somelabel, datetime.datetime(2006, 5, 12, 12, 0, 0))
@testing.combinations(
("global_string", True, "select 1", True),
("global_text", True, text("select 1"), True),
- ("global_expr", True, select([1]), True),
+ ("global_expr", True, select(1), True),
("global_off_explicit", False, text("select 1"), False),
(
"stmt_option",
False,
- select([1]).execution_options(stream_results=True),
+ select(1).execution_options(stream_results=True),
True,
),
(
"stmt_option_disabled",
True,
- select([1]).execution_options(stream_results=False),
+ select(1).execution_options(stream_results=False),
False,
),
- ("for_update_expr", True, select([1]).with_for_update(), True),
+ ("for_update_expr", True, select(1).with_for_update(), True),
("for_update_string", True, "SELECT 1 FOR UPDATE", True),
("text_no_ss", False, text("select 42"), False),
(
def test_stmt_enabled_conn_option_disabled(self):
engine = self._fixture(False)
- s = select([1]).execution_options(stream_results=True)
+ s = select(1).execution_options(stream_results=True)
# not this one
result = (
def test_aliases_and_ss(self):
engine = self._fixture(False)
- s1 = select([1]).execution_options(stream_results=True).alias()
+ s1 = select(1).execution_options(stream_results=True).alias()
with engine.begin() as conn:
result = conn.execute(s1)
assert self._is_server_side(result.cursor)
# s1's options shouldn't affect s2 when s2 is used as a
# from_obj.
- s2 = select([1], from_obj=s1)
+ s2 = select(1).select_from(s1)
with engine.begin() as conn:
result = conn.execute(s2)
assert not self._is_server_side(result.cursor)
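
As the hunk above shows, the legacy ``from_obj`` argument becomes the generative ``Select.select_from()`` method; a minimal sketch::

    from sqlalchemy import column, select, table

    t = table("t", column("x"))
    subq = select(t.c.x).subquery()

    # formerly select([1], from_obj=subq)
    stmt = select(1).select_from(subq)
    print(stmt)
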
connection.execute(test_table.delete())
eq_(
connection.scalar(
- select([func.count("*")]).select_from(test_table)
+ select(func.count("*")).select_from(test_table)
),
0,
)
collation = testing.requires.get_order_by_collation(testing.config)
self._assert_result(
- select([self.tables.some_table]).order_by(
+ select(self.tables.some_table).order_by(
self.tables.some_table.c.data.collate(collation).asc()
),
[(1, "collate data1"), (2, "collate data2")],
def test_plain(self):
table = self.tables.some_table
lx = table.c.x.label("lx")
- self._assert_result(select([lx]).order_by(lx), [(1,), (2,), (3,)])
+ self._assert_result(select(lx).order_by(lx), [(1,), (2,), (3,)])
def test_composed_int(self):
table = self.tables.some_table
lx = (table.c.x + table.c.y).label("lx")
- self._assert_result(select([lx]).order_by(lx), [(3,), (5,), (7,)])
+ self._assert_result(select(lx).order_by(lx), [(3,), (5,), (7,)])
def test_composed_multiple(self):
table = self.tables.some_table
lx = (table.c.x + table.c.y).label("lx")
ly = (func.lower(table.c.q) + table.c.p).label("ly")
self._assert_result(
- select([lx, ly]).order_by(lx, ly.desc()),
+ select(lx, ly).order_by(lx, ly.desc()),
[(3, util.u("q1p3")), (5, util.u("q2p2")), (7, util.u("q3p1"))],
)
def test_plain_desc(self):
table = self.tables.some_table
lx = table.c.x.label("lx")
- self._assert_result(
- select([lx]).order_by(lx.desc()), [(3,), (2,), (1,)]
- )
+ self._assert_result(select(lx).order_by(lx.desc()), [(3,), (2,), (1,)])
def test_composed_int_desc(self):
table = self.tables.some_table
lx = (table.c.x + table.c.y).label("lx")
- self._assert_result(
- select([lx]).order_by(lx.desc()), [(7,), (5,), (3,)]
- )
+ self._assert_result(select(lx).order_by(lx.desc()), [(7,), (5,), (3,)])
@testing.requires.group_by_complex_expression
def test_group_by_composed(self):
table = self.tables.some_table
expr = (table.c.x + table.c.y).label("lx")
stmt = (
- select([func.count(table.c.id), expr])
- .group_by(expr)
- .order_by(expr)
+ select(func.count(table.c.id), expr).group_by(expr).order_by(expr)
)
self._assert_result(stmt, [(1, 3), (1, 5), (1, 7)])
def test_simple_limit(self):
table = self.tables.some_table
self._assert_result(
- select([table]).order_by(table.c.id).limit(2),
+ select(table).order_by(table.c.id).limit(2),
[(1, 1, 2), (2, 2, 3)],
)
def test_simple_offset(self):
table = self.tables.some_table
self._assert_result(
- select([table]).order_by(table.c.id).offset(2),
+ select(table).order_by(table.c.id).offset(2),
[(3, 3, 4), (4, 4, 5)],
)
def test_simple_limit_offset(self):
table = self.tables.some_table
self._assert_result(
- select([table]).order_by(table.c.id).limit(2).offset(1),
+ select(table).order_by(table.c.id).limit(2).offset(1),
[(2, 2, 3), (3, 3, 4)],
)
"""test that 'literal binds' mode works - no bound params."""
table = self.tables.some_table
- stmt = select([table]).order_by(table.c.id).limit(2).offset(1)
+ stmt = select(table).order_by(table.c.id).limit(2).offset(1)
sql = stmt.compile(
dialect=config.db.dialect, compile_kwargs={"literal_binds": True}
)
def test_bound_limit(self):
table = self.tables.some_table
self._assert_result(
- select([table]).order_by(table.c.id).limit(bindparam("l")),
+ select(table).order_by(table.c.id).limit(bindparam("l")),
[(1, 1, 2), (2, 2, 3)],
params={"l": 2},
)
def test_bound_offset(self):
table = self.tables.some_table
self._assert_result(
- select([table]).order_by(table.c.id).offset(bindparam("o")),
+ select(table).order_by(table.c.id).offset(bindparam("o")),
[(3, 3, 4), (4, 4, 5)],
params={"o": 2},
)
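
``Select.limit()`` and ``Select.offset()`` accept ``bindparam()`` constructs as shown above; a minimal sketch against an illustrative table::

    from sqlalchemy import bindparam, column, select, table

    t = table("some_table", column("id"), column("x"), column("y"))

    stmt = select(t).order_by(t.c.id).limit(bindparam("l")).offset(bindparam("o"))

    # the values are supplied at execution time, e.g.
    # connection.execute(stmt, {"l": 2, "o": 1})
    print(stmt)
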
def test_bound_limit_offset(self):
table = self.tables.some_table
self._assert_result(
- select([table])
+ select(table)
.order_by(table.c.id)
.limit(bindparam("l"))
.offset(bindparam("o")),
def test_expr_offset(self):
table = self.tables.some_table
self._assert_result(
- select([table])
+ select(table)
.order_by(table.c.id)
.offset(literal_column("1") + literal_column("2")),
[(4, 4, 5)],
def test_expr_limit(self):
table = self.tables.some_table
self._assert_result(
- select([table])
+ select(table)
.order_by(table.c.id)
.limit(literal_column("1") + literal_column("2")),
[(1, 1, 2), (2, 2, 3), (3, 3, 4)],
def test_expr_limit_offset(self):
table = self.tables.some_table
self._assert_result(
- select([table])
+ select(table)
.order_by(table.c.id)
.limit(literal_column("1") + literal_column("1"))
.offset(literal_column("1") + literal_column("1")),
def test_simple_limit_expr_offset(self):
table = self.tables.some_table
self._assert_result(
- select([table])
+ select(table)
.order_by(table.c.id)
.limit(2)
.offset(literal_column("1") + literal_column("1")),
def test_expr_limit_simple_offset(self):
table = self.tables.some_table
self._assert_result(
- select([table])
+ select(table)
.order_by(table.c.id)
.limit(literal_column("1") + literal_column("1"))
.offset(2),
def test_inner_join_fk(self):
a, b = self.tables("a", "b")
- stmt = select([a, b]).select_from(a.join(b)).order_by(a.c.id, b.c.id)
+ stmt = select(a, b).select_from(a.join(b)).order_by(a.c.id, b.c.id)
self._assert_result(stmt, [(1, 1, 1), (1, 2, 1), (2, 4, 2), (3, 5, 3)])
a, b = self.tables("a", "b")
stmt = (
- select([a, b])
+ select(a, b)
.select_from(a.join(b, true()))
.order_by(a.c.id, b.c.id)
)
a, b = self.tables("a", "b")
stmt = (
- select([a, b])
+ select(a, b)
.select_from(a.join(b, false()))
.order_by(a.c.id, b.c.id)
)
a, b = self.tables("a", "b")
stmt = (
- select([a, b])
+ select(a, b)
.select_from(a.outerjoin(b, false()))
.order_by(a.c.id, b.c.id)
)
def test_outer_join_fk(self):
a, b = self.tables("a", "b")
- stmt = select([a, b]).select_from(a.join(b)).order_by(a.c.id, b.c.id)
+ stmt = select(a, b).select_from(a.join(b)).order_by(a.c.id, b.c.id)
self._assert_result(stmt, [(1, 1, 1), (1, 2, 1), (2, 4, 2), (3, 5, 3)])
def test_plain_union(self):
table = self.tables.some_table
- s1 = select([table]).where(table.c.id == 2)
- s2 = select([table]).where(table.c.id == 3)
+ s1 = select(table).where(table.c.id == 2)
+ s2 = select(table).where(table.c.id == 3)
u1 = union(s1, s2)
self._assert_result(
def test_select_from_plain_union(self):
table = self.tables.some_table
- s1 = select([table]).where(table.c.id == 2)
- s2 = select([table]).where(table.c.id == 3)
+ s1 = select(table).where(table.c.id == 2)
+ s2 = select(table).where(table.c.id == 3)
u1 = union(s1, s2).alias().select()
self._assert_result(
@testing.requires.parens_in_union_contained_select_w_limit_offset
def test_limit_offset_selectable_in_unions(self):
table = self.tables.some_table
- s1 = (
- select([table])
- .where(table.c.id == 2)
- .limit(1)
- .order_by(table.c.id)
- )
- s2 = (
- select([table])
- .where(table.c.id == 3)
- .limit(1)
- .order_by(table.c.id)
- )
+ s1 = select(table).where(table.c.id == 2).limit(1).order_by(table.c.id)
+ s2 = select(table).where(table.c.id == 3).limit(1).order_by(table.c.id)
u1 = union(s1, s2).limit(2)
self._assert_result(
@testing.requires.parens_in_union_contained_select_wo_limit_offset
def test_order_by_selectable_in_unions(self):
table = self.tables.some_table
- s1 = select([table]).where(table.c.id == 2).order_by(table.c.id)
- s2 = select([table]).where(table.c.id == 3).order_by(table.c.id)
+ s1 = select(table).where(table.c.id == 2).order_by(table.c.id)
+ s2 = select(table).where(table.c.id == 3).order_by(table.c.id)
u1 = union(s1, s2).limit(2)
self._assert_result(
def test_distinct_selectable_in_unions(self):
table = self.tables.some_table
- s1 = select([table]).where(table.c.id == 2).distinct()
- s2 = select([table]).where(table.c.id == 3).distinct()
+ s1 = select(table).where(table.c.id == 2).distinct()
+ s2 = select(table).where(table.c.id == 3).distinct()
u1 = union(s1, s2).limit(2)
self._assert_result(
@testing.requires.parens_in_union_contained_select_w_limit_offset
def test_limit_offset_in_unions_from_alias(self):
table = self.tables.some_table
- s1 = (
- select([table])
- .where(table.c.id == 2)
- .limit(1)
- .order_by(table.c.id)
- )
- s2 = (
- select([table])
- .where(table.c.id == 3)
- .limit(1)
- .order_by(table.c.id)
- )
+ s1 = select(table).where(table.c.id == 2).limit(1).order_by(table.c.id)
+ s2 = select(table).where(table.c.id == 3).limit(1).order_by(table.c.id)
# this necessarily has double parens
u1 = union(s1, s2).alias()
def test_limit_offset_aliased_selectable_in_unions(self):
table = self.tables.some_table
s1 = (
- select([table])
+ select(table)
.where(table.c.id == 2)
.limit(1)
.order_by(table.c.id)
.select()
)
s2 = (
- select([table])
+ select(table)
.where(table.c.id == 3)
.limit(1)
.order_by(table.c.id)
def test_compile(self):
table = self.tables.some_table
- stmt = select([table.c.id]).where(
+ stmt = select(table.c.id).where(
table.c.x == bindparam("q", literal_execute=True)
)
def test_compile_literal_binds(self):
table = self.tables.some_table
- stmt = select([table.c.id]).where(
+ stmt = select(table.c.id).where(
table.c.x == bindparam("q", 10, literal_execute=True)
)
def test_execute(self):
table = self.tables.some_table
- stmt = select([table.c.id]).where(
+ stmt = select(table.c.id).where(
table.c.x == bindparam("q", literal_execute=True)
)
def test_execute_expanding_plus_literal_execute(self):
table = self.tables.some_table
- stmt = select([table.c.id]).where(
+ stmt = select(table.c.id).where(
table.c.x.in_(bindparam("q", expanding=True, literal_execute=True))
)
def test_execute_tuple_expanding_plus_literal_execute(self):
table = self.tables.some_table
- stmt = select([table.c.id]).where(
+ stmt = select(table.c.id).where(
tuple_(table.c.x, table.c.y).in_(
bindparam("q", expanding=True, literal_execute=True)
)
def test_execute_tuple_expanding_plus_literal_heterogeneous_execute(self):
table = self.tables.some_table
- stmt = select([table.c.id]).where(
+ stmt = select(table.c.id).where(
tuple_(table.c.x, table.c.z).in_(
bindparam("q", expanding=True, literal_execute=True)
)
table = self.tables.some_table
stmt = (
- select([table.c.id])
+ select(table.c.id)
.where(table.c.x.in_(bindparam("q", expanding=True)))
.where(table.c.y.in_(bindparam("p", expanding=True)))
.order_by(table.c.id)
table = self.tables.some_table
stmt = (
- select([table.c.id])
+ select(table.c.id)
.where(
tuple_(table.c.x, table.c.z).in_(
bindparam("q", expanding=True)
table = self.tables.some_table
stmt = (
- select([table.c.id])
+ select(table.c.id)
.where(
tuple_(table.c.x, table.c.y).in_(
bindparam("q", expanding=True)
table = self.tables.some_table
stmt = (
- select([table.c.id])
+ select(table.c.id)
.where(table.c.x.in_(bindparam("q", expanding=True)))
.order_by(table.c.id)
)
table = self.tables.some_table
stmt = (
- select([table.c.id])
+ select(table.c.id)
.where(
tuple_(table.c.x, table.c.y).in_(
bindparam("q", expanding=True)
table = self.tables.some_table
stmt = (
- select([table.c.id])
+ select(table.c.id)
.where(
tuple_(table.c.x, table.c.z).in_(
bindparam("q", expanding=True)
table = self.tables.some_table
stmt = (
- select([table.c.id])
+ select(table.c.id)
.where(table.c.x.in_(bindparam("q", expanding=True)))
.order_by(table.c.id)
)
table = self.tables.some_table
stmt = (
- select([table.c.id])
+ select(table.c.id)
.where(table.c.x.notin_(bindparam("q", expanding=True)))
.order_by(table.c.id)
)
table = self.tables.some_table
stmt = (
- select([table.c.id])
+ select(table.c.id)
.where(table.c.z.in_(bindparam("q", expanding=True)))
.order_by(table.c.id)
)
table = self.tables.some_table
stmt = (
- select([table.c.id])
+ select(table.c.id)
.where(table.c.z.notin_(bindparam("q", expanding=True)))
.order_by(table.c.id)
)
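
These IN tests rely on the ``expanding`` flag of ``bindparam()``, which renders a parameter list supplied at execution time; roughly, with an illustrative table::

    from sqlalchemy import bindparam, column, select, table

    t = table("some_table", column("id"), column("x"))

    stmt = (
        select(t.c.id)
        .where(t.c.x.in_(bindparam("q", expanding=True)))
        .order_by(t.c.id)
    )

    # the list of values is passed when executing, e.g.
    # connection.execute(stmt, {"q": [2, 3, 4]})
    print(stmt)
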
def test_null_in_empty_set_is_false(self, connection):
stmt = select(
- [
- case(
- [
- (
- null().in_(
- bindparam("foo", value=(), expanding=True)
- ),
- true(),
- )
- ],
- else_=false(),
- )
- ]
+ case(
+ [
+ (
+ null().in_(bindparam("foo", value=(), expanding=True)),
+ true(),
+ )
+ ],
+ else_=false(),
+ )
)
in_(connection.execute(stmt).fetchone()[0], (False, 0))
with config.db.connect() as conn:
rows = {
value
- for value, in conn.execute(
- select([some_table.c.id]).where(expr)
- )
+ for value, in conn.execute(select(some_table.c.id).where(expr))
}
eq_(rows, expected)
def test_select_all(self):
with config.db.connect() as conn:
res = conn.execute(
- select([text("*")])
+ select(text("*"))
.select_from(self.tables.square)
.order_by(self.tables.square.c.id)
).fetchall()
with config.db.connect() as conn:
res = conn.execute(
select(
- [self.tables.square.c.area, self.tables.square.c.perimeter]
+ self.tables.square.c.area, self.tables.square.c.perimeter
)
.select_from(self.tables.square)
.order_by(self.tables.square.c.id)
def test_select_all(self, connection):
res = connection.execute(
- select([text("*")])
+ select(text("*"))
.select_from(self.tables.tbl_a)
.order_by(self.tables.tbl_a.c.id)
).fetchall()
eq_(res, [(42, "a"), (43, "b")])
res = connection.execute(
- select([text("*")])
+ select(text("*"))
.select_from(self.tables.tbl_b)
.order_by(self.tables.tbl_b.c.id)
).fetchall()
def test_select_columns(self, connection):
res = connection.execute(
- select([self.tables.tbl_a.c.id]).order_by(self.tables.tbl_a.c.id)
+ select(self.tables.tbl_a.c.id).order_by(self.tables.tbl_a.c.id)
).fetchall()
eq_(res, [(42,), (43,)])
@testing.fails_if(testing.requires.supports_distinct_on)
def test_distinct_on(self):
- stm = select(["*"]).distinct(column("q")).select_from(table("foo"))
+ stm = select("*").distinct(column("q")).select_from(table("foo"))
with testing.expect_deprecated(
"DISTINCT ON is currently supported only by the PostgreSQL "
):
def test_select(self):
# give some of the cached type values
# a chance to warm up
- s = select([t1], t1.c.c2 == t2.c.c1)
+ s = select(t1).where(t1.c.c2 == t2.c.c1)
s.compile(dialect=self.dialect)
@profiling.function_call_count(variance=0.15, warmup=1)
def go():
- s = select([t1], t1.c.c2 == t2.c.c1)
+ s = select(t1).where(t1.c.c2 == t2.c.c1)
s.compile(dialect=self.dialect)
go()
def test_select_labels(self):
# give some of the cached type values
# a chance to warm up
- s = select([t1], t1.c.c2 == t2.c.c1).apply_labels()
+ s = select(t1).where(t1.c.c2 == t2.c.c1).apply_labels()
s.compile(dialect=self.dialect)
@profiling.function_call_count(variance=0.15, warmup=1)
def go():
- s = select([t1], t1.c.c2 == t2.c.c1).apply_labels()
+ s = select(t1).where(t1.c.c2 == t2.c.c1).apply_labels()
s.compile(dialect=self.dialect)
go()
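
The profiling fixtures above also illustrate that the old second positional ``whereclause`` argument moves to the generative ``Select.where()`` method; a minimal equivalent::

    from sqlalchemy import column, select, table

    t1 = table("t1", column("c1"), column("c2"))
    t2 = table("t2", column("c1"), column("c2"))

    # formerly select([t1], t1.c.c2 == t2.c.c1)
    stmt = select(t1).where(t1.c.c2 == t2.c.c1)
    print(stmt)
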
return [
(
- select([Parent.id, Child.id])
+ select(Parent.id, Child.id)
.select_from(ormjoin(Parent, Child, Parent.children))
.where(Child.id == 5)
)
column("description", String),
)
- q = select([table1.c.myid], order_by=[table1.c.myid]).alias("foo")
+ q = select(table1.c.myid).order_by(table1.c.myid).alias("foo")
crit = q.c.myid == table1.c.myid
self.assert_compile(
- select(["*"], crit),
+ select("*").where(crit),
"SELECT * FROM (SELECT mytable.myid AS "
"myid FROM mytable) AS foo, mytable WHERE "
"foo.myid = mytable.myid",
)
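
Likewise the ``order_by`` keyword argument accepted by the old constructor is spelled generatively before aliasing; a minimal sketch::

    from sqlalchemy import column, select, table

    mytable = table("mytable", column("myid"))

    # formerly select([mytable.c.myid], order_by=[mytable.c.myid]).alias("foo")
    q = select(mytable.c.myid).order_by(mytable.c.myid).alias("foo")
    print(select(q.c.myid))
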
q = (
- select([table1.c.myid], order_by=[table1.c.myid])
+ select(table1.c.myid)
+ .order_by(table1.c.myid)
.limit(10)
.alias("foo")
)
crit = q.c.myid == table1.c.myid
self.assert_compile(
- select(["*"], crit),
+ select("*").where(crit),
"SELECT * FROM (SELECT TOP [POSTCOMPILE_param_1] mytable.myid AS "
"myid FROM mytable ORDER BY mytable.myid) AS foo, mytable WHERE "
"foo.myid = mytable.myid",
)
q = (
- select([table1.c.myid], order_by=[table1.c.myid])
+ select(table1.c.myid)
+ .order_by(table1.c.myid)
.offset(10)
.alias("foo")
)
crit = q.c.myid == table1.c.myid
self.assert_compile(
- select(["*"], crit),
+ select("*").where(crit),
"SELECT * FROM (SELECT anon_1.myid AS myid FROM "
"(SELECT mytable.myid AS myid, ROW_NUMBER() OVER (ORDER BY "
"mytable.myid) AS mssql_rn FROM mytable) AS anon_1 "
)
q = (
- select([table1.c.myid], order_by=[table1.c.myid])
+ select(table1.c.myid)
+ .order_by(table1.c.myid)
.offset(10)
.alias("foo")
)
dialect = mssql.dialect()
dialect._supports_offset_fetch = True
self.assert_compile(
- select(["*"], crit),
+ select("*").where(crit),
"SELECT * FROM (SELECT mytable.myid AS myid FROM mytable "
"ORDER BY mytable.myid OFFSET :param_1 ROWS) AS foo, "
"mytable WHERE foo.myid = mytable.myid",
)
q = (
- select([table1.c.myid], order_by=[table1.c.myid])
+ select(table1.c.myid)
+ .order_by(table1.c.myid)
.limit(10)
.offset(10)
.alias("foo")
dialect = mssql.dialect()
dialect._supports_offset_fetch = True
self.assert_compile(
- select(["*"], crit),
+ select("*").where(crit),
"SELECT * FROM (SELECT mytable.myid AS myid FROM mytable "
"ORDER BY mytable.myid OFFSET :param_1 ROWS "
"FETCH NEXT :param_2 ROWS ONLY ) AS foo, "
column("description", String),
)
- q = select(
- [table1.c.myid, sql.literal("bar").label("c1")],
- order_by=[table1.c.name + "-"],
- ).alias("foo")
+ q = (
+ select(table1.c.myid, sql.literal("bar").label("c1"))
+ .order_by(table1.c.name + "-")
+ .alias("foo")
+ )
crit = q.c.myid == table1.c.myid
dialect = mssql.dialect()
dialect.paramstyle = "qmark"
dialect.positional = True
self.assert_compile(
- select(["*"], crit),
+ select("*").where(crit),
"SELECT * FROM (SELECT mytable.myid AS "
"myid, ? AS c1 FROM mytable) AS foo, mytable WHERE "
"foo.myid = mytable.myid",
column("col4"),
)
s1, s2 = (
- select(
- [t1.c.col3.label("col3"), t1.c.col4.label("col4")],
+ select(t1.c.col3.label("col3"), t1.c.col4.label("col4")).where(
t1.c.col2.in_(["t1col2r1", "t1col2r2"]),
),
- select(
- [t2.c.col3.label("col3"), t2.c.col4.label("col4")],
+ select(t2.c.col3.label("col3"), t2.c.col4.label("col4")).where(
t2.c.col2.in_(["t2col2r2", "t2col2r3"]),
),
)
def test_column_subquery_to_alias(self):
a1 = self.t2.alias("a1")
- s = select([self.t2, select(a1.c.a).scalar_subquery()])
+ s = select(self.t2, select(a1.c.a).scalar_subquery())
self._assert_sql(
s,
"SELECT t2_1.a, t2_1.b, t2_1.c, "
dialect = self.__dialect__
def gen(distinct=None, prefixes=None):
- kw = {}
- if distinct is not None:
- kw["distinct"] = distinct
+ stmt = select(column("q"))
+ if distinct:
+ stmt = stmt.distinct()
if prefixes is not None:
- kw["prefixes"] = prefixes
- return str(select([column("q")], **kw).compile(dialect=dialect))
+ stmt = stmt.prefix_with(*prefixes)
+
+ return str(stmt.compile(dialect=dialect))
eq_(gen(None), "SELECT q")
eq_(gen(True), "SELECT DISTINCT q")
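
The ``distinct`` and ``prefixes`` keyword arguments handled by the ``gen()`` helper above translate to the ``distinct()`` and ``prefix_with()`` methods; roughly (``HIGH_PRIORITY`` is just an arbitrary prefix string here)::

    from sqlalchemy import column, select

    print(select(column("q")).distinct())                    # SELECT DISTINCT q
    print(select(column("q")).prefix_with("HIGH_PRIORITY"))  # SELECT HIGH_PRIORITY q
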
def test_any_literal(self, connection):
stuff = self.tables.stuff
- stmt = select([4 == any_(select(stuff.c.value).scalar_subquery())])
+ stmt = select(4 == any_(select(stuff.c.value).scalar_subquery()))
is_(connection.execute(stmt).scalar(), True)
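
The ``any_()`` comparison above works as a plain expression against a scalar subquery; a standalone sketch with an illustrative table::

    from sqlalchemy import any_, column, select, table

    stuff = table("stuff", column("value"))

    # renders roughly: SELECT :param_1 = ANY (SELECT stuff.value FROM stuff) AS anon_1
    stmt = select(4 == any_(select(stuff.c.value).scalar_subquery()))
    print(stmt)
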
def test_outer_join_one(self):
table1, table2, table3 = self._test_outer_join_fixture()
- query = select(
- [table1, table2],
- or_(
- table1.c.name == "fred",
- table1.c.myid == 10,
- table2.c.othername != "jack",
- text("EXISTS (select yay from foo where boo = lar)"),
- ),
- from_obj=[
+ query = (
+ select(table1, table2)
+ .where(
+ or_(
+ table1.c.name == "fred",
+ table1.c.myid == 10,
+ table2.c.othername != "jack",
+ text("EXISTS (select yay from foo where boo = lar)"),
+ )
+ )
+ .select_from(
outerjoin(table1, table2, table1.c.myid == table2.c.otherid)
- ],
+ )
)
self.assert_compile(
query,
series = func.generate_series(1, 100).alias("series")
series_col = column("series")
query = select(
- [func.array_agg(series_col).filter(series_col % 2 == 0)[3]]
+ func.array_agg(series_col).filter(series_col % 2 == 0)[3]
).select_from(series)
self.assert_compile(
query,
def test_plain_inline(self):
self.assert_compile(
- select([self.table], distinct=True),
+ select(self.table).distinct(),
"SELECT DISTINCT t.id, t.a, t.b FROM t",
)
def test_on_columns_inline_list(self):
self.assert_compile(
- select(
- [self.table], distinct=[self.table.c.a, self.table.c.b]
- ).order_by(self.table.c.a, self.table.c.b),
+ select(self.table)
+ .distinct(self.table.c.a, self.table.c.b)
+ .order_by(self.table.c.a, self.table.c.b),
"SELECT DISTINCT ON (t.a, t.b) t.id, "
"t.a, t.b FROM t ORDER BY t.a, t.b",
)
def test_on_columns_inline_scalar(self):
self.assert_compile(
- select([self.table], distinct=self.table.c.a),
+ select(self.table).distinct(self.table.c.a),
"SELECT DISTINCT ON (t.a) t.id, t.a, t.b FROM t",
)
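
On the PostgreSQL dialect the same ``distinct()`` method accepts columns to render DISTINCT ON, replacing the old ``distinct=<columns>`` keyword; roughly::

    from sqlalchemy import column, select, table
    from sqlalchemy.dialects import postgresql

    t = table("t", column("id"), column("a"), column("b"))

    stmt = select(t).distinct(t.c.a, t.c.b).order_by(t.c.a, t.c.b)
    # SELECT DISTINCT ON (t.a, t.b) t.id, t.a, t.b FROM t ORDER BY t.a, t.b
    print(stmt.compile(dialect=postgresql.dialect()))
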
eq_(
connection.execute(
select(
- [
- tuple_(
- literal_column("'a'"), literal_column("'b'")
- ).in_(
- [
- tuple_(
- *[
- literal_column("'%s'" % letter)
- for letter in elem
- ]
- )
- for elem in test
- ]
- )
- ]
+ tuple_(
+ literal_column("'a'"), literal_column("'b'")
+ ).in_(
+ [
+ tuple_(
+ *[
+ literal_column("'%s'" % letter)
+ for letter in elem
+ ]
+ )
+ for elem in test
+ ]
+ )
)
).scalar(),
exp,
assert somedate.tzinfo
connection.execute(tztable.insert(), id=1, name="row1", date=somedate)
row = connection.execute(
- select([tztable.c.date], tztable.c.id == 1)
+ select(tztable.c.date).where(tztable.c.id == 1)
).first()
eq_(row[0], somedate)
eq_(
notztable.insert(), id=1, name="row1", date=somedate
)
row = connection.execute(
- select([notztable.c.date], notztable.c.id == 1)
+ select(notztable.c.date).where(notztable.c.id == 1)
).first()
eq_(row[0], somedate)
eq_(row[0].tzinfo, None)
def test_array_int_index(self):
col = column("x", postgresql.ARRAY(Integer))
self.assert_compile(
- select([col[3]]),
+ select(col[3]),
"SELECT x[%(x_1)s] AS anon_1",
checkparams={"x_1": 3},
)
def test_array_contains(self):
col = column("x", postgresql.ARRAY(Integer))
self.assert_compile(
- select([col.contains(array([4, 5, 6]))]),
+ select(col.contains(array([4, 5, 6]))),
"SELECT x @> ARRAY[%(param_1)s, %(param_2)s, %(param_3)s] "
"AS anon_1",
checkparams={"param_1": 4, "param_3": 6, "param_2": 5},
def test_array_contained_by(self):
col = column("x", postgresql.ARRAY(Integer))
self.assert_compile(
- select([col.contained_by(array([4, 5, 6]))]),
+ select(col.contained_by(array([4, 5, 6]))),
"SELECT x <@ ARRAY[%(param_1)s, %(param_2)s, %(param_3)s] "
"AS anon_1",
checkparams={"param_1": 4, "param_3": 6, "param_2": 5},
def test_array_overlap(self):
col = column("x", postgresql.ARRAY(Integer))
self.assert_compile(
- select([col.overlap(array([4, 5, 6]))]),
+ select(col.overlap(array([4, 5, 6]))),
"SELECT x && ARRAY[%(param_1)s, %(param_2)s, %(param_3)s] "
"AS anon_1",
checkparams={"param_1": 4, "param_3": 6, "param_2": 5},
def test_array_slice_index(self):
col = column("x", postgresql.ARRAY(Integer))
self.assert_compile(
- select([col[5:10]]),
+ select(col[5:10]),
"SELECT x[%(x_1)s:%(x_2)s] AS anon_1",
checkparams={"x_2": 10, "x_1": 5},
)
def test_array_dim_index(self):
col = column("x", postgresql.ARRAY(Integer, dimensions=2))
self.assert_compile(
- select([col[3][5]]),
+ select(col[3][5]),
"SELECT x[%(x_1)s][%(param_1)s] AS anon_1",
checkparams={"x_1": 3, "param_1": 5},
)
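
ARRAY index and slice access shown in these tests can be reproduced standalone; a sketch compiled against the PostgreSQL dialect::

    from sqlalchemy import Integer, column, select
    from sqlalchemy.dialects import postgresql

    col = column("x", postgresql.ARRAY(Integer))

    # x[...] and x[...:...] expressions in the columns clause
    stmt = select(col[3], col[5:10])
    print(stmt.compile(dialect=postgresql.dialect()))
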
"""
stmt = select(
- [
- func.array_cat(
- array([1, 2, 3]),
- array([4, 5, 6]),
- type_=postgresql.ARRAY(Integer),
- )[2:5]
- ]
+ func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=postgresql.ARRAY(Integer),
+ )[2:5]
)
self.assert_compile(
stmt,
stmt = select(func.array_agg(values_table.c.value))
eq_(connection.execute(stmt).scalar(), list(range(1, 10)))
- stmt = select([func.array_agg(values_table.c.value)[3]])
+ stmt = select(func.array_agg(values_table.c.value)[3])
eq_(connection.execute(stmt).scalar(), 3)
- stmt = select([func.array_agg(values_table.c.value)[2:4]])
+ stmt = select(func.array_agg(values_table.c.value)[2:4])
eq_(connection.execute(stmt).scalar(), [2, 3, 4])
def test_array_index_slice_exprs(self, connection):
"""test a variety of expressions that sometimes need parenthesizing"""
- stmt = select([array([1, 2, 3, 4])[2:3]])
+ stmt = select(array([1, 2, 3, 4])[2:3])
eq_(connection.execute(stmt).scalar(), [2, 3])
- stmt = select([array([1, 2, 3, 4])[2]])
+ stmt = select(array([1, 2, 3, 4])[2])
eq_(connection.execute(stmt).scalar(), 2)
- stmt = select([(array([1, 2]) + array([3, 4]))[2:3]])
+ stmt = select((array([1, 2]) + array([3, 4]))[2:3])
eq_(connection.execute(stmt).scalar(), [2, 3])
- stmt = select([array([1, 2]) + array([3, 4])[2:3]])
+ stmt = select(array([1, 2]) + array([3, 4])[2:3])
eq_(connection.execute(stmt).scalar(), [1, 2, 4])
- stmt = select([array([1, 2])[2:3] + array([3, 4])])
+ stmt = select(array([1, 2])[2:3] + array([3, 4]))
eq_(connection.execute(stmt).scalar(), [2, 3, 4])
stmt = select(
- [
- func.array_cat(
- array([1, 2, 3]),
- array([4, 5, 6]),
- type_=self.ARRAY(Integer),
- )[2:5]
- ]
+ func.array_cat(
+ array([1, 2, 3]), array([4, 5, 6]), type_=self.ARRAY(Integer),
+ )[2:5]
)
eq_(connection.execute(stmt).scalar(), [2, 3, 4, 5])
def test_any_all_exprs_array(self, connection):
stmt = select(
- [
- 3
- == any_(
- func.array_cat(
- array([1, 2, 3]),
- array([4, 5, 6]),
- type_=self.ARRAY(Integer),
- )
+ 3
+ == any_(
+ func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=self.ARRAY(Integer),
)
- ]
+ )
)
eq_(connection.execute(stmt).scalar(), True)
strarr=[util.u("abc"), util.u("def")],
)
results = connection.execute(
- select([arrtable.c.intarr + [4, 5, 6]])
+ select(arrtable.c.intarr + [4, 5, 6])
).fetchall()
eq_(len(results), 1)
eq_(results[0][0], [1, 2, 3, 4, 5, 6])
def test_array_literal_roundtrip(self, connection):
eq_(
connection.scalar(
- select(
- [postgresql.array([1, 2]) + postgresql.array([3, 4, 5])]
- )
+ select(postgresql.array([1, 2]) + postgresql.array([3, 4, 5]))
),
[1, 2, 3, 4, 5],
)
eq_(
connection.scalar(
select(
- [
- (
- postgresql.array([1, 2])
- + postgresql.array([3, 4, 5])
- )[3]
- ]
+ (postgresql.array([1, 2]) + postgresql.array([3, 4, 5]))[3]
)
),
3,
eq_(
connection.scalar(
select(
- [
- (
- postgresql.array([1, 2])
- + postgresql.array([3, 4, 5])
- )[2:4]
+ (postgresql.array([1, 2]) + postgresql.array([3, 4, 5]))[
+ 2:4
]
)
),
eq_(
connection.scalar(
select(
- [
- postgresql.array(
- [
- postgresql.array([1, 2]),
- postgresql.array([3, 4]),
- ]
- )
- ]
+ postgresql.array(
+ [postgresql.array([1, 2]), postgresql.array([3, 4])]
+ )
)
),
[[1, 2], [3, 4]],
eq_(
connection.scalar(
select(
- [
- postgresql.array(
- [
- postgresql.array([1, 2]),
- postgresql.array([3, 4]),
- ]
- )[2][1]
- ]
+ postgresql.array(
+ [postgresql.array([1, 2]), postgresql.array([3, 4])]
+ )[2][1]
)
),
3,
def test_array_literal_compare(self, connection):
eq_(
- connection.scalar(select([postgresql.array([1, 2]) < [3, 4, 5]])),
+ connection.scalar(select(postgresql.array([1, 2]) < [3, 4, 5])),
True,
)
def test_array_getitem_single_exec(self, connection):
arrtable = self.tables.arrtable
self._fixture_456(arrtable)
- eq_(connection.scalar(select([arrtable.c.intarr[2]])), 5)
+ eq_(connection.scalar(select(arrtable.c.intarr[2])), 5)
connection.execute(arrtable.update().values({arrtable.c.intarr[2]: 7}))
- eq_(connection.scalar(select([arrtable.c.intarr[2]])), 7)
+ eq_(connection.scalar(select(arrtable.c.intarr[2])), 7)
def test_array_getitem_slice_exec(self, connection):
arrtable = self.tables.arrtable
intarr=[4, 5, 6],
strarr=[util.u("abc"), util.u("def")],
)
- eq_(connection.scalar(select([arrtable.c.intarr[2:3]])), [5, 6])
+ eq_(connection.scalar(select(arrtable.c.intarr[2:3])), [5, 6])
connection.execute(
arrtable.update().values({arrtable.c.intarr[2:3]: [7, 8]})
)
- eq_(connection.scalar(select([arrtable.c.intarr[2:3]])), [7, 8])
+ eq_(connection.scalar(select(arrtable.c.intarr[2:3])), [7, 8])
def test_multi_dim_roundtrip(self, connection):
arrtable = self.tables.arrtable
connection.execute(arrtable.insert(), intarr=[6, 5, 4])
eq_(
connection.scalar(
- select([arrtable.c.intarr.contained_by([4, 5, 6, 7])])
+ select(arrtable.c.intarr.contained_by([4, 5, 6, 7]))
),
True,
)
def _test_fixed_round_trip(self, engine):
with engine.begin() as conn:
s = select(
- [
- hstore(
- array(["key1", "key2", "key3"]),
- array(["value1", "value2", "value3"]),
- )
- ]
+ hstore(
+ array(["key1", "key2", "key3"]),
+ array(["value1", "value2", "value3"]),
+ )
)
eq_(
conn.scalar(s),
def _test_unicode_round_trip(self, engine):
with engine.begin() as conn:
s = select(
- [
- hstore(
- array(
- [
- util.u("réveillé"),
- util.u("drôle"),
- util.u("S’il"),
- ]
- ),
- array(
- [
- util.u("réveillé"),
- util.u("drôle"),
- util.u("S’il"),
- ]
- ),
- )
- ]
+ hstore(
+ array(
+ [util.u("réveillé"), util.u("drôle"), util.u("S’il")]
+ ),
+ array(
+ [util.u("réveillé"), util.u("drôle"), util.u("S’il")]
+ ),
+ )
)
eq_(
conn.scalar(s),
self._fixture_data(engine)
data_table = self.tables.data_table
result = connection.execute(
- select([data_table.c.data["k1"].astext])
+ select(data_table.c.data["k1"].astext)
).first()
if engine.dialect.returns_unicode_strings:
assert isinstance(result[0], util.text_type)
self._fixture_data(engine)
data_table = self.tables.data_table
result = connection.execute(
- select([data_table.c.data["k3"].astext.cast(Integer)]).where(
+ select(data_table.c.data["k3"].astext.cast(Integer)).where(
data_table.c.name == "r5"
)
).first()
metadata.create_all()
t.insert(values=dict(name="dante")).execute()
t.insert(values=dict(name="alighieri")).execute()
- select(
- [func.count(t.c.id)], func.length(t.c.name) == 5
+ select(func.count(t.c.id)).where(
+ func.length(t.c.name) == 5
).execute().first()[0] == 1
def test_version_parsing(self):
with testing.db.connect() as conn:
conn.execute(sqlite_json.insert(), foo=value)
- eq_(
- conn.scalar(select([sqlite_json.c.foo["json"]])), value["json"]
- )
+ eq_(conn.scalar(select(sqlite_json.c.foo["json"])), value["json"])
@testing.provide_metadata
def test_deprecated_serializer_args(self):
):
testing.db.execute(stmt)
- stmt = select([table])
+ stmt = select(table)
with testing.expect_deprecated_20(
r"The Engine.execute\(\) function/method is considered legacy",
):
):
stmt.execute()
- stmt = select([table])
+ stmt = select(table)
with testing.expect_deprecated_20(
r"The Executable.execute\(\) function/method is considered legacy",
):
def select1(db):
- return str(select([1]).compile(dialect=db.dialect))
+ return str(select(1).compile(dialect=db.dialect))
class DeprecatedEngineFeatureTest(fixtures.TablesTest):
r"The argument signature for the "
r"\"ConnectionEvents.before_execute\" event listener",
):
- engine.execute(select([1]))
+ engine.execute(select(1))
eq_(canary, ["execute", "cursor_execute"])
def test_argument_format_execute(self):
r"The argument signature for the "
r"\"ConnectionEvents.after_execute\" event listener",
):
- e1.execute(select([1]))
+ e1.execute(select(1))
m1 = Mock()
def select1(db):
- return str(select([1]).compile(dialect=db.dialect))
+ return str(select(1).compile(dialect=db.dialect))
with testing.db.connect() as conn:
event.listen(conn, "before_execute", m1.before_execute)
r"foo.data = \? OR foo.data = \?\]\n"
r"\[SQL parameters hidden due to hide_parameters=True\]",
conn.execute,
- select([foo]).where(
+ select(foo).where(
or_(
foo.c.data == bindparam("the_data_1"),
foo.c.data == bindparam("the_data_2"),
__requires__ = ("ad_hoc_engines",)
def _assert_names_in_execute(self, eng, eng_name, pool_name):
- eng.execute(select([1]))
+ eng.execute(select(1))
assert self.buf.buffer
for name in [b.name for b in self.buf.buffer]:
assert name in (
)
def _assert_no_name_in_execute(self, eng):
- eng.execute(select([1]))
+ eng.execute(select(1))
assert self.buf.buffer
for name in [b.name for b in self.buf.buffer]:
assert name in (
def test_named_logger_names_after_dispose(self):
eng = self._named_engine()
- eng.execute(select([1]))
+ eng.execute(select(1))
eng.dispose()
eq_(eng.logging_name, "myenginename")
eq_(eng.pool.logging_name, "mypoolname")
def test_named_logger_execute_after_dispose(self):
eng = self._named_engine()
- eng.execute(select([1]))
+ eng.execute(select(1))
eng.dispose()
self._assert_names_in_execute(eng, "myenginename", "mypoolname")
# do an initial execute to clear out 'first connect'
# messages
- e.execute(select([10])).close()
+ e.execute(select(10)).close()
self.buf.flush()
return e
e2 = self._testing_engine()
e1.echo = True
- e1.execute(select([1])).close()
- e2.execute(select([2])).close()
+ e1.execute(select(1)).close()
+ e2.execute(select(2)).close()
e1.echo = False
- e1.execute(select([3])).close()
- e2.execute(select([4])).close()
+ e1.execute(select(3)).close()
+ e2.execute(select(4)).close()
e2.echo = True
- e1.execute(select([5])).close()
- e2.execute(select([6])).close()
+ e1.execute(select(5)).close()
+ e2.execute(select(6)).close()
assert self.buf.buffer[0].getMessage().startswith("SELECT 1")
assert self.buf.buffer[2].getMessage().startswith("SELECT 6")
event.listen(engine, "connect", listen_three)
event.listen(engine.__class__, "connect", listen_four)
- engine.execute(select([1])).close()
+ engine.execute(select(1)).close()
eq_(
canary, ["listen_one", "listen_four", "listen_two", "listen_three"]
)
autoload_with=testing.db,
)
u2 = Table("users", meta2, autoload_with=testing.db)
- s = sa.select([a2]).subquery()
+ s = sa.select(a2).subquery()
assert s.c.user_id is not None
assert len(a2.foreign_keys) == 1
Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
autoload_with=testing.db,
)
- s = sa.select([a2]).subquery()
+ s = sa.select(a2).subquery()
assert s.c.user_id is not None
assert len(a2.foreign_keys) == 1
transaction.commit()
eq_(
connection.execute(
- select([users.c.user_id]).order_by(users.c.user_id)
+ select(users.c.user_id).order_by(users.c.user_id)
).fetchall(),
[(1,), (3,)],
)
transaction.commit()
eq_(
connection.execute(
- select([users.c.user_id]).order_by(users.c.user_id)
+ select(users.c.user_id).order_by(users.c.user_id)
).fetchall(),
[(1,), (2,), (3,)],
)
transaction.commit()
eq_(
connection.execute(
- select([users.c.user_id]).order_by(users.c.user_id)
+ select(users.c.user_id).order_by(users.c.user_id)
).fetchall(),
[(1,), (4,)],
)
transaction.close()
eq_(
connection.execute(
- select([users.c.user_id]).order_by(users.c.user_id)
+ select(users.c.user_id).order_by(users.c.user_id)
).fetchall(),
[(1,), (2,)],
)
transaction.commit()
eq_(
connection.execute(
- select([users.c.user_id]).order_by(users.c.user_id)
+ select(users.c.user_id).order_by(users.c.user_id)
).fetchall(),
[(1,), (2,), (5,)],
)
with testing.db.connect() as connection2:
eq_(
connection2.execution_options(autocommit=True)
- .execute(select([users.c.user_id]).order_by(users.c.user_id))
+ .execute(select(users.c.user_id).order_by(users.c.user_id))
.fetchall(),
[],
)
connection2.commit_prepared(transaction.xid, recover=True)
eq_(
connection2.execute(
- select([users.c.user_id]).order_by(users.c.user_id)
+ select(users.c.user_id).order_by(users.c.user_id)
).fetchall(),
[(1,)],
)
xa.prepare()
xa.commit()
result = conn.execute(
- select([users.c.user_name]).order_by(users.c.user_id)
+ select(users.c.user_name).order_by(users.c.user_id)
)
eq_(result.fetchall(), [("user1",), ("user4",)])
with eng.connect() as conn:
result = conn.execute(
- select([users.c.user_name]).order_by(users.c.user_id)
+ select(users.c.user_name).order_by(users.c.user_id)
)
eq_(result.fetchall(), [])
conn1 = testing.db.connect()
conn2 = testing.db.connect()
- conn1.execute(select([func.insert_foo("data1")]))
- assert conn2.execute(select([foo.c.data])).fetchall() == []
+ conn1.execute(select(func.insert_foo("data1")))
+ assert conn2.execute(select(foo.c.data)).fetchall() == []
conn1.execute(text("select insert_foo('moredata')"))
- assert conn2.execute(select([foo.c.data])).fetchall() == []
+ assert conn2.execute(select(foo.c.data)).fetchall() == []
trans = conn1.begin()
trans.commit()
- assert conn2.execute(select([foo.c.data])).fetchall() == [
+ assert conn2.execute(select(foo.c.data)).fetchall() == [
("data1",),
("moredata",),
]
conn1 = testing.db.connect()
conn2 = testing.db.connect()
conn1.execute(
- select([func.insert_foo("data1")]).execution_options(
- autocommit=True
- )
+ select(func.insert_foo("data1")).execution_options(autocommit=True)
)
- assert conn2.execute(select([foo.c.data])).fetchall() == [("data1",)]
+ assert conn2.execute(select(foo.c.data)).fetchall() == [("data1",)]
conn1.close()
conn2.close()
conn1 = testing.db.connect()
conn2 = testing.db.connect()
conn1.execution_options(autocommit=True).execute(
- select([func.insert_foo("data1")])
+ select(func.insert_foo("data1"))
)
- eq_(conn2.execute(select([foo.c.data])).fetchall(), [("data1",)])
+ eq_(conn2.execute(select(foo.c.data)).fetchall(), [("data1",)])
# connection supersedes statement
conn1.execution_options(autocommit=False).execute(
- select([func.insert_foo("data2")]).execution_options(
- autocommit=True
- )
+ select(func.insert_foo("data2")).execution_options(autocommit=True)
)
- eq_(conn2.execute(select([foo.c.data])).fetchall(), [("data1",)])
+ eq_(conn2.execute(select(foo.c.data)).fetchall(), [("data1",)])
# ditto
conn1.execution_options(autocommit=True).execute(
- select([func.insert_foo("data3")]).execution_options(
+ select(func.insert_foo("data3")).execution_options(
autocommit=False
)
)
eq_(
- conn2.execute(select([foo.c.data])).fetchall(),
+ conn2.execute(select(foo.c.data)).fetchall(),
[("data1",), ("data2",), ("data3",)],
)
conn1.close()
autocommit=True
)
)
- assert conn2.execute(select([foo.c.data])).fetchall() == [
- ("moredata",)
- ]
+ assert conn2.execute(select(foo.c.data)).fetchall() == [("moredata",)]
conn1.close()
conn2.close()
conn1 = testing.db.connect()
conn2 = testing.db.connect()
conn1.execute(text("insert into foo (data) values ('implicitdata')"))
- assert conn2.execute(select([foo.c.data])).fetchall() == [
+ assert conn2.execute(select(foo.c.data)).fetchall() == [
("implicitdata",)
]
conn1.close()
r"on Connection.execution_options\(\), or "
r"per-engine using the isolation_level "
r"argument to create_engine\(\).",
- select([1]).execution_options,
+ select(1).execution_options,
isolation_level=self._non_default_isolation_level(),
)
canary = mock.Mock()
with testing.db.connect() as conn:
event.listen(conn, "rollback", canary)
- conn.execute(select([1]))
+ conn.execute(select(1))
assert conn.in_transaction()
eq_(canary.mock_calls, [mock.call(conn)])
canary = mock.Mock()
with testing.db.connect() as conn:
event.listen(conn, "rollback", canary)
- conn.execute(select([1]))
+ conn.execute(select(1))
conn.rollback()
assert not conn.in_transaction()
try:
with testing.db.connect() as conn:
event.listen(conn, "rollback", canary)
- conn.execute(select([1]))
+ conn.execute(select(1))
assert conn.in_transaction()
raise Exception("some error")
assert False
exc.PendingRollbackError,
"Can't reconnect",
conn.execute,
- select([1]),
+ select(1),
)
conn.rollback()
user_id = Column("user_id", Integer, ForeignKey("users.id"))
User.address_count = sa.orm.column_property(
- sa.select([sa.func.count(Address.id)])
+ sa.select(sa.func.count(Address.id))
.where(Address.user_id == User.id)
.scalar_subquery()
)
        # this doesn't really gain us anything. but if
        # one is used, let's have it function as expected...
return sa.orm.column_property(
- sa.select([sa.func.count(Address.id)])
+ sa.select(sa.func.count(Address.id))
.where(Address.user_id == cls.id)
.scalar_subquery()
)
name = Column("name", String(50))
adr_count = sa.orm.column_property(
- sa.select(
- [sa.func.count(Address.id)], Address.user_id == id
- ).scalar_subquery()
+ sa.select(sa.func.count(Address.id))
+ .where(Address.user_id == id)
+ .scalar_subquery()
)
addresses = relationship(Address)
)
User.address_count = sa.orm.column_property(
- sa.select([sa.func.count(Address.id)])
+ sa.select(sa.func.count(Address.id))
.where(Address.user_id == User.id)
.scalar_subquery()
)
def address_count(cls):
counter(cls.id)
return column_property(
- select([func.count(Address.id)])
+ select(func.count(Address.id))
.where(Address.user_id == cls.id)
.scalar_subquery()
)
return ">>%s<<" % thingy.name
self.assert_compile(
- select([column("foo"), MyThingy()]), "SELECT foo, >>MYTHINGY!<<"
+ select(column("foo"), MyThingy()), "SELECT foo, >>MYTHINGY!<<"
)
self.assert_compile(
- select([MyThingy("x"), MyThingy("y")]).where(MyThingy() == 5),
+ select(MyThingy("x"), MyThingy("y")).where(MyThingy() == 5),
"SELECT >>x<<, >>y<< WHERE >>MYTHINGY!<< = :MYTHINGY!_1",
)
return str(compiler.counter)
self.assert_compile(
- select([column("foo"), MyThingy()]).order_by(desc(MyThingy())),
+ select(column("foo"), MyThingy()).order_by(desc(MyThingy())),
"SELECT foo, 1 ORDER BY 2 DESC",
)
self.assert_compile(
- select([MyThingy(), MyThingy()]).where(MyThingy() == 5),
+ select(MyThingy(), MyThingy()).where(MyThingy() == 5),
"SELECT 1, 2 WHERE 3 = :MYTHINGY!_1",
)
t1 = table("mytable", column("x"), column("y"), column("z"))
self.assert_compile(
- InsertFromSelect(t1, select([t1]).where(t1.c.x > 5)),
+ InsertFromSelect(t1, select(t1).where(t1.c.x > 5)),
"INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z "
"FROM mytable WHERE mytable.x > :x_1)",
)
def compile_(element, compiler, **kw):
return "OVERRIDE"
- s1 = select([t1])
+ s1 = select(t1)
self.assert_compile(s1, "OVERRIDE")
self.assert_compile(s1._annotate({}), "OVERRIDE")
finally:
return "FOO" + element.name
self.assert_compile(
- select([Sub1(), Sub2()]),
+ select(Sub1(), Sub2()),
"SELECT FOOsub1 AS sub1_1, sub2 AS sub2_1",
use_default_dialect=True,
)
name = "subsub1"
self.assert_compile(
- select([Sub1(), Sub2(), SubSub1()]),
+ select(Sub1(), Sub2(), SubSub1()),
"SELECT sub1 AS sub1_1, sub2 AS sub2_1, subsub1 AS subsub1_1",
use_default_dialect=True,
)
return "FOO" + element.name
self.assert_compile(
- select([Sub1(), Sub2(), SubSub1()]),
+ select(Sub1(), Sub2(), SubSub1()),
"SELECT FOOsub1 AS sub1_1, sub2 AS sub2_1, "
"FOOsubsub1 AS subsub1_1",
use_default_dialect=True,
def _test_result_map_population(self, expression):
lc1 = literal_column("1")
lc2 = literal_column("2")
- stmt = select([lc1, expression, lc2])
+ stmt = select(lc1, expression, lc2)
compiled = stmt.compile()
eq_(
def compile_(element, compiler, **kw):
return "OVERRIDE"
- s1 = select([t1])
+ s1 = select(t1)
self.assert_compile(s1, "SELECT t1.c1, t1.c2 FROM t1")
from sqlalchemy.dialects.sqlite import base as sqlite
)
def test_expression(self):
- expr = select([users]).select_from(users.join(addresses)).limit(5)
+ expr = select(users).select_from(users.join(addresses)).limit(5)
re_expr = serializer.loads(
serializer.dumps(expr, -1), users.metadata, None
)
def test_annotated_one(self):
j = join(users, addresses)._annotate({"foo": "bar"})
- query = select([addresses]).select_from(j)
+ query = select(addresses).select_from(j)
str(query)
for prot in pickle_protocols():
ue("\u6e2c\u8a66"), m, Column(ue("\u6e2c\u8a66_id"), Integer)
)
- expr = select([t]).where(t.c[ue("\u6e2c\u8a66_id")] == 5)
+ expr = select(t).where(t.c[ue("\u6e2c\u8a66_id")] == 5)
expr2 = serializer.loads(serializer.dumps(expr, -1), m)
"Manager": table_Employee.join(table_Engineer).join(
table_Manager
),
- "Engineer": select(
- [table_Employee, table_Engineer.c.machine],
- table_Employee.c.atype == "Engineer",
- from_obj=[table_Employee.join(table_Engineer)],
- ).subquery(),
+ "Engineer": select(table_Employee, table_Engineer.c.machine)
+ .where(table_Employee.c.atype == "Engineer")
+ .select_from(table_Employee.join(table_Engineer))
+ .subquery(),
"Employee": table_Employee.select(
table_Employee.c.atype == "Employee"
).subquery(),
"Manager": table_Employee.join(table_Engineer).join(
table_Manager
),
- "Engineer": select(
- [table_Employee, table_Engineer.c.machine],
- table_Employee.c.atype == "Engineer",
- from_obj=[table_Employee.join(table_Engineer)],
- ).subquery(),
+ "Engineer": select(table_Employee, table_Engineer.c.machine)
+ .where(table_Employee.c.atype == "Engineer")
+ .select_from(table_Employee.join(table_Engineer))
+ .subquery(),
},
None,
"pu_engineer",
self.classes.C,
self.classes.D,
)
- poly_select = select(
- [tablea, tableb.c.data.label("discriminator")],
- from_obj=tablea.join(tableb),
- ).alias("poly")
+ poly_select = (
+ select(tablea, tableb.c.data.label("discriminator"))
+ .select_from(tablea.join(tableb))
+ .alias("poly")
+ )
mapper(B, tableb)
mapper(
def test_polymorphic_on_not_present_col(self):
t2, t1 = self.tables.t2, self.tables.t1
Parent = self.classes.Parent
- t1t2_join = select([t1.c.x], from_obj=[t1.join(t2)]).alias()
+ t1t2_join = select(t1.c.x).select_from(t1.join(t2)).alias()
def go():
- t1t2_join_2 = select([t1.c.q], from_obj=[t1.join(t2)]).alias()
+ t1t2_join_2 = select(t1.c.q).select_from(t1.join(t2)).alias()
mapper(
Parent,
t2,
def test_polymorphic_on_only_in_with_poly(self):
t2, t1 = self.tables.t2, self.tables.t1
Parent = self.classes.Parent
- t1t2_join = select([t1.c.x], from_obj=[t1.join(t2)]).alias()
+ t1t2_join = select(t1.c.x).select_from(t1.join(t2)).alias()
        # if it's in the with_polymorphic, then it's OK
mapper(
Parent,
t2, t1 = self.tables.t2, self.tables.t1
Parent = self.classes.Parent
- t1t2_join = select([t1.c.x], from_obj=[t1.join(t2)]).alias()
+ t1t2_join = select(t1.c.x).select_from(t1.join(t2)).alias()
        # if with_polymorphic, but it's not present, not OK
def go():
- t1t2_join_2 = select([t1.c.q], from_obj=[t1.join(t2)]).alias()
+ t1t2_join_2 = select(t1.c.q).select_from(t1.join(t2)).alias()
mapper(
Parent,
t2,
# not orm style correct query
print("Obtaining correct results without orm")
result = (
- sa.select(
- [tests.c.id, categories.c.name],
+ sa.select(tests.c.id, categories.c.name)
+ .where(
sa.and_(
tests.c.owner_id == 1,
sa.or_(
options.c.someoption == None, # noqa
options.c.someoption == False,
),
- ),
- order_by=[tests.c.id],
- from_obj=[
- tests.join(categories).outerjoin(
- options,
- sa.and_(
- tests.c.id == options.c.test_id,
- tests.c.owner_id == options.c.owner_id,
- ),
- )
- ],
+ )
+ )
+ .order_by(tests.c.id)
+ .select_from(
+ tests.join(categories).outerjoin(
+ options,
+ sa.and_(
+ tests.c.id == options.c.test_id,
+ tests.c.owner_id == options.c.owner_id,
+ ),
+ )
)
.execute()
.fetchall()
)
session.flush()
- arb_data = sa.select(
- [stats.c.data_id, sa.func.max(stats.c.somedata).label("max")],
- stats.c.data_id <= 5,
- group_by=[stats.c.data_id],
+ arb_data = (
+ sa.select(
+ stats.c.data_id, sa.func.max(stats.c.somedata).label("max")
+ )
+ .where(stats.c.data_id <= 5)
+ .group_by(stats.c.data_id)
)
arb_result = arb_data.execute().fetchall()
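
The ``group_by`` keyword of the legacy constructor likewise becomes the generative ``group_by()`` method; a minimal standalone version of the statement above::

    from sqlalchemy import column, func, select, table

    stats = table("stats", column("data_id"), column("somedata"))

    arb_data = (
        select(stats.c.data_id, func.max(stats.c.somedata).label("max"))
        .where(stats.c.data_id <= 5)
        .group_by(stats.c.data_id)
    )
    print(arb_data)
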
mapper(Task_Type, task_type)
j = sa.outerjoin(task, msg, task.c.id == msg.c.task_id)
- jj = sa.select(
- [
+ jj = (
+ sa.select(
task.c.id.label("task_id"),
sa.func.count(msg.c.id).label("props_cnt"),
- ],
- from_obj=[j],
- group_by=[task.c.id],
- ).alias("prop_c_s")
+ )
+ .select_from(j)
+ .group_by(task.c.id)
+ .alias("prop_c_s")
+ )
jjj = sa.join(task, jj, task.c.id == jj.c.task_id)
mapper(
def __composite_values__(self):
return self
- desc_values = select(
- [values, descriptions.c.d1, descriptions.c.d2],
- descriptions.c.id == values.c.description_id,
- ).alias("descriptions_values")
+ desc_values = (
+ select(values, descriptions.c.d1, descriptions.c.d2)
+ .where(descriptions.c.id == values.c.description_id,)
+ .alias("descriptions_values")
+ )
mapper(
Descriptions,
# tests the LIMIT/OFFSET aliasing on a mapper
# against a select. original issue from ticket #904
- sel = sa.select(
- [users, addresses.c.email_address],
- users.c.id == addresses.c.user_id,
- ).alias("useralias")
+ sel = (
+ sa.select(users, addresses.c.email_address)
+ .where(users.c.id == addresses.c.user_id,)
+ .alias("useralias")
+ )
mapper(
User,
sel,
self.classes.Order,
)
- max_orders_by_user = sa.select(
- [sa.func.max(orders.c.id).label("order_id")],
- group_by=[orders.c.user_id],
- ).alias("max_orders_by_user")
+ max_orders_by_user = (
+ sa.select(sa.func.max(orders.c.id).label("order_id"))
+ .group_by(orders.c.user_id)
+ .alias("max_orders_by_user")
+ )
max_orders = orders.select(
orders.c.id == max_orders_by_user.c.order_id
self.classes.Order,
)
- s = sa.select([orders], orders.c.isopen == 1).alias("openorders")
+ s = sa.select(orders).where(orders.c.isopen == 1).alias("openorders")
mapper(
Order, s, properties={"user": relationship(User, lazy="joined")}
tag_score = tags_table.c.score1 * tags_table.c.score2
user_score = sa.select(
- [sa.func.sum(tags_table.c.score1 * tags_table.c.score2)],
- tags_table.c.user_id == users_table.c.id,
- )
+ sa.func.sum(tags_table.c.score1 * tags_table.c.score2)
+ ).where(tags_table.c.user_id == users_table.c.id,)
if labeled:
tag_score = tag_score.label(labelname)
def test_scalar_subquery_select_auto_correlate(self):
addresses, users = self.tables.addresses, self.tables.users
- query = select(
- [func.count(addresses.c.id)], addresses.c.user_id == users.c.id
- ).scalar_subquery()
+ query = (
+ select(func.count(addresses.c.id))
+ .where(addresses.c.user_id == users.c.id)
+ .scalar_subquery()
+ )
query = select(users.c.name.label("users_name"), query)
self.assert_compile(
query, self.query_correlated, dialect=default.DefaultDialect()
def test_scalar_subquery_select_explicit_correlate(self):
addresses, users = self.tables.addresses, self.tables.users
query = (
- select(
- [func.count(addresses.c.id)], addresses.c.user_id == users.c.id
- )
+ select(func.count(addresses.c.id))
+ .where(addresses.c.user_id == users.c.id)
.correlate(users)
.scalar_subquery()
)
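
The correlated scalar subquery pattern in these tests combines ``where()``, ``correlate()`` and ``scalar_subquery()``; a sketch of the auto-correlating form with illustrative tables::

    from sqlalchemy import column, func, select, table

    users = table("users", column("id"), column("name"))
    addresses = table("addresses", column("id"), column("user_id"))

    subq = (
        select(func.count(addresses.c.id))
        .where(addresses.c.user_id == users.c.id)
        .scalar_subquery()
    )

    # the subquery auto-correlates to "users" inside the enclosing SELECT
    print(select(users.c.name, subq))
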
def test_scalar_subquery_select_correlate_off(self):
addresses, users = self.tables.addresses, self.tables.users
query = (
- select(
- [func.count(addresses.c.id)], addresses.c.user_id == users.c.id
- )
+ select(func.count(addresses.c.id))
+ .where(addresses.c.user_id == users.c.id)
.correlate(None)
.scalar_subquery()
)
query, self.query_correlated, dialect=default.DefaultDialect()
)
- def test_scalar_subquery_query_correlate_off(self):
+ @testing.combinations(False, None)
+ def test_scalar_subquery_query_correlate_off(self, value):
sess = create_session()
Address, User = self.classes.Address, self.classes.User
query = (
sess.query(func.count(Address.id))
.filter(Address.user_id == User.id)
- .correlate(None)
+ .correlate(value)
.scalar_subquery()
)
query = sess.query(User.name, query)
assert q.all() == expected
# test with a straight statement
- s = select(
- [
+ s = (
+ select(
users,
func.count(addresses.c.id).label("count"),
("Name:" + users.c.name).label("concat"),
- ],
- from_obj=[users.outerjoin(addresses)],
- group_by=[c for c in users.c],
- order_by=[users.c.id],
+ )
+ .select_from(users.outerjoin(addresses))
+ .group_by(*[c for c in users.c])
+ .order_by(users.c.id)
)
q = create_session().query(User)
result = (
properties={
"concat": column_property((users.c.id * 2)),
"count": column_property(
- select(
- [func.count(addresses.c.id)],
- users.c.id == addresses.c.user_id,
- )
+ select(func.count(addresses.c.id))
+ .where(users.c.id == addresses.c.user_id,)
.correlate(users)
.scalar_subquery()
),
),
"concat": column_property((users.c.id * 2)),
"count": column_property(
- select(
- [func.count(addresses.c.id)],
- users.c.id == addresses.c.user_id,
- )
+ select(func.count(addresses.c.id))
+ .where(users.c.id == addresses.c.user_id,)
.correlate(users)
.scalar_subquery()
),
self.classes.User,
)
- s = sa.select(
- [
+ s = (
+ sa.select(
users,
(users.c.id * 2).label("concat"),
sa.func.count(addresses.c.id).label("count"),
- ],
- users.c.id == addresses.c.user_id,
- group_by=[c for c in users.c],
- ).alias("myselect")
+ )
+ .where(users.c.id == addresses.c.user_id)
+ .group_by(*[c for c in users.c])
+ .alias("myselect")
+ )
mapper(User, s)
sess = create_session()
class LineItem(fixtures.BasicEntity):
pass
- container_select = sa.select(
- [items.c.policyNum, items.c.policyEffDate, items.c.type],
- distinct=True,
- ).alias("container_select")
+ container_select = (
+ sa.select(items.c.policyNum, items.c.policyEffDate, items.c.type)
+ .distinct()
+ .alias("container_select")
+ )
mapper(LineItem, items)
self.classes.Order,
)
- max_orders_by_user = sa.select(
- [sa.func.max(orders.c.id).label("order_id")],
- group_by=[orders.c.user_id],
- ).alias("max_orders_by_user")
+ max_orders_by_user = (
+ sa.select(sa.func.max(orders.c.id).label("order_id"))
+ .group_by(orders.c.user_id)
+ .alias("max_orders_by_user")
+ )
max_orders = orders.select(
orders.c.id == max_orders_by_user.c.order_id
self.classes.Order,
)
- max_orders_by_user = sa.select(
- [sa.func.max(orders.c.id).label("order_id")],
- group_by=[orders.c.user_id],
- ).alias("max_orders_by_user")
+ max_orders_by_user = (
+ sa.select(sa.func.max(orders.c.id).label("order_id"))
+ .group_by(orders.c.user_id)
+ .alias("max_orders_by_user")
+ )
max_orders = orders.select(
orders.c.id == max_orders_by_user.c.order_id
),
)
- result = sa.select(
- [users, addresses],
- sa.and_(users.c.id == addresses.c.user_id, addresses.c.id == a.id),
- ).execute()
+ result = (
+ sa.select(users, addresses)
+ .where(
+ sa.and_(
+ users.c.id == addresses.c.user_id, addresses.c.id == a.id
+ ),
+ )
+ .execute()
+ )
eq_(
list(result.first()),
[a.user.id, "asdf8d", a.id, a.user_id, "theater@foo.com"],
expr = decimal.Decimal("15.7563")
value = e.scalar(
- select([literal(expr)])
+ select(literal(expr))
)
assert value == expr
@testing.requires.subqueries
def test_case(self):
inner = select(
- [
- case(
- (info_table.c.pk < 3, "lessthan3"),
- (and_(info_table.c.pk >= 3, info_table.c.pk < 7), "gt3"),
- ).label("x"),
- info_table.c.pk,
- info_table.c.info,
- ],
- from_obj=[info_table],
- )
+ case(
+ (info_table.c.pk < 3, "lessthan3"),
+ (and_(info_table.c.pk >= 3, info_table.c.pk < 7), "gt3"),
+ ).label("x"),
+ info_table.c.pk,
+ info_table.c.info,
+ ).select_from(info_table)
inner_result = inner.execute().fetchall()
],
)
- outer = select([inner.alias("q_inner")])
+ outer = select(inner.alias("q_inner"))
outer_result = outer.execute().fetchall()
]
w_else = select(
- [
- case(
- [info_table.c.pk < 3, cast(3, Integer)],
- [and_(info_table.c.pk >= 3, info_table.c.pk < 6), 6],
- else_=0,
- ).label("x"),
- info_table.c.pk,
- info_table.c.info,
- ],
- from_obj=[info_table],
- )
+ case(
+ [info_table.c.pk < 3, cast(3, Integer)],
+ [and_(info_table.c.pk >= 3, info_table.c.pk < 6), 6],
+ else_=0,
+ ).label("x"),
+ info_table.c.pk,
+ info_table.c.info,
+ ).select_from(info_table)
else_result = w_else.execute().fetchall()
for s in [
select(
- [
- case(
- (info_table.c.info == "pk_4_data", text("'yes'")),
- else_=text("'no'"),
- )
- ]
+ case(
+ (info_table.c.info == "pk_4_data", text("'yes'")),
+ else_=text("'no'"),
+ )
).order_by(info_table.c.info),
select(
- [
- case(
- (
- info_table.c.info == "pk_4_data",
- literal_column("'yes'"),
- ),
- else_=literal_column("'no'"),
- )
- ]
+ case(
+ (
+ info_table.c.info == "pk_4_data",
+ literal_column("'yes'"),
+ ),
+ else_=literal_column("'no'"),
+ )
).order_by(info_table.c.info),
]:
eq_(
def testcase_with_dict(self):
query = select(
- [
- case(
- {
- info_table.c.pk < 3: "lessthan3",
- info_table.c.pk >= 3: "gt3",
- },
- else_="other",
- ),
- info_table.c.pk,
- info_table.c.info,
- ],
- from_obj=[info_table],
- )
+ case(
+ {
+ info_table.c.pk < 3: "lessthan3",
+ info_table.c.pk >= 3: "gt3",
+ },
+ else_="other",
+ ),
+ info_table.c.pk,
+ info_table.c.info,
+ ).select_from(info_table)
eq_(
query.execute().fetchall(),
[
{"orm": True, "parententity": MyEntity("b", table_a)}
),
table_a.c.a._annotate(
- {"orm": True, "parententity": MyEntity("b", select([table_a]))}
+ {"orm": True, "parententity": MyEntity("b", select(table_a))}
),
table_a.c.a._annotate(
{
"orm": True,
"parententity": MyEntity(
- "b", select([table_a]).where(table_a.c.a == 5)
+ "b", select(table_a).where(table_a.c.a == 5)
),
}
),
{"orm": True, "parententity": MyEntity("b", table_a)}
),
table_a._annotate(
- {"orm": True, "parententity": MyEntity("b", select([table_a]))}
+ {"orm": True, "parententity": MyEntity("b", select(table_a))}
),
),
lambda: (
Slice(2, 10, 15),
),
lambda: (
- select([table_a.c.a]),
- select([table_a.c.a, table_a.c.b]),
- select([table_a.c.b, table_a.c.a]),
- select([table_a.c.b, table_a.c.a]).limit(5),
- select([table_a.c.b, table_a.c.a]).limit(5).offset(10),
- select([table_a.c.b, table_a.c.a])
+ select(table_a.c.a),
+ select(table_a.c.a, table_a.c.b),
+ select(table_a.c.b, table_a.c.a),
+ select(table_a.c.b, table_a.c.a).limit(5),
+ select(table_a.c.b, table_a.c.a).limit(5).offset(10),
+ select(table_a.c.b, table_a.c.a)
.limit(literal_column("foobar"))
.offset(10),
- select([table_a.c.b, table_a.c.a]).apply_labels(),
- select([table_a.c.a]).where(table_a.c.b == 5),
- select([table_a.c.a])
+ select(table_a.c.b, table_a.c.a).apply_labels(),
+ select(table_a.c.a).where(table_a.c.b == 5),
+ select(table_a.c.a)
.where(table_a.c.b == 5)
.where(table_a.c.a == 10),
- select([table_a.c.a]).where(table_a.c.b == 5).with_for_update(),
- select([table_a.c.a])
+ select(table_a.c.a).where(table_a.c.b == 5).with_for_update(),
+ select(table_a.c.a)
.where(table_a.c.b == 5)
.with_for_update(nowait=True),
- select([table_a.c.a]).where(table_a.c.b == 5).correlate(table_b),
- select([table_a.c.a])
+ select(table_a.c.a).where(table_a.c.b == 5).correlate(table_b),
+ select(table_a.c.a)
.where(table_a.c.b == 5)
.correlate_except(table_b),
),
select(table_a.c.a).join(table_c, table_a.c.a == table_c.c.x),
),
lambda: (
- select([table_a.c.a]).cte(),
- select([table_a.c.a]).cte(recursive=True),
- select([table_a.c.a]).cte(name="some_cte", recursive=True),
- select([table_a.c.a]).cte(name="some_cte"),
- select([table_a.c.a]).cte(name="some_cte").alias("other_cte"),
- select([table_a.c.a])
+ select(table_a.c.a).cte(),
+ select(table_a.c.a).cte(recursive=True),
+ select(table_a.c.a).cte(name="some_cte", recursive=True),
+ select(table_a.c.a).cte(name="some_cte"),
+ select(table_a.c.a).cte(name="some_cte").alias("other_cte"),
+ select(table_a.c.a)
.cte(name="some_cte")
- .union_all(select([table_a.c.a])),
- select([table_a.c.a])
+ .union_all(select(table_a.c.a)),
+ select(table_a.c.a)
.cte(name="some_cte")
- .union_all(select([table_a.c.b])),
- select([table_a.c.a]).lateral(),
- select([table_a.c.a]).lateral(name="bar"),
+ .union_all(select(table_a.c.b)),
+ select(table_a.c.a).lateral(),
+ select(table_a.c.a).lateral(name="bar"),
table_a.tablesample(func.bernoulli(1)),
table_a.tablesample(func.bernoulli(1), seed=func.random()),
table_a.tablesample(func.bernoulli(1), seed=func.other_random()),
table_a.insert().values({})._annotate({"nocache": True}),
table_b.insert(),
table_b.insert().with_dialect_options(sqlite_foo="some value"),
- table_b.insert().from_select(["a", "b"], select([table_a])),
+ table_b.insert().from_select(["a", "b"], select(table_a)),
table_b.insert().from_select(
- ["a", "b"], select([table_a]).where(table_a.c.a > 5)
+ ["a", "b"], select(table_a).where(table_a.c.a > 5)
),
- table_b.insert().from_select(["a", "b"], select([table_b])),
- table_b.insert().from_select(["c", "d"], select([table_a])),
+ table_b.insert().from_select(["a", "b"], select(table_b)),
+ table_b.insert().from_select(["c", "d"], select(table_a)),
table_b.insert().returning(table_b.c.a),
table_b.insert().returning(table_b.c.a, table_b.c.b),
table_b.insert().inline(),
# ),
),
lambda: (
- select([table_a.c.a]),
- select([table_a.c.a]).prefix_with("foo"),
- select([table_a.c.a]).prefix_with("foo", dialect="mysql"),
- select([table_a.c.a]).prefix_with("foo", dialect="postgresql"),
- select([table_a.c.a]).prefix_with("bar"),
- select([table_a.c.a]).suffix_with("bar"),
+ select(table_a.c.a),
+ select(table_a.c.a).prefix_with("foo"),
+ select(table_a.c.a).prefix_with("foo", dialect="mysql"),
+ select(table_a.c.a).prefix_with("foo", dialect="postgresql"),
+ select(table_a.c.a).prefix_with("bar"),
+ select(table_a.c.a).suffix_with("bar"),
),
lambda: (
- select([table_a_2.c.a]),
- select([table_a_2_fs.c.a]),
- select([table_a_2_bs.c.a]),
+ select(table_a_2.c.a),
+ select(table_a_2_fs.c.a),
+ select(table_a_2_bs.c.a),
),
lambda: (
- select([table_a.c.a]),
- select([table_a.c.a]).with_hint(None, "some hint"),
- select([table_a.c.a]).with_hint(None, "some other hint"),
- select([table_a.c.a]).with_hint(table_a, "some hint"),
- select([table_a.c.a])
+ select(table_a.c.a),
+ select(table_a.c.a).with_hint(None, "some hint"),
+ select(table_a.c.a).with_hint(None, "some other hint"),
+ select(table_a.c.a).with_hint(table_a, "some hint"),
+ select(table_a.c.a)
.with_hint(table_a, "some hint")
.with_hint(None, "some other hint"),
- select([table_a.c.a]).with_hint(table_a, "some other hint"),
- select([table_a.c.a]).with_hint(
+ select(table_a.c.a).with_hint(table_a, "some other hint"),
+ select(table_a.c.a).with_hint(
table_a, "some hint", dialect_name="mysql"
),
- select([table_a.c.a]).with_hint(
+ select(table_a.c.a).with_hint(
table_a, "some hint", dialect_name="postgresql"
),
),
table_a.alias("b"),
table_a.alias(),
table_b.alias("a"),
- select([table_a.c.a]).alias("a"),
+ select(table_a.c.a).alias("a"),
),
lambda: (
FromGrouping(table_a.alias("a")),
FromGrouping(table_a.alias("b")),
),
lambda: (
- SelectStatementGrouping(select([table_a])),
- SelectStatementGrouping(select([table_b])),
+ SelectStatementGrouping(select(table_a)),
+ SelectStatementGrouping(select(table_b)),
),
lambda: (
- select([table_a.c.a]).scalar_subquery(),
- select([table_a.c.a]).where(table_a.c.b == 5).scalar_subquery(),
+ select(table_a.c.a).scalar_subquery(),
+ select(table_a.c.a).where(table_a.c.b == 5).scalar_subquery(),
),
lambda: (
exists().where(table_a.c.a == 5),
exists().where(table_a.c.b == 5),
),
lambda: (
- union(select([table_a.c.a]), select([table_a.c.b])),
- union(select([table_a.c.a]), select([table_a.c.b])).order_by("a"),
- union_all(select([table_a.c.a]), select([table_a.c.b])),
- union(select([table_a.c.a])),
+ union(select(table_a.c.a), select(table_a.c.b)),
+ union(select(table_a.c.a), select(table_a.c.b)).order_by("a"),
+ union_all(select(table_a.c.a), select(table_a.c.b)),
+ union(select(table_a.c.a)),
union(
- select([table_a.c.a]),
- select([table_a.c.b]).where(table_a.c.b > 5),
+ select(table_a.c.a),
+ select(table_a.c.b).where(table_a.c.b > 5),
),
),
lambda: (
a2 = table_b_like_a.alias()
stmt = (
- select([table_a.c.a, a1.c.b, a2.c.b])
+ select(table_a.c.a, a1.c.b, a2.c.b)
.where(table_a.c.b == a1.c.b)
.where(a1.c.b == a2.c.b)
.where(a1.c.a == 5)
a2 = table_a.alias()
stmt = (
- select([table_a.c.a, a1.c.b, a2.c.b])
+ select(table_a.c.a, a1.c.b, a2.c.b)
.where(table_a.c.b == a1.c.b)
.where(a1.c.b == a2.c.b)
.where(a1.c.a == 5)
def two():
inner = one().subquery()
- stmt = select([table_b.c.a, inner.c.a, inner.c.b]).select_from(
+ stmt = select(table_b.c.a, inner.c.a, inner.c.b).select_from(
table_b.join(inner, table_b.c.b == inner.c.b)
)
ex = exists().where(table_b.c.b == a1.c.a)
stmt = (
- select([a1.c.a, a2.c.a])
+ select(a1.c.a, a2.c.a)
.select_from(a1.join(a2, a1.c.b == a2.c.b))
.where(ex)
)
def _statements_w_context_options_fixtures():
return [
- select([table_a])._add_context_option(opt1, True),
- select([table_a])._add_context_option(opt1, 5),
- select([table_a])
+ select(table_a)._add_context_option(opt1, True),
+ select(table_a)._add_context_option(opt1, 5),
+ select(table_a)
._add_context_option(opt1, True)
._add_context_option(opt2, True),
- select([table_a])
+ select(table_a)
._add_context_option(opt1, True)
._add_context_option(opt2, 5),
- select([table_a])._add_context_option(opt3, True),
+ select(table_a)._add_context_option(opt3, True),
]
fixtures.append(_statements_w_context_options_fixtures)
l = c.label(None)
# new case as of Id810f485c5f7ed971529489b84694e02a3356d6d
- subq = select([l]).subquery()
+ subq = select(l).subquery()
# this creates a ColumnClause as a proxy to the Label() that has
# an anonymous name, so the column has one too.
l = c.label(None)
# new case as of Id810f485c5f7ed971529489b84694e02a3356d6d
- subq = select([l]).subquery()
+ subq = select(l).subquery()
# this creates a ColumnClause as a proxy to the Label() that has
# an anonymous name, so the column has one too.
l1, l2 = table_a.c.a.label(None), table_a.c.b.label(None)
- stmt = select([table_a.c.a, table_a.c.b, l1, l2])
+ stmt = select(table_a.c.a, table_a.c.b, l1, l2)
subq = stmt.subquery()
- return select([subq]).where(subq.c[2] == 10)
+ return select(subq).where(subq.c[2] == 10)
return (
one(),
f2 = Foobar2()
eq_(f2._generate_cache_key(), None)
- s1 = select([column("q"), Foobar2()])
+ s1 = select(column("q"), Foobar2())
eq_(s1._generate_cache_key(), None)
def test_generative_cache_key_regen(self):
t1 = table("t1", column("a"), column("b"))
- s1 = select([t1])
+ s1 = select(t1)
ck1 = s1._generate_cache_key()
def test_generative_cache_key_regen_w_del(self):
t1 = table("t1", column("a"), column("b"))
- s1 = select([t1])
+ s1 = select(t1)
ck1 = s1._generate_cache_key()
def test_compare_col_identity(self):
stmt1 = (
- select([table_a.c.a, table_b.c.b])
+ select(table_a.c.a, table_b.c.b)
.where(table_a.c.a == table_b.c.b)
.alias()
)
stmt1_c = (
- select([table_a.c.a, table_b.c.b])
+ select(table_a.c.a, table_b.c.b)
.where(table_a.c.a == table_b.c.b)
.alias()
)
- stmt2 = union(select([table_a]), select([table_b]))
+ stmt2 = union(select(table_a), select(table_b))
equivalents = {table_a.c.a: [table_b.c.a]}
is_false(l1.compare(l2))
def test_cache_key_limit_offset_values(self):
- s1 = select([column("q")]).limit(10)
- s2 = select([column("q")]).limit(25)
- s3 = select([column("q")]).limit(25).offset(5)
- s4 = select([column("q")]).limit(25).offset(18)
- s5 = select([column("q")]).limit(7).offset(12)
- s6 = select([column("q")]).limit(literal_column("q")).offset(12)
+ s1 = select(column("q")).limit(10)
+ s2 = select(column("q")).limit(25)
+ s3 = select(column("q")).limit(25).offset(5)
+ s4 = select(column("q")).limit(25).offset(18)
+ s5 = select(column("q")).limit(7).offset(12)
+ s6 = select(column("q")).limit(literal_column("q")).offset(12)
for should_eq_left, should_eq_right in [(s1, s2), (s3, s4), (s3, s5)]:
eq_(
x_a.compare(x_b._annotate({"bar": True}), compare_annotations=True)
)
- s1 = select([t.c.x])._annotate({"foo": True})
- s2 = select([t.c.x])._annotate({"foo": True})
+ s1 = select(t.c.x)._annotate({"foo": True})
+ s2 = select(t.c.x)._annotate({"foo": True})
is_true(s1.compare(s2, compare_annotations=True))
is_true((t.c.x == 5).compare(x_a == 5))
is_false((t.c.y == 5).compare(x_a == 5))
- s = select([t]).subquery()
+ s = select(t).subquery()
x_p = s.c.x
is_false(x_a.compare(x_p))
is_false(t.c.x.compare(x_p))
select(t2, s1.correlate_except(t2).alias())
)
- def test_correlate_except_none(self):
+ @testing.combinations(False, None)
+ def test_correlate_except_none(self, value):
t1, t2, s1 = self._fixture()
self._assert_where_all_correlated(
select(t1, t2).where(
- t2.c.a == s1.correlate_except(None).scalar_subquery()
+ t2.c.a == s1.correlate_except(value).scalar_subquery()
)
)
select(t2).having(t2.c.a == s1.scalar_subquery())
)
- def test_correlate_disabled_where(self):
+ @testing.combinations(False, None)
+ def test_correlate_disabled_where(self, value):
t1, t2, s1 = self._fixture()
self._assert_where_uncorrelated(
- select(t2).where(t2.c.a == s1.correlate(None).scalar_subquery())
+ select(t2).where(t2.c.a == s1.correlate(value).scalar_subquery())
)
def test_correlate_disabled_column(self):
regional_sales = (
select(
- [
- orders.c.region,
- func.sum(orders.c.amount).label("total_sales"),
- ]
+ orders.c.region,
+ func.sum(orders.c.amount).label("total_sales"),
)
.group_by(orders.c.region)
.cte("regional_sales")
)
top_regions = (
- select([regional_sales.c.region])
+ select(regional_sales.c.region)
.where(
regional_sales.c.total_sales
> select(
- [func.sum(regional_sales.c.total_sales) / 10]
+ func.sum(regional_sales.c.total_sales) / 10
).scalar_subquery()
)
.cte("top_regions")
s = (
select(
- [
- orders.c.region,
- orders.c.product,
- func.sum(orders.c.quantity).label("product_units"),
- func.sum(orders.c.amount).label("product_sales"),
- ]
+ orders.c.region,
+ orders.c.product,
+ func.sum(orders.c.quantity).label("product_units"),
+ func.sum(orders.c.amount).label("product_sales"),
)
- .where(orders.c.region.in_(select([top_regions.c.region])))
+ .where(orders.c.region.in_(select(top_regions.c.region)))
.group_by(orders.c.region, orders.c.product)
)
)
included_parts = (
- select([parts.c.sub_part, parts.c.part, parts.c.quantity])
+ select(parts.c.sub_part, parts.c.part, parts.c.quantity)
.where(parts.c.part == "our part")
.cte(recursive=True)
)
parts_alias = parts.alias()
included_parts = included_parts.union(
select(
- [
- parts_alias.c.sub_part,
- parts_alias.c.part,
- parts_alias.c.quantity,
- ]
+ parts_alias.c.sub_part,
+ parts_alias.c.part,
+ parts_alias.c.quantity,
).where(parts_alias.c.part == incl_alias.c.sub_part)
)
s = (
select(
- [
- included_parts.c.sub_part,
- func.sum(included_parts.c.quantity).label(
- "total_quantity"
- ),
- ]
+ included_parts.c.sub_part,
+ func.sum(included_parts.c.quantity).label("total_quantity"),
)
.select_from(
included_parts.join(
)
included_parts = (
- select([parts.c.sub_part, parts.c.part, parts.c.quantity])
+ select(parts.c.sub_part, parts.c.part, parts.c.quantity)
.where(parts.c.part == "our part")
.cte(recursive=True)
)
parts_alias = parts.alias()
included_parts = incl_alias.union(
select(
- [
- parts_alias.c.sub_part,
- parts_alias.c.part,
- parts_alias.c.quantity,
- ]
+ parts_alias.c.sub_part,
+ parts_alias.c.part,
+ parts_alias.c.quantity,
).where(parts_alias.c.part == incl_alias.c.sub_part)
)
s = (
select(
- [
- included_parts.c.sub_part,
- func.sum(included_parts.c.quantity).label(
- "total_quantity"
- ),
- ]
+ included_parts.c.sub_part,
+ func.sum(included_parts.c.quantity).label("total_quantity"),
)
.select_from(
included_parts.join(
)
def test_recursive_union_no_alias_one(self):
- s1 = select([literal(0).label("x")])
+ s1 = select(literal(0).label("x"))
cte = s1.cte(name="cte", recursive=True)
- cte = cte.union_all(select([cte.c.x + 1]).where(cte.c.x < 10))
- s2 = select([cte])
+ cte = cte.union_all(select(cte.c.x + 1).where(cte.c.x < 10))
+ s2 = select(cte)
self.assert_compile(
s2,
"WITH RECURSIVE cte(x) AS "
)
def test_recursive_union_alias_one(self):
- s1 = select([literal(0).label("x")])
+ s1 = select(literal(0).label("x"))
cte = s1.cte(name="cte", recursive=True)
- cte = cte.union_all(select([cte.c.x + 1]).where(cte.c.x < 10)).alias(
+ cte = cte.union_all(select(cte.c.x + 1).where(cte.c.x < 10)).alias(
"cr1"
)
- s2 = select([cte])
+ s2 = select(cte)
self.assert_compile(
s2,
"WITH RECURSIVE cte(x) AS "
# I know, this is the PG VALUES keyword,
# we're cheating here. also yes we need the SELECT,
# sorry PG.
- t = select([func.values(1).label("n")]).cte("t", recursive=True)
- t = t.union_all(select([t.c.n + 1]).where(t.c.n < 100))
- s = select([func.sum(t.c.n)])
+ t = select(func.values(1).label("n")).cte("t", recursive=True)
+ t = t.union_all(select(t.c.n + 1).where(t.c.n < 100))
+ s = select(func.sum(t.c.n))
self.assert_compile(
s,
"WITH RECURSIVE t(n) AS "
# I know, this is the PG VALUES keyword,
# we're cheating here. also yes we need the SELECT,
# sorry PG.
- t = select([func.values(1).label("n")]).cte("t", recursive=True)
- t = t.union_all(select([t.c.n + 1]).where(t.c.n < 100)).alias("ta")
- s = select([func.sum(t.c.n)])
+ t = select(func.values(1).label("n")).cte("t", recursive=True)
+ t = t.union_all(select(t.c.n + 1).where(t.c.n < 100)).alias("ta")
+ s = select(func.sum(t.c.n))
self.assert_compile(
s,
"WITH RECURSIVE t(n) AS "
# like test one, but let's refer to the CTE
# in a sibling CTE.
- s1 = select([literal(0).label("x")])
+ s1 = select(literal(0).label("x"))
cte = s1.cte(name="cte", recursive=True)
# can't do it here...
- # bar = select([cte]).cte('bar')
- cte = cte.union_all(select([cte.c.x + 1]).where(cte.c.x < 10))
- bar = select([cte]).cte("bar")
+ # bar = select(cte).cte('bar')
+ cte = cte.union_all(select(cte.c.x + 1).where(cte.c.x < 10))
+ bar = select(cte).cte("bar")
- s2 = select([cte, bar])
+ s2 = select(cte, bar)
self.assert_compile(
s2,
"WITH RECURSIVE cte(x) AS "
# like test one, but let's refer to the CTE
# in a sibling CTE.
- s1 = select([literal(0).label("x")])
+ s1 = select(literal(0).label("x"))
cte = s1.cte(name="cte", recursive=True)
# can't do it here...
- # bar = select([cte]).cte('bar')
- cte = cte.union_all(select([cte.c.x + 1]).where(cte.c.x < 10)).alias(
+ # bar = select(cte).cte('bar')
+ cte = cte.union_all(select(cte.c.x + 1).where(cte.c.x < 10)).alias(
"cs1"
)
- bar = select([cte]).cte("bar").alias("cs2")
+ bar = select(cte).cte("bar").alias("cs2")
- s2 = select([cte, bar])
+ s2 = select(cte, bar)
self.assert_compile(
s2,
"WITH RECURSIVE cte(x) AS "
# how the compiler resolves multiple instances
# of "cte".
- s1 = select([literal(0).label("x")])
+ s1 = select(literal(0).label("x"))
cte = s1.cte(name="cte", recursive=True)
- bar = select([cte]).cte("bar")
- cte = cte.union_all(select([cte.c.x + 1]).where(cte.c.x < 10))
+ bar = select(cte).cte("bar")
+ cte = cte.union_all(select(cte.c.x + 1).where(cte.c.x < 10))
# outer cte rendered first, then bar, which
# includes "inner" cte
- s2 = select([cte, bar])
+ s2 = select(cte, bar)
self.assert_compile(
s2,
"WITH RECURSIVE cte(x) AS "
# bar rendered, only includes "inner" cte,
# "outer" cte isn't present
- s2 = select([bar])
+ s2 = select(bar)
self.assert_compile(
s2,
"WITH RECURSIVE cte(x) AS "
# bar rendered, but then the "outer"
# cte is rendered.
- s2 = select([bar, cte])
+ s2 = select(bar, cte)
self.assert_compile(
s2,
"WITH RECURSIVE bar AS (SELECT cte.x AS x FROM cte), "
# how the compiler resolves multiple instances
# of "cte".
- s1 = select([literal(0).label("x")])
+ s1 = select(literal(0).label("x"))
cte = s1.cte(name="cte", recursive=True)
- bar = select([cte]).cte("bar").alias("cs1")
- cte = cte.union_all(select([cte.c.x + 1]).where(cte.c.x < 10)).alias(
+ bar = select(cte).cte("bar").alias("cs1")
+ cte = cte.union_all(select(cte.c.x + 1).where(cte.c.x < 10)).alias(
"cs2"
)
# outer cte rendered first, then bar, which
# includes "inner" cte
- s2 = select([cte, bar])
+ s2 = select(cte, bar)
self.assert_compile(
s2,
"WITH RECURSIVE cte(x) AS "
# bar rendered, only includes "inner" cte,
# "outer" cte isn't present
- s2 = select([bar])
+ s2 = select(bar)
self.assert_compile(
s2,
"WITH RECURSIVE cte(x) AS "
# bar rendered, but then the "outer"
# cte is rendered.
- s2 = select([bar, cte])
+ s2 = select(bar, cte)
self.assert_compile(
s2,
"WITH RECURSIVE bar AS (SELECT cte.x AS x FROM cte), "
def test_conflicting_names(self):
"""test a flat out name conflict."""
- s1 = select([1])
+ s1 = select(1)
c1 = s1.cte(name="cte1", recursive=True)
- s2 = select([1])
+ s2 = select(1)
c2 = s2.cte(name="cte1", recursive=True)
- s = select([c1, c2])
+ s = select(c1, c2)
assert_raises_message(
CompileError,
"Multiple, unrelated CTEs found " "with the same name: 'cte1'",
def test_union(self):
orders = table("orders", column("region"), column("amount"))
- regional_sales = select([orders.c.region, orders.c.amount]).cte(
+ regional_sales = select(orders.c.region, orders.c.amount).cte(
"regional_sales"
)
- s = select([regional_sales.c.region]).where(
+ s = select(regional_sales.c.region).where(
regional_sales.c.amount > 500
)
)
s = s.union_all(
- select([regional_sales.c.region]).where(
+ select(regional_sales.c.region).where(
regional_sales.c.amount < 300
)
)
orders = table("orders", column("region"), column("amount"))
regional_sales = (
- select([orders.c.region, orders.c.amount])
+ select(orders.c.region, orders.c.amount)
.cte("regional_sales")
.alias("rs")
)
- s = select([regional_sales.c.region]).where(
+ s = select(regional_sales.c.region).where(
regional_sales.c.amount > 500
)
)
s = s.union_all(
- select([regional_sales.c.region]).where(
+ select(regional_sales.c.region).where(
regional_sales.c.amount < 300
)
)
tag = table("tag", column("tag"), column("entity_id"))
tags = (
- select([tag.c.entity_id, func.array_agg(tag.c.tag).label("tags")])
+ select(tag.c.entity_id, func.array_agg(tag.c.tag).label("tags"))
.group_by(tag.c.entity_id)
.cte("unaliased_tags")
)
employer_tags = tags.alias(name="employer_tags")
q = (
- select([entity.c.name])
+ select(entity.c.name)
.select_from(
entity.outerjoin(
entity_tags, tags.c.entity_id == entity.c.id
def test_reserved_quote(self):
orders = table("orders", column("order"))
- s = select([orders.c.order]).cte("regional_sales", recursive=True)
- s = select([s.c.order])
+ s = select(orders.c.order).cte("regional_sales", recursive=True)
+ s = select(s.c.order)
self.assert_compile(
s,
'WITH RECURSIVE regional_sales("order") AS '
)
def test_multi_subq_quote(self):
- cte = select([literal(1).label("id")]).cte(name="CTE")
+ cte = select(literal(1).label("id")).cte(name="CTE")
- s1 = select([cte.c.id]).alias()
- s2 = select([cte.c.id]).alias()
+ s1 = select(cte.c.id).alias()
+ s2 = select(cte.c.id).alias()
- s = select([s1, s2])
+ s = select(s1, s2)
self.assert_compile(
s,
'WITH "CTE" AS (SELECT :param_1 AS id) '
)
def test_multi_subq_alias(self):
- cte = select([literal(1).label("id")]).cte(name="cte1").alias("aa")
+ cte = select(literal(1).label("id")).cte(name="cte1").alias("aa")
- s1 = select([cte.c.id]).alias()
- s2 = select([cte.c.id]).alias()
+ s1 = select(cte.c.id).alias()
+ s2 = select(cte.c.id).alias()
- s = select([s1, s2])
+ s = select(s1, s2)
self.assert_compile(
s,
"WITH cte1 AS (SELECT :param_1 AS id) "
b = table("b", column("id"), column("fid"))
c = table("c", column("id"), column("fid"))
- cte1 = select([a.c.id]).cte(name="cte1")
+ cte1 = select(a.c.id).cte(name="cte1")
aa = cte1.alias("aa")
cte2 = (
- select([b.c.id])
+ select(b.c.id)
.select_from(b.join(aa, b.c.fid == aa.c.id))
.cte(name="cte2")
)
cte3 = (
- select([c.c.id])
+ select(c.c.id)
.select_from(c.join(aa, c.c.fid == aa.c.id))
.cte(name="cte3")
)
- stmt = select([cte3.c.id, cte2.c.id]).select_from(
+ stmt = select(cte3.c.id, cte2.c.id).select_from(
cte2.join(cte3, cte2.c.id == cte3.c.id)
)
self.assert_compile(
)
def test_named_alias_no_quote(self):
- cte = select([literal(1).label("id")]).cte(name="CTE")
+ cte = select(literal(1).label("id")).cte(name="CTE")
- s1 = select([cte.c.id]).alias(name="no_quotes")
+ s1 = select(cte.c.id).alias(name="no_quotes")
- s = select([s1])
+ s = select(s1)
self.assert_compile(
s,
'WITH "CTE" AS (SELECT :param_1 AS id) '
)
def test_named_alias_quote(self):
- cte = select([literal(1).label("id")]).cte(name="CTE")
+ cte = select(literal(1).label("id")).cte(name="CTE")
- s1 = select([cte.c.id]).alias(name="Quotes Required")
+ s1 = select(cte.c.id).alias(name="Quotes Required")
- s = select([s1])
+ s = select(s1)
self.assert_compile(
s,
'WITH "CTE" AS (SELECT :param_1 AS id) '
)
def test_named_alias_disable_quote(self):
- cte = select([literal(1).label("id")]).cte(
+ cte = select(literal(1).label("id")).cte(
name=quoted_name("CTE", quote=False)
)
- s1 = select([cte.c.id]).alias(
- name=quoted_name("DontQuote", quote=False)
- )
+ s1 = select(cte.c.id).alias(name=quoted_name("DontQuote", quote=False))
- s = select([s1])
+ s = select(s1)
self.assert_compile(
s,
"WITH CTE AS (SELECT :param_1 AS id) "
def test_positional_binds(self):
orders = table("orders", column("order"))
- s = select([orders.c.order, literal("x")]).cte("regional_sales")
- s = select([s.c.order, literal("y")])
+ s = select(orders.c.order, literal("x")).cte("regional_sales")
+ s = select(s.c.order, literal("y"))
dialect = default.DefaultDialect()
dialect.positional = True
dialect.paramstyle = "numeric"
)
s = (
- select([orders.c.order])
+ select(orders.c.order)
.where(orders.c.order == "x")
.cte("regional_sales")
)
- s = select([s.c.order]).where(s.c.order == "y")
+ s = select(s.c.order).where(s.c.order == "y")
self.assert_compile(
s,
'WITH regional_sales AS (SELECT orders."order" AS '
def test_positional_binds_2(self):
orders = table("orders", column("order"))
- s = select([orders.c.order, literal("x")]).cte("regional_sales")
- s = select([s.c.order, literal("y")])
+ s = select(orders.c.order, literal("x")).cte("regional_sales")
+ s = select(s.c.order, literal("y"))
dialect = default.DefaultDialect()
dialect.positional = True
dialect.paramstyle = "numeric"
s1 = (
- select([orders.c.order])
+ select(orders.c.order)
.where(orders.c.order == "x")
.cte("regional_sales_1")
)
s2 = (
select(
- [
- orders.c.order == "y",
- s1a.c.order,
- orders.c.order,
- s1.c.order,
- ]
+ orders.c.order == "y", s1a.c.order, orders.c.order, s1.c.order,
)
.where(orders.c.order == "z")
.cte("regional_sales_2")
)
- s3 = select([s2])
+ s3 = select(s2)
self.assert_compile(
s3,
def test_positional_binds_2_asliteral(self):
orders = table("orders", column("order"))
- s = select([orders.c.order, literal("x")]).cte("regional_sales")
- s = select([s.c.order, literal("y")])
+ s = select(orders.c.order, literal("x")).cte("regional_sales")
+ s = select(s.c.order, literal("y"))
dialect = default.DefaultDialect()
dialect.positional = True
dialect.paramstyle = "numeric"
s1 = (
- select([orders.c.order])
+ select(orders.c.order)
.where(orders.c.order == "x")
.cte("regional_sales_1")
)
s2 = (
select(
- [
- orders.c.order == "y",
- s1a.c.order,
- orders.c.order,
- s1.c.order,
- ]
+ orders.c.order == "y", s1a.c.order, orders.c.order, s1.c.order,
)
.where(orders.c.order == "z")
.cte("regional_sales_2")
)
- s3 = select([s2])
+ s3 = select(s2)
self.assert_compile(
s3,
def test_all_aliases(self):
orders = table("order", column("order"))
- s = select([orders.c.order]).cte("regional_sales")
+ s = select(orders.c.order).cte("regional_sales")
r1 = s.alias()
r2 = s.alias()
- s2 = select([r1, r2]).where(r1.c.order > r2.c.order)
+ s2 = select(r1, r2).where(r1.c.order > r2.c.order)
self.assert_compile(
s2,
'regional_sales AS anon_2 WHERE anon_1."order" > anon_2."order"',
)
- s3 = select([orders]).select_from(
+ s3 = select(orders).select_from(
orders.join(r1, r1.c.order == orders.c.order)
)
def test_prefixes(self):
orders = table("order", column("order"))
- s = select([orders.c.order]).cte("regional_sales")
+ s = select(orders.c.order).cte("regional_sales")
s = s.prefix_with("NOT MATERIALIZED", dialect="postgresql")
- stmt = select([orders]).where(orders.c.order > s.c.order)
+ stmt = select(orders).where(orders.c.order > s.c.order)
self.assert_compile(
stmt,
def test_suffixes(self):
orders = table("order", column("order"))
- s = select([orders.c.order]).cte("regional_sales")
+ s = select(orders.c.order).cte("regional_sales")
s = s.suffix_with("pg suffix", dialect="postgresql")
s = s.suffix_with("oracle suffix", dialect="oracle")
- stmt = select([orders]).where(orders.c.order > s.c.order)
+ stmt = select(orders).where(orders.c.order > s.c.order)
self.assert_compile(
stmt,
insert = orders.insert().from_select(
orders.c.keys(),
select(
- [
- literal("Region1"),
- literal(1.0),
- literal("Product1"),
- literal(1),
- ]
+ literal("Region1"),
+ literal(1.0),
+ literal("Product1"),
+ literal(1),
).where(~exists(upsert.select())),
)
parts = table("parts", column("part"), column("sub_part"))
included_parts = (
- select([parts.c.sub_part, parts.c.part])
+ select(parts.c.sub_part, parts.c.part)
.where(parts.c.part == "our part")
.cte("included_parts", recursive=True)
)
pr = included_parts.alias("pr")
p = parts.alias("p")
included_parts = included_parts.union_all(
- select([p.c.sub_part, p.c.part]).where(p.c.part == pr.c.sub_part)
+ select(p.c.sub_part, p.c.part).where(p.c.part == pr.c.sub_part)
)
stmt = (
parts.delete()
- .where(parts.c.part.in_(select([included_parts.c.part])))
+ .where(parts.c.part.in_(select(included_parts.c.part)))
.returning(parts.c.part)
)
.cte("pd")
)
- stmt = select([cte])
+ stmt = select(cte)
assert "autocommit" not in stmt._execution_options
eq_(stmt.compile().execution_options["autocommit"], True)
def test_standalone_function(self):
a = table("a", column("x"))
- a_stmt = select([a])
+ a_stmt = select(a)
- stmt = select([cte(a_stmt)])
+ stmt = select(cte(a_stmt))
self.assert_compile(
stmt,
def test_no_alias_construct(self):
a = table("a", column("x"))
- a_stmt = select([a])
+ a_stmt = select(a)
assert_raises_message(
NotImplementedError,
def myupdate_with_ctx(ctx):
conn = ctx.connection
- return conn.execute(sa.select([sa.text("13")])).scalar()
+ return conn.execute(sa.select(sa.text("13"))).scalar()
def mydefault_using_connection(ctx):
conn = ctx.connection
- return conn.execute(sa.select([sa.text("12")])).scalar()
+ return conn.execute(sa.select(sa.text("12"))).scalar()
use_function_defaults = testing.against("postgresql", "mssql")
is_oracle = testing.against("oracle")
if is_oracle:
ts = conn.scalar(
sa.select(
- [
- func.trunc(
- func.current_timestamp(),
- sa.literal_column("'DAY'"),
- type_=sa.Date,
- )
- ]
+ func.trunc(
+ func.current_timestamp(),
+ sa.literal_column("'DAY'"),
+ type_=sa.Date,
+ )
)
)
currenttime = cls.currenttime = func.trunc(
connection.execute(t.insert())
ctexec = connection.execute(
- sa.select([self.currenttime.label("now")])
+ sa.select(self.currenttime.label("now"))
).scalar()
result = connection.execute(t.select().order_by(t.c.col1))
today = datetime.date.today()
expected = (7, 5)
elif a == "select":
conn.execute(q.insert().values(x=5, y=10, z=1))
- cte = sa.select([q.c.z]).cte("c")
+ cte = sa.select(q.c.z).cte("c")
expected = (5, 10)
if b == "select":
conn.execute(p.insert().values(s=1))
- stmt = select([p.c.s, cte.c.z]).where(p.c.s == cte.c.z)
+ stmt = select(p.c.s, cte.c.z).where(p.c.s == cte.c.z)
elif b == "insert":
- sel = select([1, cte.c.z])
+ sel = select(1, cte.c.z)
stmt = (
p.insert().from_select(["s", "t"], sel).returning(p.c.s, p.c.t)
)
)
eq_(list(conn.execute(stmt)), [(1, 1)])
- eq_(conn.execute(select([q.c.x, q.c.y])).first(), expected)
+ eq_(conn.execute(select(q.c.x, q.c.y)).first(), expected)
class PKDefaultTest(fixtures.TablesTest):
"id",
Integer,
primary_key=True,
- default=sa.select([func.max(t2.c.nextid)]).scalar_subquery(),
+ default=sa.select(func.max(t2.c.nextid)).scalar_subquery(),
),
Column("data", String(30)),
)
connection.execute(t1.insert())
eq_(
1,
- connection.scalar(select([func.count(text("*"))]).select_from(t1)),
+ connection.scalar(select(func.count(text("*"))).select_from(t1)),
)
eq_(True, connection.scalar(t1.select()))
r = connection.execute(single.insert())
id_ = r.inserted_primary_key[0]
eq_(id_, 1)
- eq_(connection.scalar(sa.select([single.c.id])), 1)
+ eq_(connection.scalar(sa.select(single.c.id)), 1)
def test_autoinc_detection_no_affinity(self):
class MyType(TypeDecorator):
connection.execute(dataset_no_autoinc.insert())
eq_(
connection.scalar(
- select([func.count("*")]).select_from(dataset_no_autoinc)
+ select(func.count("*")).select_from(dataset_no_autoinc)
),
1,
)
connection.execute(dataset_no_autoinc.insert())
eq_(
connection.scalar(
- select([func.count("*")]).select_from(dataset_no_autoinc)
+ select(func.count("*")).select_from(dataset_no_autoinc)
),
1,
)
self._run_test(server_default="1", autoincrement=False)
def test_clause(self):
- stmt = select([cast("INT_1", type_=self.MyInteger)]).scalar_subquery()
+ stmt = select(cast("INT_1", type_=self.MyInteger)).scalar_subquery()
self._run_test(default=stmt)
@testing.requires.returning
table.create(connection)
- sel = select([data.c.x, data.c.y])
+ sel = select(data.c.x, data.c.y)
ins = table.insert().from_select(["x", "y"], sel)
connection.execute(ins)
table.create(connection)
- sel = select([data.c.x, data.c.y])
+ sel = select(data.c.x, data.c.y)
ins = table.insert().from_select(["x", "y"], sel)
connection.execute(ins)
table1, table2 = self.tables.mytable, self.tables.myothertable
# test a non-correlated WHERE clause
- s = select([table2.c.othername], table2.c.otherid == 7)
+ s = select(table2.c.othername).where(table2.c.otherid == 7)
self.assert_compile(
delete(table1, table1.c.name == s.scalar_subquery()),
"DELETE FROM mytable "
table1, table2 = self.tables.mytable, self.tables.myothertable
# test one that is actually correlated...
- s = select([table2.c.othername], table2.c.otherid == table1.c.myid)
+ s = select(table2.c.othername).where(
+ table2.c.otherid == table1.c.myid
+ )
self.assert_compile(
table1.delete(table1.c.name == s.scalar_subquery()),
"DELETE FROM mytable "
r"The SelectBase.select\(\) method is deprecated"
):
self.assert_compile(
- select([col]).select(),
+ select(col).select(),
'SELECT anon_1."NEEDS QUOTES_" FROM '
'(SELECT NEEDS QUOTES AS "NEEDS QUOTES_") AS anon_1',
)
r"The SelectBase.select\(\) method is deprecated"
):
self.assert_compile(
- select([col]).select(),
+ select(col).select(),
'SELECT anon_1."NEEDS QUOTES_" FROM (SELECT NEEDS QUOTES AS '
'"NEEDS QUOTES_") AS anon_1',
)
r"The SelectBase.select\(\) method is deprecated"
):
self.assert_compile(
- select([col]).select(),
+ select(col).select(),
'SELECT anon_1."NEEDS QUOTES" FROM (SELECT NEEDS QUOTES AS '
'"NEEDS QUOTES") AS anon_1',
)
with testing.expect_deprecated(
"The SelectBase.c and SelectBase.columns", "Implicit coercion"
):
- stmt = select([table1.c.myid]).select_from(
+ stmt = select(table1.c.myid).select_from(
table1.join(t, table1.c.myid == t.c.id)
)
compiled = stmt.compile()
).connect() as ins_conn:
row = ins_conn.execute(
select(
- [
- literal_column("1").label("case_insensitive"),
- literal_column("2").label("CaseSensitive"),
- ]
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive"),
)
).first()
).connect() as ins_conn:
row = ins_conn.execute(
select(
- [
- literal_column("1").label("case_insensitive"),
- literal_column("2").label("CaseSensitive"),
- text("3 AS screw_up_the_cols"),
- ]
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive"),
+ text("3 AS screw_up_the_cols"),
)
).first()
adapter = sql_util.ColumnAdapter(aliased)
- f = select([adapter.columns[c] for c in aliased2.c]).select_from(
+ f = select(*[adapter.columns[c] for c in aliased2.c]).select_from(
aliased
)
s = select(aliased2).select_from(aliased)
eq_(str(s), str(f))
- f = select([adapter.columns[func.count(aliased2.c.col1)]]).select_from(
+ f = select(adapter.columns[func.count(aliased2.c.col1)]).select_from(
aliased
)
eq_(
# aliased2. corresponding_column checks these
# now.
adapter = sql_util.ColumnAdapter(aliased1)
- f1 = select([adapter.columns[c] for c in aliased2._raw_columns])
- f2 = select([adapter.columns[c] for c in aliased3._raw_columns])
+ f1 = select(*[adapter.columns[c] for c in aliased2._raw_columns])
+ f2 = select(*[adapter.columns[c] for c in aliased3._raw_columns])
eq_(str(f1), str(f2))
def test_aliased_cloned_column_adapt_exported(self):
# have an _is_clone_of pointer. But we now modified _make_proxy
# to assign this.
adapter = sql_util.ColumnAdapter(aliased1)
- f1 = select([adapter.columns[c] for c in aliased2.c])
- f2 = select([adapter.columns[c] for c in aliased3.c])
+ f1 = select(*[adapter.columns[c] for c in aliased2.c])
+ f2 = select(*[adapter.columns[c] for c in aliased3.c])
eq_(str(f1), str(f2))
def test_aliased_cloned_schema_column_adapt_exported(self):
# have an _is_clone_of pointer. But we now modified _make_proxy
# to assign this.
adapter = sql_util.ColumnAdapter(aliased1)
- f1 = select([adapter.columns[c] for c in aliased2.c])
- f2 = select([adapter.columns[c] for c in aliased3.c])
+ f1 = select(*[adapter.columns[c] for c in aliased2.c])
+ f2 = select(*[adapter.columns[c] for c in aliased3.c])
eq_(str(f1), str(f2))
def test_labeled_expression_adapt(self):
def test_select(self):
s2 = select(t1)
s2_assert = str(s2)
- s3_assert = str(select([t1], t1.c.col2 == 7))
+ s3_assert = str(select(t1).where(t1.c.col2 == 7))
class Vis(CloningVisitor):
def visit_select(self, select):
Vis().traverse(s2)
assert str(s2) == s3_assert
- s4_assert = str(select([t1], and_(t1.c.col2 == 7, t1.c.col3 == 9)))
+ s4_assert = str(select(t1).where(and_(t1.c.col2 == 7, t1.c.col3 == 9)))
class Vis(CloningVisitor):
def visit_select(self, select):
assert str(s4) == s4_assert
assert str(s3) == s3_assert
- s5_assert = str(select([t1], and_(t1.c.col2 == 7, t1.c.col1 == 9)))
+ s5_assert = str(select(t1).where(and_(t1.c.col2 == 7, t1.c.col1 == 9)))
class Vis(CloningVisitor):
def visit_binary(self, binary):
eq_(str(u), str(u2))
eq_([str(c) for c in u2.selected_columns], cols)
- s1 = select([t1], t1.c.col1 == bindparam("id_param"))
+ s1 = select(t1).where(t1.c.col1 == bindparam("id_param"))
s2 = select(t2)
u = union(s1, s2)
"""test that unique bindparams change their name upon clone()
to prevent conflicts"""
- s = select([t1], t1.c.col1 == bindparam(None, unique=True)).alias()
+ s = select(t1).where(t1.c.col1 == bindparam(None, unique=True)).alias()
s2 = CloningVisitor().traverse(s).alias()
- s3 = select([s], s.c.col2 == s2.c.col2)
+ s3 = select(s).where(s.c.col2 == s2.c.col2)
self.assert_compile(
s3,
"WHERE anon_1.col2 = anon_2.col2",
)
- s = select([t1], t1.c.col1 == 4).alias()
+ s = select(t1).where(t1.c.col1 == 4).alias()
s2 = CloningVisitor().traverse(s).alias()
- s3 = select([s], s.c.col2 == s2.c.col2)
+ s3 = select(s).where(s.c.col2 == s2.c.col2)
self.assert_compile(
s3,
"SELECT anon_1.col1, anon_1.col2, anon_1.col3 FROM "
eq_(str(s), str(s5))
def test_correlated_select(self):
- s = select(
- [literal_column("*")], t1.c.col1 == t2.c.col1, from_obj=[t1, t2]
- ).correlate(t2)
+ s = (
+ select(literal_column("*"))
+ .where(t1.c.col1 == t2.c.col1)
+ .select_from(t1, t2)
+ .correlate(t2)
+ )
class Vis(CloningVisitor):
def visit_select(self, select):
def test_select_fromtwice_one(self):
t1a = t1.alias()
- s = select([1], t1.c.col1 == t1a.c.col1, from_obj=t1a).correlate(t1a)
+ s = (
+ select(1)
+ .where(t1.c.col1 == t1a.c.col1)
+ .select_from(t1a)
+ .correlate(t1a)
+ )
s = select(t1).where(t1.c.col1 == s.scalar_subquery())
self.assert_compile(
s,
def test_select_fromtwice_two(self):
s = select(t1).where(t1.c.col1 == "foo").alias()
- s2 = select([1], t1.c.col1 == s.c.col1, from_obj=s).correlate(t1)
+ s2 = (
+ select(1).where(t1.c.col1 == s.c.col1).select_from(s).correlate(t1)
+ )
s3 = select(t1).where(t1.c.col1 == s2.scalar_subquery())
self.assert_compile(
s3,
t2alias = t2.alias("t2alias")
vis = sql_util.ClauseAdapter(t1alias)
- s = select(
- [literal_column("*")], from_obj=[t1alias, t2alias]
- ).scalar_subquery()
+ s = (
+ select(literal_column("*"))
+ .select_from(t1alias, t2alias)
+ .scalar_subquery()
+ )
froms = list(s._iterate_from_elements())
assert t2alias in froms
assert t1alias in froms
self.assert_compile(
- select([literal_column("*")], t2alias.c.col1 == s),
+ select(literal_column("*")).where(t2alias.c.col1 == s),
"SELECT * FROM table2 AS t2alias WHERE "
"t2alias.col1 = (SELECT * FROM table1 AS "
"t1alias)",
# _cloned_set for each element in _froms when correlating
self.assert_compile(
- select([literal_column("*")], t2alias.c.col1 == s),
+ select(literal_column("*")).where(t2alias.c.col1 == s),
"SELECT * FROM table2 AS t2alias WHERE "
"t2alias.col1 = (SELECT * FROM table1 AS "
"t1alias)",
)
s = (
- select([literal_column("*")], from_obj=[t1alias, t2alias])
+ select(literal_column("*"))
+ .select_from(t1alias, t2alias)
.correlate(t2alias)
.scalar_subquery()
)
self.assert_compile(
- select([literal_column("*")], t2alias.c.col1 == s),
+ select(literal_column("*")).where(t2alias.c.col1 == s),
"SELECT * FROM table2 AS t2alias WHERE "
"t2alias.col1 = (SELECT * FROM table1 AS "
"t1alias)",
)
s = vis.traverse(s)
self.assert_compile(
- select([literal_column("*")], t2alias.c.col1 == s),
+ select(literal_column("*")).where(t2alias.c.col1 == s),
"SELECT * FROM table2 AS t2alias WHERE "
"t2alias.col1 = (SELECT * FROM table1 AS "
"t1alias)",
)
s = CloningVisitor().traverse(s)
self.assert_compile(
- select([literal_column("*")], t2alias.c.col1 == s),
+ select(literal_column("*")).where(t2alias.c.col1 == s),
"SELECT * FROM table2 AS t2alias WHERE "
"t2alias.col1 = (SELECT * FROM table1 AS "
"t1alias)",
ualias = users.alias()
- s = select(
- [func.count(addresses.c.id)], users.c.id == addresses.c.user_id
- ).correlate(users)
+ s = (
+ select(func.count(addresses.c.id))
+ .where(users.c.id == addresses.c.user_id)
+ .correlate(users)
+ )
s = sql_util.ClauseAdapter(ualias).traverse(s)
j1 = addresses.join(ualias, addresses.c.user_id == ualias.c.id)
t1alias = t1.alias("t1alias")
vis = sql_util.ClauseAdapter(t1alias)
self.assert_compile(
- vis.traverse(select([literal_column("*")], from_obj=[t1])),
+ vis.traverse(select(literal_column("*")).select_from(t1)),
"SELECT * FROM table1 AS t1alias",
)
vis = sql_util.ClauseAdapter(t1alias)
self.assert_compile(
vis.traverse(
- select([literal_column("*")], t1.c.col1 == t2.c.col2)
+ select(literal_column("*")).where(t1.c.col1 == t2.c.col2)
),
"SELECT * FROM table1 AS t1alias, table2 "
"WHERE t1alias.col1 = table2.col2",
vis = sql_util.ClauseAdapter(t1alias)
self.assert_compile(
vis.traverse(
- select(
- [literal_column("*")],
- t1.c.col1 == t2.c.col2,
- from_obj=[t1, t2],
- )
+ select(literal_column("*"))
+ .where(t1.c.col1 == t2.c.col2)
+ .select_from(t1, t2)
),
"SELECT * FROM table1 AS t1alias, table2 "
"WHERE t1alias.col1 = table2.col2",
select(t1alias, t2).where(
t1alias.c.col1
== vis.traverse(
- select(
- [literal_column("*")],
- t1.c.col1 == t2.c.col2,
- from_obj=[t1, t2],
- )
+ select(literal_column("*"))
+ .where(t1.c.col1 == t2.c.col2)
+ .select_from(t1, t2)
.correlate(t1)
.scalar_subquery()
)
select(t1alias, t2).where(
t1alias.c.col1
== vis.traverse(
- select(
- [literal_column("*")],
- t1.c.col1 == t2.c.col2,
- from_obj=[t1, t2],
- )
+ select(literal_column("*"))
+ .where(t1.c.col1 == t2.c.col2)
+ .select_from(t1, t2)
.correlate(t2)
.scalar_subquery()
)
)
def test_table_to_alias_9(self):
- s = select([literal_column("*")], from_obj=[t1]).alias("foo")
+ s = select(literal_column("*")).select_from(t1).alias("foo")
self.assert_compile(
s.select(), "SELECT foo.* FROM (SELECT * FROM table1) " "AS foo"
)
def test_table_to_alias_10(self):
- s = select([literal_column("*")], from_obj=[t1]).alias("foo")
+ s = select(literal_column("*")).select_from(t1).alias("foo")
t1alias = t1.alias("t1alias")
vis = sql_util.ClauseAdapter(t1alias)
self.assert_compile(
)
def test_table_to_alias_11(self):
- s = select([literal_column("*")], from_obj=[t1]).alias("foo")
+ s = select(literal_column("*")).select_from(t1).alias("foo")
self.assert_compile(
s.select(), "SELECT foo.* FROM (SELECT * FROM table1) " "AS foo"
)
vis.chain(sql_util.ClauseAdapter(t2alias))
self.assert_compile(
vis.traverse(
- select([literal_column("*")], t1.c.col1 == t2.c.col2)
+ select(literal_column("*")).where(t1.c.col1 == t2.c.col2)
),
"SELECT * FROM table1 AS t1alias, table2 "
"AS t2alias WHERE t1alias.col1 = "
vis.chain(sql_util.ClauseAdapter(t2alias))
self.assert_compile(
vis.traverse(
- select(["*"], t1.c.col1 == t2.c.col2, from_obj=[t1, t2])
+ select("*").where(t1.c.col1 == t2.c.col2).select_from(t1, t2)
),
"SELECT * FROM table1 AS t1alias, table2 "
"AS t2alias WHERE t1alias.col1 = "
select(t1alias, t2alias).where(
t1alias.c.col1
== vis.traverse(
- select(["*"], t1.c.col1 == t2.c.col2, from_obj=[t1, t2])
+ select("*")
+ .where(t1.c.col1 == t2.c.col2)
+ .select_from(t1, t2)
.correlate(t1)
.scalar_subquery()
)
t2alias.select().where(
t2alias.c.col2
== vis.traverse(
- select(["*"], t1.c.col1 == t2.c.col2, from_obj=[t1, t2])
+ select("*")
+ .where(t1.c.col1 == t2.c.col2)
+ .select_from(t1, t2)
.correlate(t2)
.scalar_subquery()
)
)
j1 = a.outerjoin(b)
- j2 = select([j1], use_labels=True).subquery()
+ j2 = select(j1).apply_labels().subquery()
j3 = c.join(j2, j2.c.b_id == c.c.bid)
def test_everything_is_connected(self):
query = (
- select([self.a])
+ select(self.a)
.select_from(self.a.join(self.b, self.a.c.col_a == self.b.c.col_b))
.select_from(self.c)
.select_from(self.d)
assert not froms
def test_plain_cartesian(self):
- query = select([self.a]).where(self.b.c.col_b == 5)
+ query = select(self.a).where(self.b.c.col_b == 5)
froms, start = find_unmatching_froms(query, self.a)
assert start == self.a
assert froms == {self.b}
assert froms == {self.a}
def test_count_non_eq_comparison_operators(self):
- query = select([self.a]).where(self.a.c.col_a > self.b.c.col_b)
+ query = select(self.a).where(self.a.c.col_a > self.b.c.col_b)
froms, start = find_unmatching_froms(query, self.a)
is_(start, None)
is_(froms, None)
def test_dont_count_non_comparison_operators(self):
- query = select([self.a]).where(self.a.c.col_a + self.b.c.col_b == 5)
+ query = select(self.a).where(self.a.c.col_a + self.b.c.col_b == 5)
froms, start = find_unmatching_froms(query, self.a)
assert start == self.a
assert froms == {self.b}
def test_disconnect_between_ab_cd(self):
query = (
- select([self.a])
+ select(self.a)
.select_from(self.a.join(self.b, self.a.c.col_a == self.b.c.col_b))
.select_from(self.c)
.select_from(self.d)
def test_c_and_d_both_disconnected(self):
query = (
- select([self.a])
+ select(self.a)
.select_from(self.a.join(self.b, self.a.c.col_a == self.b.c.col_b))
.where(self.c.c.col_c == 5)
.where(self.d.c.col_d == 10)
def test_now_connected(self):
query = (
- select([self.a])
+ select(self.a)
.select_from(self.a.join(self.b, self.a.c.col_a == self.b.c.col_b))
.select_from(self.c.join(self.d, self.c.c.col_c == self.d.c.col_d))
.where(self.c.c.col_c == self.b.c.col_b)
def test_disconnected_subquery(self):
subq = (
- select([self.a]).where(self.a.c.col_a == self.b.c.col_b).subquery()
+ select(self.a).where(self.a.c.col_a == self.b.c.col_b).subquery()
)
- stmt = select([self.c]).select_from(subq)
+ stmt = select(self.c).select_from(subq)
froms, start = find_unmatching_froms(stmt, self.c)
assert start == self.c
def test_now_connect_it(self):
subq = (
- select([self.a]).where(self.a.c.col_a == self.b.c.col_b).subquery()
+ select(self.a).where(self.a.c.col_a == self.b.c.col_b).subquery()
)
stmt = (
- select([self.c])
+ select(self.c)
.select_from(subq)
.where(self.c.c.col_c == subq.c.col_a)
)
assert not froms
def test_right_nested_join_without_issue(self):
- query = select([self.a]).select_from(
+ query = select(self.a).select_from(
self.a.join(
self.b.join(self.c, self.b.c.col_b == self.c.c.col_c),
self.a.c.col_a == self.b.c.col_b,
# actually a join condition. this essentially allows a cartesian
# product to be added explicitly.
- query = select([self.a]).select_from(self.a.join(self.b, true()))
+ query = select(self.a).select_from(self.a.join(self.b, true()))
froms, start = find_unmatching_froms(query)
assert not froms
def test_right_nested_join_with_an_issue(self):
query = (
- select([self.a])
+ select(self.a)
.select_from(
self.a.join(
self.b.join(self.c, self.b.c.col_b == self.c.c.col_c),
assert froms == {self.a, self.b, self.c}
def test_no_froms(self):
- query = select([1])
+ query = select(1)
froms, start = find_unmatching_froms(query)
assert not froms
def test_does_not_modify_query(self):
with self.bind.connect() as conn:
- [result] = conn.execute(select([1])).fetchone()
+ [result] = conn.execute(select(1)).fetchone()
assert result == 1
def test_warn_simple(self):
a, b = self.tables("table_a", "table_b")
- query = select([a.c.col_a]).where(b.c.col_b == 5)
+ query = select(a.c.col_a).where(b.c.col_b == 5)
with expect_warnings(
r"SELECT statement has a cartesian product between FROM "
a, b = self.tables("table_a", "table_b")
b_alias = b.alias()
- query = select([a.c.col_a]).where(b_alias.c.col_b == 5)
+ query = select(a.c.col_a).where(b_alias.c.col_b == 5)
with expect_warnings(
r"SELECT statement has a cartesian product between FROM "
def test_warn_anon_cte(self):
a, b = self.tables("table_a", "table_b")
- b_cte = select([b]).cte()
- query = select([a.c.col_a]).where(b_cte.c.col_b == 5)
+ b_cte = select(b).cte()
+ query = select(a.c.col_a).where(b_cte.c.col_b == 5)
with expect_warnings(
r"SELECT statement has a cartesian product between "
eng = engines.testing_engine(options={"enable_from_linting": False})
eng.pool = self.bind.pool # needed for SQLite
a, b = self.tables("table_a", "table_b")
- query = select([a.c.col_a]).where(b.c.col_b == 5)
+ query = select(a.c.col_a).where(b.c.col_b == 5)
with eng.connect() as conn:
conn.execute(query)
def test_use_labels(self):
self.assert_compile(
- select([func.foo()], use_labels=True), "SELECT foo() AS foo_1"
+ select(func.foo()).apply_labels(), "SELECT foo() AS foo_1"
)
def test_use_labels_function_element(self):
return "max(%s)" % compiler.process(element.clauses, **kw)
self.assert_compile(
- select([max_(5, 6)], use_labels=True),
+ select(max_(5, 6)).apply_labels(),
"SELECT max(:max_2, :max_3) AS max_1",
)
column("q"),
)
- stmt = select([func.sum(t.c.value)])
+ stmt = select(func.sum(t.c.value))
self.assert_compile(
stmt.group_by(func.cube(t.c.x, t.c.y)),
# test it in a SELECT
self.assert_compile(
- select([func.count(table1.c.myid)]),
+ select(func.count(table1.c.myid)),
"SELECT count(mytable.myid) AS count_1 FROM mytable",
)
# test a "dotted" function name
self.assert_compile(
- select([func.foo.bar.lala(table1.c.myid)]),
+ select(func.foo.bar.lala(table1.c.myid)),
"SELECT foo.bar.lala(mytable.myid) AS lala_1 FROM mytable",
)
# test the bind parameter name with a "dotted" function name is
# only the name (limits the length of the bind param name)
self.assert_compile(
- select([func.foo.bar.lala(12)]),
+ select(func.foo.bar.lala(12)),
"SELECT foo.bar.lala(:lala_2) AS lala_1",
)
users = table(
"users", column("id"), column("name"), column("fullname")
)
- calculate = select(
- [column("q"), column("z"), column("r")],
- from_obj=[
+ calculate = (
+ select(column("q"), column("z"), column("r"))
+ .select_from(
func.calculate(bindparam("x", None), bindparam("y", None))
- ],
- ).subquery()
+ )
+ .subquery()
+ )
self.assert_compile(
- select([users], users.c.id > calculate.c.z),
+ select(users).where(users.c.id > calculate.c.z),
"SELECT users.id, users.name, users.fullname "
"FROM users, (SELECT q, z, r "
"FROM calculate(:x, :y)) AS anon_1 "
"WHERE users.id > anon_1.z",
)
- s = select(
- [users],
+ s = select(users).where(
users.c.id.between(
calculate.alias("c1").unique_params(x=17, y=45).c.z,
calculate.alias("c2").unique_params(x=5, y=12).c.z,
def test_select_method_two(self):
expr = func.rows("foo")
self.assert_compile(
- select(["*"]).select_from(expr.select().subquery()),
+ select("*").select_from(expr.select().subquery()),
"SELECT * FROM (SELECT rows(:rows_2) AS rows_1) AS anon_1",
)
def test_select_method_three(self):
expr = func.rows("foo")
self.assert_compile(
- select([column("foo")]).select_from(expr),
+ select(column("foo")).select_from(expr),
"SELECT foo FROM rows(:rows_1)",
)
def test_alias_method_two(self):
expr = func.rows("foo")
self.assert_compile(
- select(["*"]).select_from(expr.alias("bar")),
+ select("*").select_from(expr.alias("bar")),
"SELECT * FROM rows(:rows_1) AS bar",
)
# testing here that the expression exports its column
# list in a way that at least doesn't break.
self.assert_compile(
- select([expr]), "SELECT bar.rows_1 FROM rows(:rows_2) AS bar"
+ select(expr), "SELECT bar.rows_1 FROM rows(:rows_2) AS bar"
)
def test_alias_method_columns_two(self):
def test_funcfilter_label(self):
self.assert_compile(
select(
- [
- func.count(1)
- .filter(table1.c.description != None) # noqa
- .label("foo")
- ]
+ func.count(1)
+ .filter(table1.c.description != None) # noqa
+ .label("foo")
),
"SELECT count(:count_1) FILTER (WHERE mytable.description "
"IS NOT NULL) AS foo FROM mytable",
# from func:
self.assert_compile(
select(
- [
- func.max(table1.c.name).filter(
- literal_column("description") != None # noqa
- )
- ]
+ func.max(table1.c.name).filter(
+ literal_column("description") != None # noqa
+ )
),
"SELECT max(mytable.name) FILTER (WHERE description "
"IS NOT NULL) AS anon_1 FROM mytable",
def test_funcfilter_fromobj_fromcriterion(self):
# from criterion:
self.assert_compile(
- select([func.count(1).filter(table1.c.name == "name")]),
+ select(func.count(1).filter(table1.c.name == "name")),
"SELECT count(:count_1) FILTER (WHERE mytable.name = :name_1) "
"AS anon_1 FROM mytable",
)
# test chaining:
self.assert_compile(
select(
- [
- func.count(1)
- .filter(table1.c.name == "name")
- .filter(table1.c.description == "description")
- ]
+ func.count(1)
+ .filter(table1.c.name == "name")
+ .filter(table1.c.description == "description")
),
"SELECT count(:count_1) FILTER (WHERE "
"mytable.name = :name_1 AND mytable.description = :description_1) "
# test filtered windowing:
self.assert_compile(
select(
- [
- func.rank()
- .filter(table1.c.name > "foo")
- .over(order_by=table1.c.name)
- ]
+ func.rank()
+ .filter(table1.c.name > "foo")
+ .over(order_by=table1.c.name)
),
"SELECT rank() FILTER (WHERE mytable.name > :name_1) "
"OVER (ORDER BY mytable.name) AS anon_1 FROM mytable",
def test_funcfilter_windowing_orderby_partitionby(self):
self.assert_compile(
select(
- [
- func.rank()
- .filter(table1.c.name > "foo")
- .over(order_by=table1.c.name, partition_by=["description"])
- ]
+ func.rank()
+ .filter(table1.c.name > "foo")
+ .over(order_by=table1.c.name, partition_by=["description"])
),
"SELECT rank() FILTER (WHERE mytable.name > :name_1) "
"OVER (PARTITION BY mytable.description ORDER BY mytable.name) "
def test_funcfilter_windowing_range(self):
self.assert_compile(
select(
- [
- func.rank()
- .filter(table1.c.name > "foo")
- .over(range_=(1, 5), partition_by=["description"])
- ]
+ func.rank()
+ .filter(table1.c.name > "foo")
+ .over(range_=(1, 5), partition_by=["description"])
),
"SELECT rank() FILTER (WHERE mytable.name > :name_1) "
"OVER (PARTITION BY mytable.description RANGE BETWEEN :param_1 "
def test_funcfilter_windowing_rows(self):
self.assert_compile(
select(
- [
- func.rank()
- .filter(table1.c.name > "foo")
- .over(rows=(1, 5), partition_by=["description"])
- ]
+ func.rank()
+ .filter(table1.c.name > "foo")
+ .over(rows=(1, 5), partition_by=["description"])
),
"SELECT rank() FILTER (WHERE mytable.name > :name_1) "
"OVER (PARTITION BY mytable.description ROWS BETWEEN :param_1 "
def test_funcfilter_within_group(self):
stmt = select(
- [
- table1.c.myid,
- func.percentile_cont(0.5).within_group(table1.c.name),
- ]
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(table1.c.name),
)
self.assert_compile(
stmt,
def test_funcfilter_within_group_multi(self):
stmt = select(
- [
- table1.c.myid,
- func.percentile_cont(0.5).within_group(
- table1.c.name, table1.c.description
- ),
- ]
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(
+ table1.c.name, table1.c.description
+ ),
)
self.assert_compile(
stmt,
def test_funcfilter_within_group_desc(self):
stmt = select(
- [
- table1.c.myid,
- func.percentile_cont(0.5).within_group(table1.c.name.desc()),
- ]
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(table1.c.name.desc()),
)
self.assert_compile(
stmt,
def test_funcfilter_within_group_w_over(self):
stmt = select(
- [
- table1.c.myid,
- func.percentile_cont(0.5)
- .within_group(table1.c.name.desc())
- .over(partition_by=table1.c.description),
- ]
+ table1.c.myid,
+ func.percentile_cont(0.5)
+ .within_group(table1.c.name.desc())
+ .over(partition_by=table1.c.description),
)
self.assert_compile(
stmt,
stuff="hi",
)
- res = sorted(connection.execute(select([t2.c.value, t2.c.stuff])))
+ res = sorted(connection.execute(select(t2.c.value, t2.c.stuff)))
eq_(res, [(-14, "hi"), (3, None), (7, None)])
connection.execute(
stuff="some stuff",
)
eq_(
- connection.execute(select([t2.c.value, t2.c.stuff])).fetchall(),
+ connection.execute(select(t2.c.value, t2.c.stuff)).fetchall(),
[(9, "some stuff"), (9, "some stuff"), (9, "some stuff")],
)
connection.execute(t2.update(values=dict(value=func.length("asfda"))))
eq_(
- connection.execute(select([t2.c.value, t2.c.stuff])).first(),
+ connection.execute(select(t2.c.value, t2.c.stuff)).first(),
(5, "thisisstuff"),
)
)
eq_(
- connection.execute(select([t2.c.value, t2.c.stuff])).first(),
+ connection.execute(select(t2.c.value, t2.c.stuff)).first(),
(9, "foo"),
)
).scalar()
z = connection.scalar(func.current_date(bind=testing.db))
w = connection.scalar(
- select(["*"], from_obj=[func.current_date(bind=testing.db)])
+ select("*").select_from(func.current_date(bind=testing.db))
)
assert x == y == z == w
date = datetime.date(2010, 5, 1)
def execute(field):
- return connection.execute(select([extract(field, date)])).scalar()
+ return connection.execute(select(extract(field, date))).scalar()
assert execute("year") == 2010
assert execute("month") == 5
},
)
rs = connection.execute(
- select([extract("year", table.c.dt), extract("month", table.c.d)])
+ select(extract("year", table.c.dt), extract("month", table.c.d))
)
row = rs.first()
assert row[0] == 2010
Column(
"col2",
Integer,
- default=select([func.coalesce(func.max(foo.c.id))]),
+ default=select(func.coalesce(func.max(foo.c.id))),
),
)
def test_insert_from_select_returning(self):
table1 = self.tables.mytable
- sel = select([table1.c.myid, table1.c.name]).where(
+ sel = select(table1.c.myid, table1.c.name).where(
table1.c.name == "foo"
)
ins = (
def test_insert_from_select_select(self):
table1 = self.tables.mytable
- sel = select([table1.c.myid, table1.c.name]).where(
+ sel = select(table1.c.myid, table1.c.name).where(
table1.c.name == "foo"
)
ins = self.tables.myothertable.insert().from_select(
Column("data", String),
)
- stmt = t1.insert().from_select(("data",), select([t1.c.data]))
+ stmt = t1.insert().from_select(("data",), select(t1.c.data))
self.assert_compile(
stmt,
def test_insert_from_select_cte_one(self):
table1 = self.tables.mytable
- cte = select([table1.c.name]).where(table1.c.name == "bar").cte()
+ cte = select(table1.c.name).where(table1.c.name == "bar").cte()
- sel = select([table1.c.myid, table1.c.name]).where(
+ sel = select(table1.c.myid, table1.c.name).where(
table1.c.name == cte.c.name
)
table1 = self.tables.mytable
- cte = select([table1.c.name]).where(table1.c.name == "bar").cte()
+ cte = select(table1.c.name).where(table1.c.name == "bar").cte()
- sel = select([table1.c.myid, table1.c.name]).where(
+ sel = select(table1.c.myid, table1.c.name).where(
table1.c.name == cte.c.name
)
def test_insert_from_select_select_alt_ordering(self):
table1 = self.tables.mytable
- sel = select([table1.c.name, table1.c.myid]).where(
+ sel = select(table1.c.name, table1.c.myid).where(
table1.c.name == "foo"
)
ins = self.tables.myothertable.insert().from_select(
Column("foo", Integer, default=func.foobar()),
)
table1 = self.tables.mytable
- sel = select([table1.c.myid]).where(table1.c.name == "foo")
+ sel = select(table1.c.myid).where(table1.c.name == "foo")
ins = table.insert().from_select(["id"], sel, include_defaults=False)
self.assert_compile(
ins,
Column("foo", Integer, default=func.foobar()),
)
table1 = self.tables.mytable
- sel = select([table1.c.myid]).where(table1.c.name == "foo")
+ sel = select(table1.c.myid).where(table1.c.name == "foo")
ins = table.insert().from_select(["id"], sel)
self.assert_compile(
ins,
Column("foo", Integer, default=12),
)
table1 = self.tables.mytable
- sel = select([table1.c.myid]).where(table1.c.name == "foo")
+ sel = select(table1.c.myid).where(table1.c.name == "foo")
ins = table.insert().from_select(["id"], sel)
self.assert_compile(
ins,
Column("foo", Integer, default=12),
)
table1 = self.tables.mytable
- sel = select([table1.c.myid, table1.c.myid.label("q")]).where(
+ sel = select(table1.c.myid, table1.c.myid.label("q")).where(
table1.c.name == "foo"
)
ins = table.insert().from_select(["id", "foo"], sel)
Column("foo", Integer, default=foo),
)
table1 = self.tables.mytable
- sel = select([table1.c.myid]).where(table1.c.name == "foo")
+ sel = select(table1.c.myid).where(table1.c.name == "foo")
ins = table.insert().from_select(["id"], sel)
self.assert_compile(
ins,
Column("bar", String, default="baz"),
)
- stmt = select([table_.c.foo])
+ stmt = select(table_.c.foo)
insert = table_.insert().from_select(["foo"], stmt)
self.assert_compile(stmt, "SELECT mytable.foo FROM mytable")
def test_insert_mix_select_values_exception(self):
table1 = self.tables.mytable
- sel = select([table1.c.myid, table1.c.name]).where(
+ sel = select(table1.c.myid, table1.c.name).where(
table1.c.name == "foo"
)
ins = self.tables.myothertable.insert().from_select(
def test_insert_mix_values_select_exception(self):
table1 = self.tables.mytable
- sel = select([table1.c.myid, table1.c.name]).where(
+ sel = select(table1.c.myid, table1.c.name).where(
table1.c.name == "foo"
)
ins = self.tables.myothertable.insert().values(othername="5")
name = column("name")
description = column("desc")
- sel = select([name, mytable.c.description]).union(
- select([name, description])
+ sel = select(name, mytable.c.description).union(
+ select(name, description)
)
ins = mytable.insert().from_select(
[mytable.c.name, mytable.c.description], sel
def test_insert_from_select_col_values(self):
table1 = self.tables.mytable
table2 = self.tables.myothertable
- sel = select([table1.c.myid, table1.c.name]).where(
+ sel = select(table1.c.myid, table1.c.name).where(
table1.c.name == "foo"
)
ins = table2.insert().from_select(
def test_insert_select(self):
table1 = self.tables.mytable
- sel = select([table1.c.myid, table1.c.name]).where(
+ sel = select(table1.c.myid, table1.c.name).where(
table1.c.name == "foo"
)
ins = self.tables.myothertable.insert().from_select(
def test_insert_select_return_defaults(self):
table1 = self.tables.mytable
- sel = select([table1.c.myid, table1.c.name]).where(
+ sel = select(table1.c.myid, table1.c.name).where(
table1.c.name == "foo"
)
ins = (
ta = table2.alias()
on = table1.c.this_is_the_data_column == ta.c.this_is_the_data_column
self.assert_compile(
- select([table1, ta])
+ select(table1, ta)
.select_from(table1.join(ta, on))
.where(ta.c.this_is_the_data_column == "data3"),
"SELECT "
s = table1.select(table1.c.this_is_the_primarykey_column == 4).alias(
"foo"
)
- s2 = select([s])
+ s2 = select(s)
compiled = s2.compile(dialect=self._length_fixture())
assert set(
compiled._create_result_map()["this_is_the_data_column"][1]
dialect = self._length_fixture()
q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias()
- s = select([q]).apply_labels()
+ s = select(q).apply_labels()
self.assert_compile(
s,
q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias(
"foo"
)
- x = select([q])
+ x = select(q)
compile_dialect = default.DefaultDialect(label_length=10)
self.assert_compile(
x,
q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias(
"foo"
)
- x = select([q])
+ x = select(q)
compile_dialect = default.DefaultDialect(label_length=10)
self.assert_compile(
q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias(
"foo"
)
- x = select([q])
+ x = select(q)
self.assert_compile(
x,
table1 = self.table1
q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias()
- x = select([q], use_labels=True)
+ x = select(q).apply_labels()
compile_dialect = default.DefaultDialect(label_length=10)
self.assert_compile(
def test_adjustable_5(self):
table1 = self.table1
q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias()
- x = select([q], use_labels=True)
+ x = select(q).apply_labels()
compile_dialect = default.DefaultDialect(label_length=4)
self.assert_compile(
q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias(
"foo"
)
- x = select([q])
+ x = select(q)
dialect = default.DefaultDialect(label_length=10)
compiled = x.compile(dialect=dialect)
)
self.assert_compile(
- select([other_table, anon]).select_from(j1).apply_labels(),
+ select(other_table, anon).select_from(j1).apply_labels(),
"SELECT "
"other_thirty_characters_table_.id "
"AS other_thirty_characters__1, "
# 'abcde' is longer than 4, but is rendered as itself
# and needs to retain all of its characters
- s = select([a1])
+ s = select(a1)
self.assert_compile(
- select([a1]), "SELECT asdf.abcde FROM a AS asdf", dialect=dialect
+ select(a1), "SELECT asdf.abcde FROM a AS asdf", dialect=dialect
)
compiled = s.compile(dialect=dialect)
assert set(compiled._create_result_map()["abcde"][1]).issuperset(
)
# column still there, but short label
- s = select([a1]).apply_labels()
+ s = select(a1).apply_labels()
self.assert_compile(
s, "SELECT asdf.abcde AS _1 FROM a AS asdf", dialect=dialect
)
"tablename", column("columnname_one"), column("columnn_1")
)
- stmt = select([table1]).apply_labels()
+ stmt = select(table1).apply_labels()
dialect = default.DefaultDialect(label_length=23)
self.assert_compile(
self.assert_compile(
select(
- [
- table1.c.name,
- table1.c.name,
- expr(table1.c.name),
- expr(table1.c.name),
- ]
+ table1.c.name,
+ table1.c.name,
+ expr(table1.c.name),
+ expr(table1.c.name),
),
"SELECT some_table.name, some_table.name, "
"SOME_COL_THING(some_table.name) AS name, "
table1 = self.table1
self.assert_compile(
- select([table1.c.name + "foo", expr(table1.c.name + "foo")]),
+ select(table1.c.name + "foo", expr(table1.c.name + "foo")),
"SELECT some_table.name || :name_1 AS anon_1, "
"SOME_COL_THING(some_table.name || :name_2) AS anon_2 "
"FROM some_table",
self.assert_compile(
select(
- [table1.c.name + "foo", expr(table1.c.name + "foo")]
+ table1.c.name + "foo", expr(table1.c.name + "foo")
).apply_labels(),
"SELECT some_table.name || :name_1 AS anon_1, "
"SOME_COL_THING(some_table.name || :name_2) AS anon_2 "
self.assert_compile(
select(
- [
- expr(table1.c.name.label("foo")),
- table1.c.name.label("bar"),
- table1.c.value,
- ]
+ expr(table1.c.name.label("foo")),
+ table1.c.name.label("bar"),
+ table1.c.value,
),
"SELECT SOME_COL_THING(some_table.name) AS foo, "
"some_table.name AS bar, some_table.value FROM some_table",
self.assert_compile(
select(
- [
- cast(table1.c.name, Integer),
- cast(table1.c.name, String),
- table1.c.name,
- ]
+ cast(table1.c.name, Integer),
+ cast(table1.c.name, String),
+ table1.c.name,
),
"SELECT CAST(some_table.name AS INTEGER) AS name, "
"CAST(some_table.name AS VARCHAR) AS name, "
self.assert_compile(
select(
- [
- type_coerce(table1.c.name, Integer),
- type_coerce(table1.c.name, String),
- table1.c.name,
- ]
+ type_coerce(table1.c.name, Integer),
+ type_coerce(table1.c.name, String),
+ table1.c.name,
),
# ideally type_coerce wouldn't label at all...
"SELECT some_table.name AS name, "
col = column("value", Boolean)
self.assert_compile(
- select([~col, col]),
+ select(~col, col),
# not certain this SQL is right, but it is what was emitted
# before the new labeling, just with a different label name
"SELECT value = 0 AS value, value",
self.assert_compile(
select(
- [
- expr(table1.c.name.label("foo")),
- table1.c.name.label("bar"),
- table1.c.value,
- ]
+ expr(table1.c.name.label("foo")),
+ table1.c.name.label("bar"),
+ table1.c.value,
).apply_labels(),
# the expr around label is treated the same way as plain column
# with label
self.assert_compile(
select(
- [
- table1.c.name,
- table1.c.name,
- expr(table1.c.name),
- expr(table1.c.name),
- ]
+ table1.c.name,
+ table1.c.name,
+ expr(table1.c.name),
+ expr(table1.c.name),
).apply_labels(),
"SELECT some_table.name AS some_table_name, "
"some_table.name AS some_table_name__1, "
table1 = self.table1
self.assert_compile(
- select([table1.c.name, expr(table1.c.value)]).apply_labels(),
+ select(table1.c.name, expr(table1.c.value)).apply_labels(),
"SELECT some_table.name AS some_table_name, "
"SOME_COL_THING(some_table.value) "
"AS some_table_value FROM some_table",
y = 5
def go():
- return select([t1]).where(lambda: and_(t1.c.q == x, t1.c.p == y))
+ return select(t1).where(lambda: and_(t1.c.q == x, t1.c.p == y))
self.assert_compile(
go(), "SELECT t1.q, t1.p FROM t1 WHERE t1.q = :x_1 AND t1.p = :y_1"
global_y = 17
def go():
- return select([t1]).where(
+ return select(t1).where(
lambda: and_(t1.c.q == global_x, t1.c.p == global_y)
)
assert_raises_message(
exc.ArgumentError,
"SQL expression for WHERE/HAVING role expected, got 5",
- select([column("q")]).where,
+ select(column("q")).where,
5,
)
{"compile_state_plugin": "x", "plugin_subject": "y"}
)
- stmt = lambdas.lambda_stmt(lambda: select([col]))
+ stmt = lambdas.lambda_stmt(lambda: select(col))
eq_(
stmt._propagate_attrs,
{"compile_state_plugin": "x", "plugin_subject": "y"}
)
- stmt = select([lambda: col])
+ stmt = select(lambda: col)
eq_(
stmt._propagate_attrs,
def test_select_legacy_expanding_columns(self):
q, p, r = column("q"), column("p"), column("r")
- stmt = select([lambda: (q, p, r)])
+ stmt = select(lambda: (q, p, r))
self.assert_compile(stmt, "SELECT q, p, r")
t2 = table("t2", column("y"))
def go():
- return select([t1]).select_from(
+ return select(t1).select_from(
lambda: join(t1, t2, lambda: t1.c.q == t2.c.y)
)
def test_in_parameters_one(self):
- expr1 = select([1]).where(column("q").in_(["a", "b", "c"]))
+ expr1 = select(1).where(column("q").in_(["a", "b", "c"]))
self.assert_compile(expr1, "SELECT 1 WHERE q IN ([POSTCOMPILE_q_1])")
self.assert_compile(
)
def test_in_parameters_two(self):
- expr2 = select([1]).where(lambda: column("q").in_(["a", "b", "c"]))
+ expr2 = select(1).where(lambda: column("q").in_(["a", "b", "c"]))
self.assert_compile(expr2, "SELECT 1 WHERE q IN ([POSTCOMPILE_q_1])")
self.assert_compile(
expr2,
def test_in_parameters_three(self):
expr3 = lambdas.lambda_stmt(
- lambda: select([1]).where(column("q").in_(["a", "b", "c"]))
+ lambda: select(1).where(column("q").in_(["a", "b", "c"]))
)
self.assert_compile(expr3, "SELECT 1 WHERE q IN ([POSTCOMPILE_q_1])")
self.assert_compile(
def test_in_parameters_four(self):
def go(names):
return lambdas.lambda_stmt(
- lambda: select([1]).where(column("q").in_(names))
+ lambda: select(1).where(column("q").in_(names))
)
expr4 = go(["a", "b", "c"])
def test_in_parameters_five(self):
def go(n1, n2):
stmt = lambdas.lambda_stmt(
- lambda: select([1]).where(column("q").in_(n1))
+ lambda: select(1).where(column("q").in_(n1))
)
stmt += lambda s: s.where(column("y").in_(n2))
return stmt
g = 5
def go():
- return select([lambda: t1.c.q, lambda: t1.c.p + g])
+ return select(lambda: t1.c.q, lambda: t1.c.p + g)
stmt = go()
self.assert_compile(
users, addresses = user_address_fixture
stmt = (
- select([users])
+ select(users)
.select_from(
users.join(
addresses, lambda: users.c.id == addresses.c.user_id
users, addresses = user_address_fixture
def go(name):
- stmt = select([lambda: users.c.id]).where(
+ stmt = select(lambda: users.c.id).where(
lambda: users.c.name == name
)
with testing.db.connect().execution_options(
def go(name):
stmt = lambda_stmt(
- lambda: select([users.c.id]).where( # noqa
- users.c.name == name
- )
+ lambda: select(users.c.id).where(users.c.name == name) # noqa
)
with testing.db.connect().execution_options(
cache = {}
def go(name):
- stmt = select([lambda: users.c.id]).where(
+ stmt = select(lambda: users.c.id).where(
lambda: users.c.name == name
)
def go(name):
stmt = lambda_stmt(
- lambda: select([users.c.id]).where( # noqa
- users.c.name == name
- )
+ lambda: select(users.c.id).where(users.c.name == name) # noqa
)
with testing.db.connect().execution_options(
def test_standalone(self):
table1 = self.tables.people
- subq = select([table1.c.people_id]).subquery()
+ subq = select(table1.c.people_id).subquery()
# alias name is not rendered because subquery is not
# in the context of a FROM clause
def test_standalone_implicit_subquery(self):
table1 = self.tables.people
- subq = select([table1.c.people_id])
+ subq = select(table1.c.people_id)
# alias name is not rendered because subquery is not
# in the context of a FROM clause
def test_select_from(self):
table1 = self.tables.people
- subq = select([table1.c.people_id]).subquery()
+ subq = select(table1.c.people_id).subquery()
# in a FROM context, now you get "AS alias" and column labeling
self.assert_compile(
- select([subq.lateral(name="alias")]),
+ select(subq.lateral(name="alias")),
"SELECT alias.people_id FROM LATERAL "
"(SELECT people.people_id AS people_id FROM people) AS alias",
)
def test_select_from_implicit_subquery(self):
table1 = self.tables.people
- subq = select([table1.c.people_id])
+ subq = select(table1.c.people_id)
# in a FROM context, now you get "AS alias" and column labeling
self.assert_compile(
- select([subq.lateral(name="alias")]),
+ select(subq.lateral(name="alias")),
"SELECT alias.people_id FROM LATERAL "
"(SELECT people.people_id AS people_id FROM people) AS alias",
)
# in a FROM context, now you get "AS alias" and column labeling
self.assert_compile(
- select([subq.lateral(name="alias")]),
+ select(subq.lateral(name="alias")),
"SELECT alias.people_id FROM LATERAL "
"(SELECT people_id FROM people) AS alias",
)
def test_plain_join(self):
table1 = self.tables.people
table2 = self.tables.books
- subq = select([table2.c.book_id]).where(
+ subq = select(table2.c.book_id).where(
table2.c.book_owner_id == table1.c.people_id
)
# put it in correct context, implicit correlation works fine
self.assert_compile(
- select([table1]).select_from(
+ select(table1).select_from(
join(table1, lateral(subq.subquery(), name="alias"), true())
),
"SELECT people.people_id, people.age, people.name "
# explicit correlation
subq = subq.correlate(table1)
self.assert_compile(
- select([table1]).select_from(
+ select(table1).select_from(
join(table1, lateral(subq.subquery(), name="alias"), true())
),
"SELECT people.people_id, people.age, people.name "
def test_plain_join_implicit_subquery(self):
table1 = self.tables.people
table2 = self.tables.books
- subq = select([table2.c.book_id]).where(
+ subq = select(table2.c.book_id).where(
table2.c.book_owner_id == table1.c.people_id
)
# put it in correct context, implicit correlation works fine
self.assert_compile(
- select([table1]).select_from(
+ select(table1).select_from(
join(table1, lateral(subq, name="alias"), true())
),
"SELECT people.people_id, people.age, people.name "
# explicit correlation
subq = subq.correlate(table1)
self.assert_compile(
- select([table1]).select_from(
+ select(table1).select_from(
join(table1, lateral(subq, name="alias"), true())
),
"SELECT people.people_id, people.age, people.name "
table2 = self.tables.books
subq = (
- select([table2.c.book_id])
+ select(table2.c.book_id)
.correlate(table1)
.where(table1.c.people_id == table2.c.book_owner_id)
.subquery()
.lateral()
)
- stmt = select([table1, subq.c.book_id]).select_from(
+ stmt = select(table1, subq.c.book_id).select_from(
table1.join(subq, true())
)
table2 = self.tables.books
subq = (
- select([table2.c.book_id])
+ select(table2.c.book_id)
.correlate(table1)
.where(table1.c.people_id == table2.c.book_owner_id)
.lateral()
)
- stmt = select([table1, subq.c.book_id]).select_from(
+ stmt = select(table1, subq.c.book_id).select_from(
table1.join(subq, true())
)
srf = lateral(func.generate_series(1, bookcases.c.bookcase_shelves))
self.assert_compile(
- select([bookcases]).select_from(bookcases.join(srf, true())),
+ select(bookcases).select_from(bookcases.join(srf, true())),
"SELECT bookcases.bookcase_id, bookcases.bookcase_owner_id, "
"bookcases.bookcase_shelves, bookcases.bookcase_width "
"FROM bookcases JOIN "
def test_column_references_derived(self):
t1, t2, t3 = self._single_fixture()
- s1 = tsa.select([tsa.select([t1]).alias()]).subquery()
+ s1 = tsa.select(tsa.select(t1).alias()).subquery()
assert t2.c.a.references(s1.c.a)
assert not t2.c.a.references(s1.c.b)
def test_derived_column_references(self):
t1, t2, t3 = self._single_fixture()
- s1 = tsa.select([tsa.select([t2]).alias()]).subquery()
+ s1 = tsa.select(tsa.select(t2).alias()).subquery()
assert s1.c.a.references(t1.c.a)
assert not s1.c.a.references(t1.c.b)
Column("id", Integer, ForeignKey("t1.id"), primary_key=True),
)
- s = tsa.select([t2]).subquery()
+ s = tsa.select(t2).subquery()
t2fk = list(t2.c.id.foreign_keys)[0]
sfk = list(s.c.id.foreign_keys)[0]
eq_(t1.c.name.my_goofy_thing(), "hi")
# create proxy
- s = select([t1.select().alias()])
+ s = select(t1.select().alias())
# proxy has goofy thing
eq_(s.subquery().c.name.my_goofy_thing(), "hi")
# compile works
self.assert_compile(
- select([t1.select().alias()]),
+ select(t1.select().alias()),
"SELECT anon_1.id-, anon_1.name- FROM "
"(SELECT foo.id- AS id, foo.name- AS name "
"FROM foo) AS anon_1",
"'test.sql.test_metadata..*MyColumn'> "
"object. Ensure the class includes a _constructor()",
getattr,
- select([t1.select().alias()]).subquery(),
+ select(t1.select().alias()).subquery(),
"c",
)
def test_one(self):
c = column("x", Boolean)
self.assert_compile(
- select([c]).where(c),
+ select(c).where(c),
"SELECT x WHERE x",
dialect=self._dialect(True),
)
def test_two_a(self):
c = column("x", Boolean)
self.assert_compile(
- select([c]).where(c),
+ select(c).where(c),
"SELECT x WHERE x = 1",
dialect=self._dialect(False),
)
def test_two_b(self):
c = column("x", Boolean)
self.assert_compile(
- select([c], whereclause=c),
+ select(c).where(c),
"SELECT x WHERE x = 1",
dialect=self._dialect(False),
)
def test_three_a(self):
c = column("x", Boolean)
self.assert_compile(
- select([c]).where(~c),
+ select(c).where(~c),
"SELECT x WHERE x = 0",
dialect=self._dialect(False),
)
def test_three_a_double(self):
c = column("x", Boolean)
self.assert_compile(
- select([c]).where(~~c),
+ select(c).where(~~c),
"SELECT x WHERE x = 1",
dialect=self._dialect(False),
)
def test_three_b(self):
c = column("x", Boolean)
self.assert_compile(
- select([c], whereclause=~c),
+ select(c).where(~c),
"SELECT x WHERE x = 0",
dialect=self._dialect(False),
)
def test_four(self):
c = column("x", Boolean)
self.assert_compile(
- select([c]).where(~c),
+ select(c).where(~c),
"SELECT x WHERE NOT x",
dialect=self._dialect(True),
)
def test_four_double(self):
c = column("x", Boolean)
self.assert_compile(
- select([c]).where(~~c),
+ select(c).where(~~c),
"SELECT x WHERE x",
dialect=self._dialect(True),
)
def test_five_a(self):
c = column("x", Boolean)
self.assert_compile(
- select([c]).having(c),
+ select(c).having(c),
"SELECT x HAVING x = 1",
dialect=self._dialect(False),
)
def test_five_b(self):
c = column("x", Boolean)
self.assert_compile(
- select([c], having=c),
+ select(c).having(c),
"SELECT x HAVING x = 1",
dialect=self._dialect(False),
)
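
The ``whereclause`` and ``having`` keyword arguments seen in the hunks above move to the
generative ``Select.where()`` and ``Select.having()`` methods. A minimal sketch of the
equivalence, using a standalone ``column()`` as these tests do (the rendered SQL will vary
with the dialect in use)::

    from sqlalchemy import Boolean, column, select

    c = column("x", Boolean)

    # legacy form (no longer accepted): select([c], whereclause=c, having=c)
    # generative form used throughout the converted tests:
    stmt = select(c).where(c).having(c)
    print(stmt)
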
def test_six_pt_five(self):
x = column("x")
self.assert_compile(
- select([x]).where(or_(x == 7, true())), "SELECT x WHERE true"
+ select(x).where(or_(x == 7, true())), "SELECT x WHERE true"
)
self.assert_compile(
- select([x]).where(or_(x == 7, true())),
+ select(x).where(or_(x == 7, true())),
"SELECT x WHERE 1 = 1",
dialect=default.DefaultDialect(supports_native_boolean=False),
)
def test_eleven(self):
x = column("x")
self.assert_compile(
- select([x]).where(None).where(None), "SELECT x WHERE NULL AND NULL"
+ select(x).where(None).where(None), "SELECT x WHERE NULL AND NULL"
)
def test_twelve(self):
x = column("x")
self.assert_compile(
- select([x]).where(and_(None, None)), "SELECT x WHERE NULL AND NULL"
+ select(x).where(and_(None, None)), "SELECT x WHERE NULL AND NULL"
)
def test_thirteen(self):
x = column("x")
self.assert_compile(
- select([x]).where(~and_(None, None)),
+ select(x).where(~and_(None, None)),
"SELECT x WHERE NOT (NULL AND NULL)",
)
def test_fourteen(self):
x = column("x")
self.assert_compile(
- select([x]).where(~null()), "SELECT x WHERE NOT NULL"
+ select(x).where(~null()), "SELECT x WHERE NOT NULL"
)
def test_constants_are_singleton(self):
def test_constant_render_distinct(self):
self.assert_compile(
- select([null(), null()]), "SELECT NULL AS anon_1, NULL AS anon__1"
+ select(null(), null()), "SELECT NULL AS anon_1, NULL AS anon__1"
)
self.assert_compile(
- select([true(), true()]), "SELECT true AS anon_1, true AS anon__1"
+ select(true(), true()), "SELECT true AS anon_1, true AS anon__1"
)
self.assert_compile(
- select([false(), false()]),
+ select(false(), false()),
"SELECT false AS anon_1, false AS anon__1",
)
def test_constant_render_distinct_use_labels(self):
self.assert_compile(
- select([null(), null()]).apply_labels(),
+ select(null(), null()).apply_labels(),
"SELECT NULL AS anon_1, NULL AS anon__1",
)
self.assert_compile(
- select([true(), true()]).apply_labels(),
+ select(true(), true()).apply_labels(),
"SELECT true AS anon_1, true AS anon__1",
)
self.assert_compile(
- select([false(), false()]).apply_labels(),
+ select(false(), false()).apply_labels(),
"SELECT false AS anon_1, false AS anon__1",
)
def test_operator_precedence_collate_5(self):
self.assert_compile(
- select([self.table1.c.name]).order_by(
+ select(self.table1.c.name).order_by(
self.table1.c.name.collate("utf-8").desc()
),
"SELECT mytable.name FROM mytable "
def test_operator_precedence_collate_6(self):
self.assert_compile(
- select([self.table1.c.name]).order_by(
+ select(self.table1.c.name).order_by(
self.table1.c.name.collate("utf-8").desc().nullslast()
),
"SELECT mytable.name FROM mytable "
def test_operator_precedence_collate_7(self):
self.assert_compile(
- select([self.table1.c.name]).order_by(
+ select(self.table1.c.name).order_by(
self.table1.c.name.collate("utf-8").asc()
),
"SELECT mytable.name FROM mytable "
def test_in_20(self):
self.assert_compile(
- self.table1.c.myid.in_(select([self.table2.c.otherid])),
+ self.table1.c.myid.in_(select(self.table2.c.otherid)),
"mytable.myid IN (SELECT myothertable.otherid FROM myothertable)",
)
def test_in_21(self):
self.assert_compile(
- ~self.table1.c.myid.in_(select([self.table2.c.otherid])),
+ ~self.table1.c.myid.in_(select(self.table2.c.otherid)),
"mytable.myid NOT IN "
"(SELECT myothertable.otherid FROM myothertable)",
)
def test_in_24(self):
self.assert_compile(
- select([self.table1.c.myid.in_(select([self.table2.c.otherid]))]),
+ select(self.table1.c.myid.in_(select(self.table2.c.otherid))),
"SELECT mytable.myid IN (SELECT myothertable.otherid "
"FROM myothertable) AS anon_1 FROM mytable",
)
def test_in_25(self):
self.assert_compile(
select(
- [
- self.table1.c.myid.in_(
- select([self.table2.c.otherid]).scalar_subquery()
- )
- ]
+ self.table1.c.myid.in_(
+ select(self.table2.c.otherid).scalar_subquery()
+ )
),
"SELECT mytable.myid IN (SELECT myothertable.otherid "
"FROM myothertable) AS anon_1 FROM mytable",
self.assert_compile(
self.table1.c.myid.in_(
union(
- select([self.table1.c.myid], self.table1.c.myid == 5),
- select([self.table1.c.myid], self.table1.c.myid == 12),
+ select(self.table1.c.myid).where(self.table1.c.myid == 5),
+ select(self.table1.c.myid).where(self.table1.c.myid == 12),
)
),
"mytable.myid IN ("
# test that putting a select in an IN clause does not
# blow away its ORDER BY clause
self.assert_compile(
- select(
- [self.table1, self.table2],
+ select(self.table1, self.table2)
+ .where(
self.table2.c.otherid.in_(
- select(
- [self.table2.c.otherid],
- order_by=[self.table2.c.othername],
- limit=10,
- correlate=False,
- )
- ),
- from_obj=[
- self.table1.join(
- self.table2,
- self.table1.c.myid == self.table2.c.otherid,
- )
- ],
- order_by=[self.table1.c.myid],
- ),
+ select(self.table2.c.otherid)
+ .order_by(self.table2.c.othername)
+ .limit(10)
+ .correlate(False),
+ )
+ )
+ .select_from(
+ self.table1.join(
+ self.table2, self.table1.c.myid == self.table2.c.otherid,
+ )
+ )
+ .order_by(self.table1.c.myid),
"SELECT mytable.myid, "
"myothertable.otherid, myothertable.othername FROM mytable "
"JOIN myothertable ON mytable.myid = myothertable.otherid "
assert not (self.table1.c.myid + 5)._is_implicitly_boolean
assert not not_(column("x", Boolean))._is_implicitly_boolean
assert (
- not select([self.table1.c.myid])
+ not select(self.table1.c.myid)
.scalar_subquery()
._is_implicitly_boolean
)
def test_in_select(self):
t = table("t", column("x"))
- stmt = select([t.c.x])
+ stmt = select(t.c.x)
self.assert_compile(column("q").in_(stmt), "q IN (SELECT t.x FROM t)")
def test_in_subquery_warning(self):
t = table("t", column("x"))
- stmt = select([t.c.x]).subquery()
+ stmt = select(t.c.x).subquery()
with expect_warnings(
r"Coercing Subquery object into a select\(\) for use in "
def test_in_subquery_explicit(self):
t = table("t", column("x"))
- stmt = select([t.c.x]).subquery()
+ stmt = select(t.c.x).subquery()
self.assert_compile(
column("q").in_(stmt.select()),
def test_in_subquery_alias_implicit(self):
t = table("t", column("x"))
- stmt = select([t.c.x]).subquery().alias()
+ stmt = select(t.c.x).subquery().alias()
with expect_warnings(
r"Coercing Alias object into a select\(\) for use in "
def test_in_subquery_alias_explicit(self):
t = table("t", column("x"))
- stmt = select([t.c.x]).subquery().alias()
+ stmt = select(t.c.x).subquery().alias()
self.assert_compile(
column("q").in_(stmt.select().scalar_subquery()),
def test_in_cte_implicit(self):
t = table("t", column("x"))
- stmt = select([t.c.x]).cte()
+ stmt = select(t.c.x).cte()
with expect_warnings(
r"Coercing CTE object into a select\(\) for use in "
def test_in_cte_explicit(self):
t = table("t", column("x"))
- stmt = select([t.c.x]).cte()
+ stmt = select(t.c.x).cte()
- s2 = select([column("q").in_(stmt.select().scalar_subquery())])
+ s2 = select(column("q").in_(stmt.select().scalar_subquery()))
self.assert_compile(
s2,
def test_in_cte_select(self):
t = table("t", column("x"))
- stmt = select([t.c.x]).cte()
+ stmt = select(t.c.x).cte()
- s2 = select([column("q").in_(stmt.select())])
+ s2 = select(column("q").in_(stmt.select()))
self.assert_compile(
s2,
t = t_fixture
self.assert_compile(
- 5
- == any_(select([t.c.data]).where(t.c.data < 10).scalar_subquery()),
+ 5 == any_(select(t.c.data).where(t.c.data < 10).scalar_subquery()),
":param_1 = ANY (SELECT tab1.data "
"FROM tab1 WHERE tab1.data < :data_1)",
checkparams={"data_1": 10, "param_1": 5},
self.assert_compile(
5
- == select([t.c.data])
- .where(t.c.data < 10)
- .scalar_subquery()
- .any_(),
+ == select(t.c.data).where(t.c.data < 10).scalar_subquery().any_(),
":param_1 = ANY (SELECT tab1.data "
"FROM tab1 WHERE tab1.data < :data_1)",
checkparams={"data_1": 10, "param_1": 5},
t = t_fixture
self.assert_compile(
- 5
- == all_(select([t.c.data]).where(t.c.data < 10).scalar_subquery()),
+ 5 == all_(select(t.c.data).where(t.c.data < 10).scalar_subquery()),
":param_1 = ALL (SELECT tab1.data "
"FROM tab1 WHERE tab1.data < :data_1)",
checkparams={"data_1": 10, "param_1": 5},
self.assert_compile(
5
- == select([t.c.data])
- .where(t.c.data < 10)
- .scalar_subquery()
- .all_(),
+ == select(t.c.data).where(t.c.data < 10).scalar_subquery().all_(),
":param_1 = ALL (SELECT tab1.data "
"FROM tab1 WHERE tab1.data < :data_1)",
checkparams={"data_1": 10, "param_1": 5},
concat = ("test: " + users.c.user_name).label("thedata")
eq_(
- connection.execute(
- select([concat]).order_by("thedata")
- ).fetchall(),
+ connection.execute(select(concat).order_by("thedata")).fetchall(),
[("test: ed",), ("test: fred",), ("test: jack",)],
)
eq_(
- connection.execute(
- select([concat]).order_by("thedata")
- ).fetchall(),
+ connection.execute(select(concat).order_by("thedata")).fetchall(),
[("test: ed",), ("test: fred",), ("test: jack",)],
)
concat = ("test: " + users.c.user_name).label("thedata")
eq_(
connection.execute(
- select([concat]).order_by(desc("thedata"))
+ select(concat).order_by(desc("thedata"))
).fetchall(),
[("test: jack",), ("test: fred",), ("test: ed",)],
)
concat = ("test: " + users.c.user_name).label("thedata")
eq_(
connection.execute(
- select([concat]).order_by(literal_column("thedata") + "x")
+ select(concat).order_by(literal_column("thedata") + "x")
).fetchall(),
[("test: ed",), ("test: fred",), ("test: jack",)],
)
def test_or_and_as_columns(self, connection):
true, false = literal(True), literal(False)
- eq_(connection.execute(select([and_(true, false)])).scalar(), False)
- eq_(connection.execute(select([and_(true, true)])).scalar(), True)
- eq_(connection.execute(select([or_(true, false)])).scalar(), True)
- eq_(connection.execute(select([or_(false, false)])).scalar(), False)
+ eq_(connection.execute(select(and_(true, false))).scalar(), False)
+ eq_(connection.execute(select(and_(true, true))).scalar(), True)
+ eq_(connection.execute(select(or_(true, false))).scalar(), True)
+ eq_(connection.execute(select(or_(false, false))).scalar(), False)
eq_(
- connection.execute(select([not_(or_(false, false))])).scalar(),
- True,
+ connection.execute(select(not_(or_(false, false)))).scalar(), True,
)
row = connection.execute(
- select(
- [or_(false, false).label("x"), and_(true, false).label("y")]
- )
+ select(or_(false, false).label("x"), and_(true, false).label("y"))
).first()
assert row.x == False # noqa
assert row.y == False # noqa
row = connection.execute(
- select([or_(true, false).label("x"), and_(true, false).label("y")])
+ select(or_(true, false).label("x"), and_(true, false).label("y"))
).first()
assert row.x == True # noqa
assert row.y == False # noqa
for expr, result in (
(
- select([users.c.user_id]).where(
+ select(users.c.user_id).where(
users.c.user_name.startswith("apple")
),
[(1,)],
),
(
- select([users.c.user_id]).where(
+ select(users.c.user_id).where(
users.c.user_name.contains("i % t")
),
[(5,)],
),
(
- select([users.c.user_id]).where(
+ select(users.c.user_id).where(
users.c.user_name.endswith("anas")
),
[(3,)],
),
(
- select([users.c.user_id]).where(
+ select(users.c.user_id).where(
users.c.user_name.contains("i % t", escape="&")
),
[(5,)],
eq_(
connection.execute(
- select([users.c.user_id]).where(users.c.user_name.ilike("one"))
+ select(users.c.user_id).where(users.c.user_name.ilike("one"))
).fetchall(),
[(1,), (3,), (4,)],
)
eq_(
connection.execute(
- select([users.c.user_id]).where(users.c.user_name.ilike("TWO"))
+ select(users.c.user_id).where(users.c.user_name.ilike("TWO"))
).fetchall(),
[(2,)],
)
if testing.against("postgresql"):
eq_(
connection.execute(
- select([users.c.user_id]).where(
+ select(users.c.user_id).where(
users.c.user_name.like("one")
)
).fetchall(),
)
eq_(
connection.execute(
- select([users.c.user_id]).where(
+ select(users.c.user_id).where(
users.c.user_name.like("TWO")
)
).fetchall(),
def test_compiled_execute(self, connection):
connection.execute(users.insert(), user_id=7, user_name="jack")
- s = select([users], users.c.user_id == bindparam("id")).compile()
+ s = select(users).where(users.c.user_id == bindparam("id")).compile()
eq_(connection.execute(s, id=7).first()._mapping["user_id"], 7)
def test_compiled_insert_execute(self, connection):
connection.execute(
users.insert().compile(), user_id=7, user_name="jack"
)
- s = select([users], users.c.user_id == bindparam("id")).compile()
+ s = select(users).where(users.c.user_id == bindparam("id")).compile()
eq_(connection.execute(s, id=7).first()._mapping["user_id"], 7)
def test_repeated_bindparams(self, connection):
return "INT_%d" % value
eq_(
- connection.scalar(select([cast("INT_5", type_=MyInteger)])),
- "INT_5",
+ connection.scalar(select(cast("INT_5", type_=MyInteger))), "INT_5",
)
eq_(
connection.scalar(
- select([cast("INT_5", type_=MyInteger).label("foo")])
+ select(cast("INT_5", type_=MyInteger).label("foo"))
),
"INT_5",
)
eq_(got, wanted)
for labels in False, True:
+
+ def go(stmt):
+ if labels:
+ stmt = stmt.apply_labels()
+ return stmt
+
a_eq(
users.select(order_by=[users.c.user_id], use_labels=labels),
[(1, "c"), (2, "b"), (3, "a")],
)
a_eq(
- select(
- [users.c.user_id.label("foo")],
- use_labels=labels,
- order_by=[users.c.user_id],
+ go(
+ select(users.c.user_id.label("foo")).order_by(
+ users.c.user_id
+ )
),
[(1,), (2,), (3,)],
)
a_eq(
- select(
- [users.c.user_id.label("foo"), users.c.user_name],
- use_labels=labels,
- order_by=[users.c.user_name, users.c.user_id],
+ go(
+ select(
+ users.c.user_id.label("foo"), users.c.user_name
+ ).order_by(users.c.user_name, users.c.user_id),
),
[(3, "a"), (2, "b"), (1, "c")],
)
)
a_eq(
- select(
- [users.c.user_id.label("foo")],
- distinct=True,
- use_labels=labels,
- order_by=[users.c.user_id],
+ go(
+ select(users.c.user_id.label("foo"))
+ .distinct()
+ .order_by(users.c.user_id),
),
[(1,), (2,), (3,)],
)
a_eq(
- select(
- [
+ go(
+ select(
users.c.user_id.label("a"),
users.c.user_id.label("b"),
users.c.user_name,
- ],
- use_labels=labels,
- order_by=[users.c.user_id],
+ ).order_by(users.c.user_id),
),
[(1, 1, "c"), (2, 2, "b"), (3, 3, "a")],
)
)
a_eq(
- select(
- [users.c.user_id.label("foo")],
- distinct=True,
- use_labels=labels,
- order_by=[users.c.user_id.desc()],
+ go(
+ select(users.c.user_id.label("foo"))
+ .distinct()
+ .order_by(users.c.user_id.desc()),
),
[(3,), (2,), (1,)],
)
)
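
Likewise, the ``order_by``, ``distinct`` and ``use_labels`` keyword arguments become
generative calls, which is what the ``go()`` helper above applies. An illustrative sketch
of the mapping; the lightweight ``table()`` construct here is only a stand-in for the
``users`` fixture used by these tests::

    from sqlalchemy import column, select, table

    users = table("users", column("user_id"), column("user_name"))

    # legacy form (no longer accepted):
    #   select([users.c.user_id.label("foo")], distinct=True,
    #          use_labels=True, order_by=[users.c.user_id])
    stmt = (
        select(users.c.user_id.label("foo"))
        .distinct()
        .order_by(users.c.user_id)
        .apply_labels()
    )
    print(stmt)
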
stmt = (
- select([users])
+ select(users)
.where(users.c.user_name.in_(bindparam("uname", expanding=True)))
.order_by(users.c.user_id)
)
)
stmt = (
- select([users])
+ select(users)
.where(users.c.user_name.in_(bindparam("u35", expanding=True)))
.where(users.c.user_id == bindparam("u46"))
.order_by(users.c.user_id)
)
stmt = (
- select([users])
+ select(users)
.where(users.c.user_name.in_(bindparam("u.35", expanding=True)))
.where(users.c.user_id == bindparam("u.46"))
.order_by(users.c.user_id)
)
stmt = (
- select([users])
+ select(users)
.where(users.c.user_name.in_(bindparam("uname", expanding=True)))
.where(users.c.user_id.in_(bindparam("userid", expanding=True)))
.order_by(users.c.user_id)
)
stmt = (
- select([users])
+ select(users)
.where(
users.c.user_name.in_(bindparam("uname", expanding=True))
| users.c.user_name.in_(bindparam("uname2", expanding=True))
.where(users.c.user_id == 8)
)
stmt = stmt.union(
- select([users])
+ select(users)
.where(
users.c.user_name.in_(bindparam("uname", expanding=True))
| users.c.user_name.in_(bindparam("uname2", expanding=True))
)
stmt = (
- select([users])
+ select(users)
.where(
tuple_(users.c.user_id, users.c.user_name).in_(
bindparam("uname", expanding=True)
)
stmt = (
- select([users])
+ select(users)
.where(users.c.user_name.in_(bindparam("uname", expanding=True)))
.order_by(users.c.user_id)
)
def test_select_where(self):
stmt = (
- select([self.tables.foo])
+ select(self.tables.foo)
.where(self.tables.foo.c.data == bindparam("data"))
.where(self.tables.foo.c.x == bindparam("x"))
)
@testing.requires.standalone_binds
def test_select_columns(self):
- stmt = select([bindparam("data"), bindparam("x")])
+ stmt = select(bindparam("data"), bindparam("x"))
self._assert_raises(stmt, {"data": "data"})
def test_text(self):
[
x[0]
for x in connection.execute(
- select([addresses.c.address]).distinct().limit(3)
+ select(addresses.c.address).distinct().limit(3)
)
]
)
[
x[0]
for x in connection.execute(
- select([addresses.c.address])
+ select(addresses.c.address)
.distinct()
.offset(1)
.order_by(addresses.c.address)
"""Test the interaction between limit and limit/offset"""
r = connection.execute(
- select([addresses.c.address])
+ select(addresses.c.address)
.order_by(addresses.c.address)
.distinct()
.offset(2)
@testing.requires.subqueries
def test_union(self, connection):
(s1, s2) = (
- select(
- [t1.c.col3.label("col3"), t1.c.col4.label("col4")],
+ select(t1.c.col3.label("col3"), t1.c.col4.label("col4")).where(
t1.c.col2.in_(["t1col2r1", "t1col2r2"]),
),
- select(
- [t2.c.col3.label("col3"), t2.c.col4.label("col4")],
+ select(t2.c.col3.label("col3"), t2.c.col4.label("col4")).where(
t2.c.col2.in_(["t2col2r2", "t2col2r3"]),
),
)
@testing.fails_on("firebird", "doesn't like ORDER BY with UNIONs")
def test_union_ordered(self, connection):
(s1, s2) = (
- select(
- [t1.c.col3.label("col3"), t1.c.col4.label("col4")],
+ select(t1.c.col3.label("col3"), t1.c.col4.label("col4")).where(
t1.c.col2.in_(["t1col2r1", "t1col2r2"]),
),
- select(
- [t2.c.col3.label("col3"), t2.c.col4.label("col4")],
+ select(t2.c.col3.label("col3"), t2.c.col4.label("col4")).where(
t2.c.col2.in_(["t2col2r2", "t2col2r3"]),
),
)
@testing.requires.subqueries
def test_union_ordered_alias(self, connection):
(s1, s2) = (
- select(
- [t1.c.col3.label("col3"), t1.c.col4.label("col4")],
+ select(t1.c.col3.label("col3"), t1.c.col4.label("col4")).where(
t1.c.col2.in_(["t1col2r1", "t1col2r2"]),
),
- select(
- [t2.c.col3.label("col3"), t2.c.col4.label("col4")],
+ select(t2.c.col3.label("col3"), t2.c.col4.label("col4")).where(
t2.c.col2.in_(["t2col2r2", "t2col2r3"]),
),
)
@testing.fails_on("sqlite", "FIXME: unknown")
def test_union_all(self, connection):
e = union_all(
- select([t1.c.col3]),
- union(select([t1.c.col3]), select([t1.c.col3])),
+ select(t1.c.col3), union(select(t1.c.col3), select(t1.c.col3)),
)
wanted = [("aaa",), ("aaa",), ("bbb",), ("bbb",), ("ccc",), ("ccc",)]
"""
- u = union(select([t1.c.col3]), select([t1.c.col3])).alias()
+ u = union(select(t1.c.col3), select(t1.c.col3)).alias()
- e = union_all(select([t1.c.col3]), select([u.c.col3]))
+ e = union_all(select(t1.c.col3), select(u.c.col3))
wanted = [("aaa",), ("aaa",), ("bbb",), ("bbb",), ("ccc",), ("ccc",)]
found1 = self._fetchall_sorted(connection.execute(e))
@testing.requires.intersect
def test_intersect(self, connection):
i = intersect(
- select([t2.c.col3, t2.c.col4]),
- select([t2.c.col3, t2.c.col4], t2.c.col4 == t3.c.col3),
+ select(t2.c.col3, t2.c.col4),
+ select(t2.c.col3, t2.c.col4).where(t2.c.col4 == t3.c.col3),
)
wanted = [("aaa", "bbb"), ("bbb", "ccc"), ("ccc", "aaa")]
def test_except_style1(self, connection):
e = except_(
union(
- select([t1.c.col3, t1.c.col4]),
- select([t2.c.col3, t2.c.col4]),
- select([t3.c.col3, t3.c.col4]),
+ select(t1.c.col3, t1.c.col4),
+ select(t2.c.col3, t2.c.col4),
+ select(t3.c.col3, t3.c.col4),
),
- select([t2.c.col3, t2.c.col4]),
+ select(t2.c.col3, t2.c.col4),
)
wanted = [
e = except_(
union(
- select([t1.c.col3, t1.c.col4]),
- select([t2.c.col3, t2.c.col4]),
- select([t3.c.col3, t3.c.col4]),
+ select(t1.c.col3, t1.c.col4),
+ select(t2.c.col3, t2.c.col4),
+ select(t3.c.col3, t3.c.col4),
)
.alias()
.select(),
- select([t2.c.col3, t2.c.col4]),
+ select(t2.c.col3, t2.c.col4),
)
wanted = [
def test_except_style3(self, connection):
# aaa, bbb, ccc - (aaa, bbb, ccc - (ccc)) = ccc
e = except_(
- select([t1.c.col3]), # aaa, bbb, ccc
+ select(t1.c.col3), # aaa, bbb, ccc
except_(
- select([t2.c.col3]), # aaa, bbb, ccc
- select([t3.c.col3], t3.c.col3 == "ccc"), # ccc
+ select(t2.c.col3), # aaa, bbb, ccc
+ select(t3.c.col3).where(t3.c.col3 == "ccc"), # ccc
),
)
eq_(connection.execute(e).fetchall(), [("ccc",)])
def test_except_style4(self, connection):
# aaa, bbb, ccc - (aaa, bbb, ccc - (ccc)) = ccc
e = except_(
- select([t1.c.col3]), # aaa, bbb, ccc
+ select(t1.c.col3), # aaa, bbb, ccc
except_(
- select([t2.c.col3]), # aaa, bbb, ccc
- select([t3.c.col3], t3.c.col3 == "ccc"), # ccc
+ select(t2.c.col3), # aaa, bbb, ccc
+ select(t3.c.col3).where(t3.c.col3 == "ccc"), # ccc
)
.alias()
.select(),
)
def test_intersect_unions(self, connection):
u = intersect(
- union(
- select([t1.c.col3, t1.c.col4]), select([t3.c.col3, t3.c.col4])
- ),
- union(
- select([t2.c.col3, t2.c.col4]), select([t3.c.col3, t3.c.col4])
- )
+ union(select(t1.c.col3, t1.c.col4), select(t3.c.col3, t3.c.col4)),
+ union(select(t2.c.col3, t2.c.col4), select(t3.c.col3, t3.c.col4))
.alias()
.select(),
)
@testing.requires.intersect
def test_intersect_unions_2(self, connection):
u = intersect(
- union(
- select([t1.c.col3, t1.c.col4]), select([t3.c.col3, t3.c.col4])
- )
+ union(select(t1.c.col3, t1.c.col4), select(t3.c.col3, t3.c.col4))
.alias()
.select(),
- union(
- select([t2.c.col3, t2.c.col4]), select([t3.c.col3, t3.c.col4])
- )
+ union(select(t2.c.col3, t2.c.col4), select(t3.c.col3, t3.c.col4))
.alias()
.select(),
)
@testing.requires.intersect
def test_intersect_unions_3(self, connection):
u = intersect(
- select([t2.c.col3, t2.c.col4]),
+ select(t2.c.col3, t2.c.col4),
union(
- select([t1.c.col3, t1.c.col4]),
- select([t2.c.col3, t2.c.col4]),
- select([t3.c.col3, t3.c.col4]),
+ select(t1.c.col3, t1.c.col4),
+ select(t2.c.col3, t2.c.col4),
+ select(t3.c.col3, t3.c.col4),
)
.alias()
.select(),
@testing.requires.intersect
def test_composite_alias(self, connection):
ua = intersect(
- select([t2.c.col3, t2.c.col4]),
+ select(t2.c.col3, t2.c.col4),
union(
- select([t1.c.col3, t1.c.col4]),
- select([t2.c.col3, t2.c.col4]),
- select([t3.c.col3, t3.c.col4]),
+ select(t1.c.col3, t1.c.col4),
+ select(t2.c.col3, t2.c.col4),
+ select(t3.c.col3, t3.c.col4),
)
.alias()
.select(),
"""Joins t1->t2."""
for criteria in (t1.c.t1_id == t2.c.t1_id, t2.c.t1_id == t1.c.t1_id):
- expr = select(
- [t1.c.t1_id, t2.c.t2_id], from_obj=[t1.join(t2, criteria)]
+ expr = select(t1.c.t1_id, t2.c.t2_id).select_from(
+ t1.join(t2, criteria)
)
self.assertRows(expr, [(10, 20), (11, 21)])
"""Joins t1->t2->t3."""
for criteria in (t1.c.t1_id == t2.c.t1_id, t2.c.t1_id == t1.c.t1_id):
- expr = select(
- [t1.c.t1_id, t2.c.t2_id], from_obj=[t1.join(t2, criteria)]
+ expr = select(t1.c.t1_id, t2.c.t2_id).select_from(
+ t1.join(t2, criteria)
)
self.assertRows(expr, [(10, 20), (11, 21)])
"""Outer joins t1->t2."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
- expr = select(
- [t1.c.t1_id, t2.c.t2_id],
- from_obj=[t1.join(t2).join(t3, criteria)],
+ expr = select(t1.c.t1_id, t2.c.t2_id).select_from(
+ t1.join(t2).join(t3, criteria)
)
self.assertRows(expr, [(10, 20)])
"""Outer joins t1->t2,t3."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- from_obj=[
- t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
- t3, criteria
- )
- ],
+ expr = select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id).select_from(
+ t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
+ t3, criteria
+ )
)
self.assertRows(
expr, [(10, 20, 30), (11, 21, None), (12, None, None)]
"""Outer joins t1->t2,t3, where on t1."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- t1.c.name == "t1 #10",
- from_obj=[
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(t1.c.name == "t1 #10")
+ .select_from(
(
t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
t3, criteria
)
)
- ],
+ )
)
self.assertRows(expr, [(10, 20, 30)])
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- t1.c.t1_id < 12,
- from_obj=[
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(t1.c.t1_id < 12)
+ .select_from(
(
t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
t3, criteria
)
)
- ],
+ )
)
self.assertRows(expr, [(10, 20, 30), (11, 21, None)])
"""Outer joins t1->t2,t3, where on t2."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- t2.c.name == "t2 #20",
- from_obj=[
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(t2.c.name == "t2 #20")
+ .select_from(
(
t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
t3, criteria
)
)
- ],
+ )
)
self.assertRows(expr, [(10, 20, 30)])
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- t2.c.t2_id < 29,
- from_obj=[
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(t2.c.t2_id < 29)
+ .select_from(
(
t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
t3, criteria
)
)
- ],
+ )
)
self.assertRows(expr, [(10, 20, 30), (11, 21, None)])
"""Outer joins t1->t2,t3, where on t3."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- t3.c.name == "t3 #30",
- from_obj=[
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(t3.c.name == "t3 #30")
+ .select_from(
(
t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
t3, criteria
)
)
- ],
+ )
)
self.assertRows(expr, [(10, 20, 30)])
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- t3.c.t3_id < 39,
- from_obj=[
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(t3.c.t3_id < 39)
+ .select_from(
(
t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
t3, criteria
)
)
- ],
+ )
)
self.assertRows(expr, [(10, 20, 30)])
"""Outer joins t1->t2,t3, where on t1 and t3."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- and_(t1.c.name == "t1 #10", t3.c.name == "t3 #30"),
- from_obj=[
- (
- t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
- t3, criteria
- )
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(and_(t1.c.name == "t1 #10", t3.c.name == "t3 #30"))
+ .select_from(
+ t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
+ t3, criteria
)
- ],
+ )
)
+
self.assertRows(expr, [(10, 20, 30)])
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- and_(t1.c.t1_id < 19, t3.c.t3_id < 39),
- from_obj=[
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(and_(t1.c.t1_id < 19, t3.c.t3_id < 39))
+ .select_from(
(
t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
t3, criteria
)
)
- ],
+ )
)
self.assertRows(expr, [(10, 20, 30)])
"""Outer joins t1->t2,t3, where on t1 and t2."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- and_(t1.c.name == "t1 #10", t2.c.name == "t2 #20"),
- from_obj=[
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(and_(t1.c.name == "t1 #10", t2.c.name == "t2 #20"))
+ .select_from(
(
t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
t3, criteria
)
)
- ],
+ )
)
self.assertRows(expr, [(10, 20, 30)])
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- and_(t1.c.t1_id < 12, t2.c.t2_id < 39),
- from_obj=[
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(and_(t1.c.t1_id < 12, t2.c.t2_id < 39))
+ .select_from(
(
t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
t3, criteria
)
)
- ],
+ )
)
self.assertRows(expr, [(10, 20, 30), (11, 21, None)])
"""Outer joins t1->t2,t3, where on t1, t2 and t3."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- and_(
- t1.c.name == "t1 #10",
- t2.c.name == "t2 #20",
- t3.c.name == "t3 #30",
- ),
- from_obj=[
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(
+ and_(
+ t1.c.name == "t1 #10",
+ t2.c.name == "t2 #20",
+ t3.c.name == "t3 #30",
+ )
+ )
+ .select_from(
(
t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
t3, criteria
)
)
- ],
+ )
)
self.assertRows(expr, [(10, 20, 30)])
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- and_(t1.c.t1_id < 19, t2.c.t2_id < 29, t3.c.t3_id < 39),
- from_obj=[
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(and_(t1.c.t1_id < 19, t2.c.t2_id < 29, t3.c.t3_id < 39))
+ .select_from(
(
t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).outerjoin(
t3, criteria
)
)
- ],
+ )
)
self.assertRows(expr, [(10, 20, 30)])
"""Joins t1->t2, outer t2->t3."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- from_obj=[(t1.join(t2).outerjoin(t3, criteria))],
+ expr = select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id).select_from(
+ (t1.join(t2).outerjoin(t3, criteria)),
)
print(expr)
self.assertRows(expr, [(10, 20, 30), (11, 21, None)])
"""Joins t1->t2, outer t2->t3, plus a where on each table in turn."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- t1.c.name == "t1 #10",
- from_obj=[(t1.join(t2).outerjoin(t3, criteria))],
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(t1.c.name == "t1 #10")
+ .select_from(t1.join(t2).outerjoin(t3, criteria))
)
self.assertRows(expr, [(10, 20, 30)])
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- t2.c.name == "t2 #20",
- from_obj=[(t1.join(t2).outerjoin(t3, criteria))],
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(t2.c.name == "t2 #20")
+ .select_from(t1.join(t2).outerjoin(t3, criteria))
)
self.assertRows(expr, [(10, 20, 30)])
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- t3.c.name == "t3 #30",
- from_obj=[(t1.join(t2).outerjoin(t3, criteria))],
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(t3.c.name == "t3 #30")
+ .select_from(t1.join(t2).outerjoin(t3, criteria))
)
self.assertRows(expr, [(10, 20, 30)])
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- and_(t1.c.name == "t1 #10", t2.c.name == "t2 #20"),
- from_obj=[(t1.join(t2).outerjoin(t3, criteria))],
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(and_(t1.c.name == "t1 #10", t2.c.name == "t2 #20"))
+ .select_from(t1.join(t2).outerjoin(t3, criteria))
)
self.assertRows(expr, [(10, 20, 30)])
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- and_(t2.c.name == "t2 #20", t3.c.name == "t3 #30"),
- from_obj=[(t1.join(t2).outerjoin(t3, criteria))],
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(and_(t2.c.name == "t2 #20", t3.c.name == "t3 #30"))
+ .select_from(t1.join(t2).outerjoin(t3, criteria))
)
self.assertRows(expr, [(10, 20, 30)])
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- and_(
- t1.c.name == "t1 #10",
- t2.c.name == "t2 #20",
- t3.c.name == "t3 #30",
- ),
- from_obj=[(t1.join(t2).outerjoin(t3, criteria))],
+ expr = (
+ select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
+ .where(
+ and_(
+ t1.c.name == "t1 #10",
+ t2.c.name == "t2 #20",
+ t3.c.name == "t3 #30",
+ ),
+ )
+ .select_from(t1.join(t2).outerjoin(t3, criteria))
)
self.assertRows(expr, [(10, 20, 30)])
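
In the join tests above, the ``from_obj`` list and the positional WHERE criterion move to
``Select.select_from()`` and ``Select.where()``. A minimal sketch of the pattern; the
``table()`` constructs here are only stand-ins for the ``t1`` / ``t2`` fixtures::

    from sqlalchemy import column, select, table

    t1 = table("t1", column("t1_id"), column("name"))
    t2 = table("t2", column("t2_id"), column("t1_id"), column("name"))

    # legacy form (no longer accepted):
    #   select([t1.c.t1_id, t2.c.t2_id], t1.c.name == "t1 #10",
    #          from_obj=[t1.join(t2, t1.c.t1_id == t2.c.t1_id)])
    stmt = (
        select(t1.c.t1_id, t2.c.t2_id)
        .where(t1.c.name == "t1 #10")
        .select_from(t1.join(t2, t1.c.t1_id == t2.c.t1_id))
    )
    print(stmt)
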
def test_modulo(self, connection):
eq_(
connection.execute(
- select([flds.c.intcol % 3], order_by=flds.c.idcol)
+ select(flds.c.intcol % 3).order_by(flds.c.idcol)
).fetchall(),
[(2,), (1,)],
)
eq_(
connection.execute(
select(
- [
- flds.c.intcol,
- func.row_number().over(order_by=flds.c.strcol),
- ]
+ flds.c.intcol,
+ func.row_number().over(order_by=flds.c.strcol),
)
).fetchall(),
[(13, 1), (5, 2)],
)
# Note that the names are quoted b/c they are reserved words
- x = select([table.c.col1, table.c["from"], table.c.order])
+ x = select(table.c.col1, table.c["from"], table.c.order)
self.assert_compile(
x,
"SELECT "
)
# Note that the names are now unquoted
- x = select([table.c.col1, table.c["from"], table.c.order])
+ x = select(table.c.col1, table.c["from"], table.c.order)
self.assert_compile(
x,
"SELECT "
metadata = MetaData()
t1 = Table("t1", metadata, Column("col1", Integer), schema="foo")
a = t1.select().alias("anon")
- b = select([1], a.c.col1 == 2, from_obj=a)
+ b = select(1).where(a.c.col1 == 2).select_from(a)
self.assert_compile(
b,
"SELECT 1 "
quote_schema=True,
)
a = t1.select().alias("anon")
- b = select([1], a.c.col1 == 2, from_obj=a)
+ b = select(1).where(a.c.col1 == 2).select_from(a)
self.assert_compile(
b,
"SELECT 1 "
metadata = MetaData()
t1 = Table("T1", metadata, Column("Col1", Integer), schema="Foo")
a = t1.select().alias("Anon")
- b = select([1], a.c.Col1 == 2, from_obj=a)
+ b = select(1).where(a.c.Col1 == 2).select_from(a)
self.assert_compile(
b,
"SELECT 1 "
quote_schema=False,
)
a = t1.select().alias("Anon")
- b = select([1], a.c.Col1 == 2, from_obj=a)
+ b = select(1).where(a.c.Col1 == 2).select_from(a)
self.assert_compile(
b,
"SELECT 1 "
t1 = Table("t1", m, Column("col1", Integer))
cl = t1.c.col1.label("ShouldQuote")
self.assert_compile(
- select([cl]).order_by(cl),
+ select(cl).order_by(cl),
'SELECT t1.col1 AS "ShouldQuote" FROM t1 ORDER BY "ShouldQuote"',
)
# Lower case names, should not quote
metadata = MetaData()
table = Table("t1", metadata, Column("col1", Integer))
- x = select([table.c.col1.label("label1")]).alias("alias1")
+ x = select(table.c.col1.label("label1")).alias("alias1")
self.assert_compile(
- select([x.c.label1]),
+ select(x.c.label1),
"SELECT "
"alias1.label1 "
"FROM ("
# Not lower case names, should quote
metadata = MetaData()
table = Table("T1", metadata, Column("Col1", Integer))
- x = select([table.c.Col1.label("Label1")]).alias("Alias1")
+ x = select(table.c.Col1.label("Label1")).alias("Alias1")
self.assert_compile(
- select([x.c.Label1]),
+ select(x.c.Label1),
"SELECT "
'"Alias1"."Label1" '
"FROM ("
col = sql.literal_column("NEEDS QUOTES").label("NEEDS QUOTES")
self.assert_compile(
- select([col]).alias().select(),
+ select(col).alias().select(),
'SELECT anon_1."NEEDS QUOTES" FROM (SELECT NEEDS QUOTES AS '
'"NEEDS QUOTES") AS anon_1',
)
col = sql.literal_column("NEEDS QUOTES").label("NEEDS QUOTES_")
self.assert_compile(
- select([col]).alias().select(),
+ select(col).alias().select(),
'SELECT anon_1."NEEDS QUOTES_" FROM (SELECT NEEDS QUOTES AS '
'"NEEDS QUOTES_") AS anon_1',
)
)
self.assert_compile(
- select([col]).alias().select(),
+ select(col).alias().select(),
'SELECT anon_1."NEEDS QUOTES" FROM '
'(SELECT NEEDS QUOTES AS "NEEDS QUOTES") AS anon_1',
)
)
self.assert_compile(
- select([col]).alias().select(),
+ select(col).alias().select(),
'SELECT anon_1."NEEDS QUOTES_" FROM '
'(SELECT NEEDS QUOTES AS "NEEDS QUOTES_") AS anon_1',
)
col = sql.literal_column('"NEEDS QUOTES"')
self.assert_compile(
- select([col]).alias().select(),
+ select(col).alias().select(),
'SELECT anon_1."NEEDS QUOTES" FROM '
'(SELECT "NEEDS QUOTES") AS anon_1',
)
t = Table("t", m, Column("x", Integer, quote=True))
self.assert_compile(
- select([t.alias()]).apply_labels(),
+ select(t.alias()).apply_labels(),
'SELECT t_1."x" AS "t_1_x" FROM t AS t_1',
)
t2 = Table("t2", m, Column("x", Integer), quote=True)
self.assert_compile(
- select([t2.c.x]).apply_labels(),
- 'SELECT "t2".x AS "t2_x" FROM "t2"',
+ select(t2.c.x).apply_labels(), 'SELECT "t2".x AS "t2_x" FROM "t2"',
)
)
sel = (
- select([users.c.user_id])
+ select(users.c.user_id)
.where(users.c.user_name == "jack")
.scalar_subquery()
)
not_in(bar.c.content_type, row._mapping)
row = connection.execute(
- select([func.now().label("content_type")])
+ select(func.now().label("content_type"))
).first()
not_in(content.c.type, row._mapping)
)
def test_column_error_printing(self, connection):
- result = connection.execute(select([1]))
+ result = connection.execute(select(1))
row = result.first()
class unprintable(object):
# this will create column() objects inside
# the select(), these need to match on name anyway
r = connection.execute(
- select([column("user_id"), column("user_name")])
+ select(column("user_id"), column("user_name"))
.select_from(table("users"))
.where(text("user_id=2"))
).first()
# unary expressions
r = connection.execute(
- select([users.c.user_name.distinct()]).order_by(users.c.user_name)
+ select(users.c.user_name.distinct()).order_by(users.c.user_name)
).first()
eq_(r._mapping[users.c.user_name], "john")
eq_(r.user_name, "john")
def test_column_accessor_err(self, connection):
- r = connection.execute(select([1])).first()
+ r = connection.execute(select(1)).first()
assert_raises_message(
AttributeError,
"Could not locate column in row for column 'foo'",
def test_row_case_sensitive(self, connection):
row = connection.execute(
select(
- [
- literal_column("1").label("case_insensitive"),
- literal_column("2").label("CaseSensitive"),
- ]
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive"),
)
).first()
with engines.testing_engine().connect() as ins_conn:
row = ins_conn.execute(
select(
- [
- literal_column("1").label("case_insensitive"),
- literal_column("2").label("CaseSensitive"),
- text("3 AS screw_up_the_cols"),
- ]
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive"),
+ text("3 AS screw_up_the_cols"),
)
).first()
ua = users.alias()
u2 = users.alias()
result = connection.execute(
- select([users.c.user_id, ua.c.user_id]).select_from(
+ select(users.c.user_id, ua.c.user_id).select_from(
users.join(ua, true())
)
)
# but when they're fetched you'll get the ambiguous error.
connection.execute(users.insert(), user_id=1, user_name="john")
result = connection.execute(
- select([users.c.user_id, addresses.c.user_id]).select_from(
+ select(users.c.user_id, addresses.c.user_id).select_from(
users.outerjoin(addresses)
)
)
connection.execute(users.insert(), user_id=1, user_name="john")
result = connection.execute(
select(
- [
- users.c.user_id,
- type_coerce(users.c.user_id, Integer).label("foo"),
- ]
+ users.c.user_id,
+ type_coerce(users.c.user_id, Integer).label("foo"),
)
)
row = result.first()
connection.execute(users.insert(), user_id=1, user_name="foo")
result = connection.execute(
select(
- [
- users.c.user_id,
- users.c.user_name.label(None),
- func.count(literal_column("1")),
- ]
+ users.c.user_id,
+ users.c.user_name.label(None),
+ func.count(literal_column("1")),
).group_by(users.c.user_id, users.c.user_name)
)
"Statement is not a compiled expression construct.",
),
(
- select([1]),
+ select(1),
[
lambda r: r.last_inserted_params(),
lambda r: r.inserted_primary_key,
r"Statement is not an insert\(\) expression construct.",
),
(
- select([1]),
+ select(1),
[lambda r: r.last_updated_params()],
r"Statement is not an update\(\) expression construct.",
),
(
- select([1]),
+ select(1),
[lambda r: r.prefetch_cols(), lambda r: r.postfetch_cols()],
r"Statement is not an insert\(\) "
r"or update\(\) expression construct.",
def _test_keyed_targeting_no_label_at_all(self, expression, conn):
lt = literal_column("2")
- stmt = select([literal_column("1"), expression, lt]).select_from(
+ stmt = select(literal_column("1"), expression, lt).select_from(
self.tables.keyed1
)
row = conn.execute(stmt).first()
return "max(a)"
# assert that there is no "AS max_" or any label of any kind.
- eq_(str(select([not_named_max()])), "SELECT max(a)")
+ eq_(str(select(not_named_max())), "SELECT max(a)")
nnm = not_named_max()
self._test_keyed_targeting_no_label_at_all(nnm, connection)
return "max(a)"
# assert that there is no "AS max_" or any label of any kind.
- eq_(str(select([not_named_max()])), "SELECT max(a)")
+ eq_(str(select(not_named_max())), "SELECT max(a)")
nnm = not_named_max()
self._test_keyed_targeting_no_label_at_all(nnm, connection)
t1 = text("max(a)")
t2 = text("min(a)")
- stmt = select([t1, t2]).select_from(self.tables.keyed1)
+ stmt = select(t1, t2).select_from(self.tables.keyed1)
row = connection.execute(stmt).first()
eq_(row._mapping[t1], "a1")
keyed2 = self.tables.keyed2
row = connection.execute(
- select([keyed1, keyed2]).select_from(keyed1.join(keyed2, true()))
+ select(keyed1, keyed2).select_from(keyed1.join(keyed2, true()))
).first()
# column access is unambiguous
# illustrate why row.b above is ambiguous rather than "b2": if we
# didn't have keyed2, it would match row.a, and a new column
# shouldn't be able to grab the value from a previous column.
- row = connection.execute(select([keyed1])).first()
+ row = connection.execute(select(keyed1)).first()
eq_(row.b, "a1")
def test_keyed_accessor_composite_conflict_2_fix_w_uselabels(
keyed2 = self.tables.keyed2
row = connection.execute(
- select([keyed1, keyed2])
+ select(keyed1, keyed2)
.select_from(keyed1.join(keyed2, true()))
.apply_labels()
).first()
keyed4 = self.tables.keyed4
row = connection.execute(
- select([keyed1, keyed4]).select_from(keyed1.join(keyed4, true()))
+ select(keyed1, keyed4).select_from(keyed1.join(keyed4, true()))
).first()
eq_(row.b, "b4")
eq_(row.q, "q4")
keyed3 = self.tables.keyed3
row = connection.execute(
- select([keyed1, keyed3]).select_from(keyed1.join(keyed3, true()))
+ select(keyed1, keyed3).select_from(keyed1.join(keyed3, true()))
).first()
eq_(row.q, "c1")
keyed2 = self.tables.keyed2
row = connection.execute(
- select([keyed1, keyed2])
+ select(keyed1, keyed2)
.select_from(keyed1.join(keyed2, true()))
.apply_labels()
).first()
stmt = (
select(
- [
- keyed2.c.a,
- keyed3.c.a,
- keyed2.c.a,
- keyed2.c.a,
- keyed3.c.a,
- keyed3.c.a,
- keyed3.c.d,
- keyed3.c.d,
- ]
+ keyed2.c.a,
+ keyed3.c.a,
+ keyed2.c.a,
+ keyed2.c.a,
+ keyed3.c.a,
+ keyed3.c.a,
+ keyed3.c.d,
+ keyed3.c.d,
)
.select_from(keyed2.join(keyed3, true()))
.apply_labels()
# originally addressed by [ticket:2932], however liberalized
# column-targeting rules are deprecated
a, b = sql.column("a"), sql.column("b")
- stmt = select([a, b]).select_from(table("keyed2"))
+ stmt = select(a, b).select_from(table("keyed2"))
row = connection.execute(stmt).first()
in_(a, row._mapping)
def test_columnclause_schema_column_two(self, connection):
keyed2 = self.tables.keyed2
- stmt = select([keyed2.c.a, keyed2.c.b])
+ stmt = select(keyed2.c.a, keyed2.c.b)
row = connection.execute(stmt).first()
in_(keyed2.c.a, row._mapping)
def _adapt_result_columns_fixture_one(self):
keyed1 = self.tables.keyed1
stmt = (
- select([keyed1.c.b, keyed1.c.q.label("foo")])
+ select(keyed1.c.b, keyed1.c.q.label("foo"))
.apply_labels()
.subquery()
)
- return select([stmt.c.keyed1_b, stmt.c.foo])
+ return select(stmt.c.keyed1_b, stmt.c.foo)
def _adapt_result_columns_fixture_two(self):
return text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
def _adapt_result_columns_fixture_three(self):
keyed1 = self.tables.keyed1
- stmt = select([keyed1.c.b, keyed1.c.q.label("foo")]).subquery()
+ stmt = select(keyed1.c.b, keyed1.c.q.label("foo")).subquery()
- return select([stmt.c.b, stmt.c.foo])
+ return select(stmt.c.b, stmt.c.foo)
def _adapt_result_columns_fixture_four(self):
keyed1 = self.tables.keyed1
- stmt1 = select([keyed1]).apply_labels()
+ stmt1 = select(keyed1).apply_labels()
a1 = keyed1.alias()
stmt2 = ClauseAdapter(a1).traverse(stmt1)
with self._proxy_fixture(cls):
rows = []
with self.engine.connect() as conn:
- r = conn.execute(select([self.table]))
+ r = conn.execute(select(self.table))
assert isinstance(r.cursor_strategy, cls)
for i in range(5):
rows.append(r.fetchone())
rows = r.fetchall()
eq_(rows, [(i, "t_%d" % i) for i in range(9, 12)])
- r = conn.execute(select([self.table]))
+ r = conn.execute(select(self.table))
rows = r.fetchmany(None)
eq_(rows[0], (1, "t_1"))
# number of rows here could be one, or the whole thing
assert len(rows) == 1 or len(rows) == 11
- r = conn.execute(select([self.table]).limit(1))
+ r = conn.execute(select(self.table).limit(1))
r.fetchone()
eq_(r.fetchone(), None)
- r = conn.execute(select([self.table]).limit(5))
+ r = conn.execute(select(self.table).limit(5))
rows = r.fetchmany(6)
eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)])
self._assert_result_closed(r)
- r = conn.execute(select([self.table]).limit(5))
+ r = conn.execute(select(self.table).limit(5))
eq_(r.first(), (1, "t_1"))
self._assert_result_closed(r)
- r = conn.execute(select([self.table]).limit(5))
+ r = conn.execute(select(self.table).limit(5))
eq_(r.scalar(), 1)
self._assert_result_closed(r)
cache = {}
conn = conn.execution_options(compiled_cache=cache)
- stmt = select([literal("THERE", type_=MyType())])
+ stmt = select(literal("THERE", type_=MyType()))
for i in range(2):
r = conn.execute(stmt)
eq_(r.scalar(), "HI THERE")
eq_(result.fetchall(), [(1,)])
result2 = connection.execute(
- select([table.c.id, table.c.full]).order_by(table.c.id)
+ select(table.c.id, table.c.full).order_by(table.c.id)
)
eq_(result2.fetchall(), [(1, True), (2, False)])
eq_(result.fetchall(), [(1,)])
result2 = connection.execute(
- select([table.c.id, table.c.full]).order_by(table.c.id)
+ select(table.c.id, table.c.full).order_by(table.c.id)
)
eq_(result2.fetchall(), [(2, False)])
stmt = (
t2.insert()
- .values(x=select([t1.c.x]).scalar_subquery())
+ .values(x=select(t1.c.x).scalar_subquery())
.returning(t2.c.x)
)
table1, table2 = update_from_fixture
# test against a regular constructed subquery
- s = select([table2], table2.c.otherid == table1.c.myid)
+ s = select(table2).where(table2.c.otherid == table1.c.myid)
with testing.expect_warnings(
"implicitly coercing SELECT object to scalar subquery"
):
)
def test_legacy_calling_style_col_seq_only(self):
+ # keep [] here
stmt = select([table1.c.myid]).where(table1.c.myid == table2.c.otherid)
self.assert_compile(
def test_distance_on_aliases(self):
a1 = table1.alias("a1")
for s in (
- select([a1, table1], use_labels=True).subquery(),
- select([table1, a1], use_labels=True).subquery(),
+ select(a1, table1).apply_labels().subquery(),
+ select(table1, a1).apply_labels().subquery(),
):
assert s.corresponding_column(table1.c.col1) is s.c.table1_col1
assert s.corresponding_column(a1.c.col1) is s.c.a1_col1
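These hunks also replace the ``use_labels=True`` keyword argument with the generative :meth:`.apply_labels` method, which is its equivalent under the new calling style. A minimal sketch of the equivalence (table name illustrative only)::

    from sqlalchemy import column, select, table

    t = table("t", column("x"))

    # 1.3:  select([t], use_labels=True)
    # 1.4:  generative method on the positional form
    stmt = select(t).apply_labels()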
self.assert_compile(group, "b / (y * w)")
def test_subquery_on_table(self):
- sel = select([table1, table2], use_labels=True).subquery()
+ sel = select(table1, table2).apply_labels().subquery()
assert sel.corresponding_column(table1.c.col1) is sel.c.table1_col1
assert (
def test_join_against_join(self):
j = outerjoin(table1, table2, table1.c.col1 == table2.c.col2)
- jj = select([table1.c.col1.label("bar_col1")], from_obj=[j]).alias(
- "foo"
+ jj = (
+ select(table1.c.col1.label("bar_col1")).select_from(j).alias("foo")
)
jjj = join(table1, jj, table1.c.col1 == jj.c.bar_col1)
assert jjj.corresponding_column(jjj.c.table1_col1) is jjj.c.table1_col1
t2 = Table("t2", m, Column("id", Integer, ForeignKey("t1.id")))
t3 = Table("t3", m2, Column("id", Integer, ForeignKey("t1.id2")))
- s = select([t2, t3], use_labels=True).subquery()
+ s = select(t2, t3).apply_labels().subquery()
assert_raises(exc.NoReferencedTableError, s.join, t1)
"""
t = table("t", column("x"))
- stmt = select([t.c.x])
+ stmt = select(t.c.x)
whereclause = annotation._deep_annotate(t.c.x == 5, {"foo": "bar"})
a1 = table1.alias()
s = select(a1.c.x).select_from(a1.join(table2, a1.c.x == table2.c.y))
- assert_s = select([select(s.subquery()).subquery()])
+ assert_s = select(select(s.subquery()).subquery())
for fn in (
sql_util._deep_deannotate,
lambda s: sql_util._deep_annotate(s, {"foo": "bar"}),
lambda s: visitors.replacement_traverse(s, {}, lambda x: None),
):
- sel = fn(select([fn(select(fn(s.subquery())).subquery())]))
+ sel = fn(select(fn(select(fn(s.subquery())).subquery())))
eq_(str(assert_s), str(sel))
def test_bind_unique_test(self):
def test_unary_boolean(self):
- s1 = select([not_(True)], use_labels=True)
+ s1 = select(not_(True)).apply_labels()
eq_(
[type(entry[-1]) for entry in s1.compile()._result_columns],
[Boolean],
"""test can use next_value() in select column expr"""
s = Sequence("my_sequence")
- self._assert_seq_result(testing.db.scalar(select([s.next_value()])))
+ self._assert_seq_result(testing.db.scalar(select(s.next_value())))
class SequenceExecTest(fixtures.TestBase):
"""test can use next_value() in select column expr"""
s = Sequence("my_sequence")
- self._assert_seq_result(connection.scalar(select([s.next_value()])))
+ self._assert_seq_result(connection.scalar(select(s.next_value())))
@testing.requires.sequences_in_other_clauses
@testing.provide_metadata
eq_(
connection.scalar(
- sa.select([cartitems.c.cart_id]).where(
+ sa.select(cartitems.c.cart_id).where(
cartitems.c.description == "lala"
),
),
t_seq_test = self.tables.t_seq_test
connection.execute(t_seq_test.insert().values(data="some data"))
- eq_(connection.scalar(select([t_seq_test.c.id])), 1)
+ eq_(connection.scalar(select(t_seq_test.c.id)), 1)
def test_default_textual_server_only(self, connection):
connection.exec_driver_sql(
t_seq_test = self.tables.t_seq_test_2
connection.execute(t_seq_test.insert().values(data="some data"))
- eq_(connection.scalar(select([t_seq_test.c.id])), 1)
+ eq_(connection.scalar(select(t_seq_test.c.id)), 1)
def test_drop_ordering(self):
with self.sql_execution_asserter(testing.db) as asserter:
table1 = self.tables.people
self.assert_compile(
- select([table1.tablesample(text("1"), name="alias").c.people_id]),
+ select(table1.tablesample(text("1"), name="alias").c.people_id),
"SELECT alias.people_id FROM "
"people AS alias TABLESAMPLE system(1)",
)
def test_text_adds_to_result_map(self):
t1, t2 = text("t1"), text("t2")
- stmt = select([t1, t2])
+ stmt = select(t1, t2)
compiled = stmt.compile()
eq_(
compiled._result_columns,
def test_select_composition_one(self):
self.assert_compile(
select(
- [
- literal_column("foobar(a)"),
- literal_column("pk_foo_bar(syslaal)"),
- ],
- text("a = 12"),
- from_obj=[
- text(
- "foobar left outer join lala on foobar.foo = lala.foo"
- )
- ],
+ literal_column("foobar(a)"),
+ literal_column("pk_foo_bar(syslaal)"),
+ )
+ .where(text("a = 12"))
+ .select_from(
+ text("foobar left outer join lala on foobar.foo = lala.foo")
),
"SELECT foobar(a), pk_foo_bar(syslaal) FROM foobar "
"left outer join lala on foobar.foo = lala.foo WHERE a = 12",
def test_select_composition_three(self):
self.assert_compile(
- select([column("column1"), column("column2")], from_obj=table1)
+ select(column("column1"), column("column2"))
+ .select_from(table1)
.alias("somealias")
.select(),
"SELECT somealias.column1, somealias.column2 FROM "
# test that use_labels doesn't interfere with literal columns
self.assert_compile(
select(
- [
- text("column1"),
- column("column2"),
- column("column3").label("bar"),
- table1.c.myid,
- ],
- from_obj=table1,
- use_labels=True,
- ),
+ text("column1"),
+ column("column2"),
+ column("column3").label("bar"),
+ table1.c.myid,
+ )
+ .select_from(table1)
+ .apply_labels(),
"SELECT column1, column2, column3 AS bar, "
"mytable.myid AS mytable_myid "
"FROM mytable",
# with literal columns that have textual labels
self.assert_compile(
select(
- [
- text("column1 AS foobar"),
- text("column2 AS hoho"),
- table1.c.myid,
- ],
- from_obj=table1,
- use_labels=True,
- ),
+ text("column1 AS foobar"),
+ text("column2 AS hoho"),
+ table1.c.myid,
+ )
+ .select_from(table1)
+ .apply_labels(),
"SELECT column1 AS foobar, column2 AS hoho, "
"mytable.myid AS mytable_myid FROM mytable",
)
# no columns is being maintained.
self.assert_compile(
select(
- [
- literal_column("column1 AS foobar"),
- literal_column("column2 AS hoho"),
- table1.c.myid,
- ],
- from_obj=[table1],
+ literal_column("column1 AS foobar"),
+ literal_column("column2 AS hoho"),
+ table1.c.myid,
)
+ .select_from(table1)
.subquery()
.select(),
"SELECT anon_1.column1 AS foobar, anon_1.column2 AS hoho, "
def test_select_composition_seven(self):
self.assert_compile(
- select(
- [literal_column("col1"), literal_column("col2")],
- from_obj=table("tablename"),
- ).alias("myalias"),
+ select(literal_column("col1"), literal_column("col2"))
+ .select_from(table("tablename"))
+ .alias("myalias"),
"SELECT col1, col2 FROM tablename",
)
def test_select_composition_eight(self):
self.assert_compile(
- select(
- [table1.alias("t"), text("foo.f")],
- text("foo.f = t.id"),
- from_obj=[text("(select f from bar where lala=heyhey) foo")],
- ),
+ select(table1.alias("t"), text("foo.f"))
+ .where(text("foo.f = t.id"))
+ .select_from(text("(select f from bar where lala=heyhey) foo")),
"SELECT t.myid, t.name, t.description, foo.f FROM mytable AS t, "
"(select f from bar where lala=heyhey) foo WHERE foo.f = t.id",
)
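The remaining keyword and positional arguments of the legacy constructor follow the same pattern: the positional WHERE criterion moves to :meth:`.where` and ``from_obj`` moves to :meth:`.select_from`. A minimal sketch (names illustrative only)::

    from sqlalchemy import column, select, table, text

    t = table("t", column("x"))

    # 1.3:  select([t.c.x], text("x > 5"), from_obj=[t])
    # 1.4:  each argument becomes its own generative call
    stmt = select(t.c.x).where(text("x > 5")).select_from(t)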
def test_select_bundle_columns(self):
self.assert_compile(
select(
- [
- table1,
- table2.c.otherid,
- text("sysdate()"),
- text("foo, bar, lala"),
- ],
+ table1,
+ table2.c.otherid,
+ text("sysdate()"),
+ text("foo, bar, lala"),
+ ).where(
and_(
text("foo.id = foofoo(lala)"),
text("datetime(foo) = Today"),
t1 = text("select :foo").bindparams(bindparam("foo", 5, unique=True))
t2 = text("select :foo").bindparams(bindparam("foo", 10, unique=True))
- stmt = select([t1, t2])
+ stmt = select(t1, t2)
self.assert_compile(
stmt,
"SELECT select :foo_1, select :foo_2",
).bindparams(x=None, y=None, z=None)
s = select(
- [(func.current_date() + literal_column("s.a")).label("dates")]
+ (func.current_date() + literal_column("s.a")).label("dates")
).select_from(generate_series)
self.assert_compile(
)
def test_percent_signs_literal_binds(self):
- stmt = select([literal("percent % signs %%")])
+ stmt = select(literal("percent % signs %%"))
self.assert_compile(
stmt,
"SELECT 'percent % signs %%' AS anon_1",
.subquery()
)
- stmt = select([table1.c.myid]).select_from(
+ stmt = select(table1.c.myid).select_from(
table1.join(t, table1.c.myid == t.c.id)
)
compiled = stmt.compile()
.cte("t")
)
- s = select([table1]).where(table1.c.myid == t.c.id)
+ s = select(table1).where(table1.c.myid == t.c.id)
self.assert_compile(
s,
"WITH t AS (select id, name from user) "
)
stmt = (
- select([table1.c.myid])
+ select(table1.c.myid)
.select_from(table1.join(t, table1.c.myid == t.c.id))
.order_by(t.c.name)
)
.alias("t")
)
- s = select([table1]).where(table1.c.myid == t.c.id)
+ s = select(table1).where(table1.c.myid == t.c.id)
self.assert_compile(
s,
"SELECT mytable.myid, mytable.name, mytable.description "
assert subq.type._type_affinity is Integer()._type_affinity
- s = select([table1.c.myid, subq]).where(table1.c.myid == subq)
+ s = select(table1.c.myid, subq).where(table1.c.myid == subq)
self.assert_compile(
s,
"SELECT mytable.myid, (select id from user) AS anon_1 "
)
def test_where(self):
- self._test(select([table1.c.myid]).where, "myid == 5", "myid == 5")
+ self._test(select(table1.c.myid).where, "myid == 5", "myid == 5")
def test_column(self):
self._test(select, ["myid"], "myid")
def test_having(self):
- self._test(select([table1.c.myid]).having, "myid == 5", "myid == 5")
+ self._test(select(table1.c.myid).having, "myid == 5", "myid == 5")
def test_from(self):
- self._test(select([table1.c.myid]).select_from, "mytable", "mytable")
+ self._test(select(table1.c.myid).select_from, "mytable", "mytable")
class OrderByLabelResolutionTest(fixtures.TestBase, AssertsCompiledSQL):
)
def test_order_by_label(self):
- stmt = select([table1.c.myid.label("foo")]).order_by("foo")
+ stmt = select(table1.c.myid.label("foo")).order_by("foo")
self.assert_compile(
stmt, "SELECT mytable.myid AS foo FROM mytable ORDER BY foo"
)
def test_order_by_colname(self):
- stmt = select([table1.c.myid]).order_by("name")
+ stmt = select(table1.c.myid).order_by("name")
self.assert_compile(
stmt, "SELECT mytable.myid FROM mytable ORDER BY mytable.name"
)
def test_order_by_alias_colname(self):
t1 = table1.alias()
- stmt = select([t1.c.myid]).apply_labels().order_by("name")
+ stmt = select(t1.c.myid).apply_labels().order_by("name")
self.assert_compile(
stmt,
"SELECT mytable_1.myid AS mytable_1_myid "
for mod in modifiers:
order_by = mod(order_by)
- stmt = select([case]).order_by(order_by)
+ stmt = select(case).order_by(order_by)
col_expr = str(case)
self.assert_compile(
)
def test_order_by_named_label_from_anon_label(self):
- s1 = select([table1.c.myid.label(None).label("foo"), table1.c.name])
+ s1 = select(table1.c.myid.label(None).label("foo"), table1.c.name)
stmt = s1.order_by("foo")
self.assert_compile(
stmt,
# test [ticket:3335], assure that order_by("foo")
# catches the label named "foo" in the columns clause only,
# and not the label named "foo" in the FROM clause
- s1 = select([table1.c.myid.label("foo"), table1.c.name]).alias()
- stmt = select([s1.c.name, func.bar().label("foo")]).order_by("foo")
+ s1 = select(table1.c.myid.label("foo"), table1.c.name).alias()
+ stmt = select(s1.c.name, func.bar().label("foo")).order_by("foo")
self.assert_compile(
stmt,
)
def test_unresolvable_warning_order_by(self):
- stmt = select([table1.c.myid]).order_by("foobar")
+ stmt = select(table1.c.myid).order_by("foobar")
self._test_exception(stmt, "foobar")
def test_distinct_label(self):
- stmt = select([table1.c.myid.label("foo")]).distinct("foo")
+ stmt = select(table1.c.myid.label("foo")).distinct("foo")
self.assert_compile(
stmt,
"SELECT DISTINCT ON (foo) mytable.myid AS foo FROM mytable",
def test_distinct_label_keyword(self):
- stmt = select([table1.c.myid.label("foo")], distinct="foo")
+ stmt = select(table1.c.myid.label("foo")).distinct("foo")
self.assert_compile(
stmt,
"SELECT DISTINCT ON (foo) mytable.myid AS foo FROM mytable",
def test_unresolvable_distinct_label(self):
from sqlalchemy.dialects import postgresql
- stmt = select([table1.c.myid.label("foo")]).distinct("not a label")
+ stmt = select(table1.c.myid.label("foo")).distinct("not a label")
self._test_exception(stmt, "not a label", dialect=postgresql.dialect())
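Likewise the ``distinct="foo"`` keyword argument becomes the generative :meth:`.distinct` method; when given an expression or label argument it renders ``DISTINCT ON`` on the PostgreSQL dialect, as in the assertions above. A small self-contained sketch::

    from sqlalchemy import column, select, table
    from sqlalchemy.dialects import postgresql

    mytable = table("mytable", column("myid"))
    stmt = select(mytable.c.myid.label("foo")).distinct("foo")

    # on the PostgreSQL dialect this renders:
    # SELECT DISTINCT ON (foo) mytable.myid AS foo FROM mytable
    print(stmt.compile(dialect=postgresql.dialect()))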
def test_group_by_label(self):
- stmt = select([table1.c.myid.label("foo")]).group_by("foo")
+ stmt = select(table1.c.myid.label("foo")).group_by("foo")
self.assert_compile(
stmt, "SELECT mytable.myid AS foo FROM mytable GROUP BY foo"
)
def test_group_by_colname(self):
- stmt = select([table1.c.myid]).group_by("name")
+ stmt = select(table1.c.myid).group_by("name")
self.assert_compile(
stmt, "SELECT mytable.myid FROM mytable GROUP BY mytable.name"
)
def test_unresolvable_warning_group_by(self):
- stmt = select([table1.c.myid]).group_by("foobar")
+ stmt = select(table1.c.myid).group_by("foobar")
self._test_exception(stmt, "foobar")
def test_asc(self):
- stmt = select([table1.c.myid]).order_by(asc("name"), "description")
+ stmt = select(table1.c.myid).order_by(asc("name"), "description")
self.assert_compile(
stmt,
"SELECT mytable.myid FROM mytable "
)
def test_group_by_subquery(self):
- stmt = select([table1]).alias()
- stmt = select([stmt]).apply_labels().group_by("myid")
+ stmt = select(table1).alias()
+ stmt = select(stmt).apply_labels().group_by("myid")
self.assert_compile(
stmt,
"SELECT anon_1.myid AS anon_1_myid, anon_1.name AS anon_1_name, "
def test_order_by_literal_col_quoting_one(self):
col = literal_column("SUM(ABC)").label("SUM(ABC)")
tbl = table("my_table")
- query = select([col]).select_from(tbl).order_by(col)
+ query = select(col).select_from(tbl).order_by(col)
self.assert_compile(
query,
'SELECT SUM(ABC) AS "SUM(ABC)" FROM my_table ORDER BY "SUM(ABC)"',
def test_order_by_literal_col_quoting_two(self):
col = literal_column("SUM(ABC)").label("SUM(ABC)_")
tbl = table("my_table")
- query = select([col]).select_from(tbl).order_by(col)
+ query = select(col).select_from(tbl).order_by(col)
self.assert_compile(
query,
'SELECT SUM(ABC) AS "SUM(ABC)_" FROM my_table ORDER BY '
def test_order_by_literal_col_quoting_one_explict_quote(self):
col = literal_column("SUM(ABC)").label(quoted_name("SUM(ABC)", True))
tbl = table("my_table")
- query = select([col]).select_from(tbl).order_by(col)
+ query = select(col).select_from(tbl).order_by(col)
self.assert_compile(
query,
'SELECT SUM(ABC) AS "SUM(ABC)" FROM my_table ORDER BY "SUM(ABC)"',
def test_order_by_literal_col_quoting_two_explicit_quote(self):
col = literal_column("SUM(ABC)").label(quoted_name("SUM(ABC)_", True))
tbl = table("my_table")
- query = select([col]).select_from(tbl).order_by(col)
+ query = select(col).select_from(tbl).order_by(col)
self.assert_compile(
query,
'SELECT SUM(ABC) AS "SUM(ABC)_" FROM my_table ORDER BY '
)
def test_order_by_func_label_desc(self):
- stmt = select([func.foo("bar").label("fb"), table1]).order_by(
- desc("fb")
- )
+ stmt = select(func.foo("bar").label("fb"), table1).order_by(desc("fb"))
self.assert_compile(
stmt,
)
def test_pg_distinct(self):
- stmt = select([table1]).distinct("name")
+ stmt = select(table1).distinct("name")
self.assert_compile(
stmt,
"SELECT DISTINCT ON (mytable.name) mytable.myid, "
)
def test_over(self):
- stmt = select([column("foo"), column("bar")]).subquery()
+ stmt = select(column("foo"), column("bar")).subquery()
stmt = select(
- [func.row_number().over(order_by="foo", partition_by="bar")]
+ func.row_number().over(order_by="foo", partition_by="bar")
).select_from(stmt)
self.assert_compile(
)
def test_union_column(self):
- s1 = select([table1])
- s2 = select([table1])
+ s1 = select(table1)
+ s2 = select(table1)
stmt = union(s1, s2).order_by("name")
self.assert_compile(
stmt,
)
def test_union_label(self):
- s1 = select([func.foo("hoho").label("x")])
- s2 = select([func.foo("Bar").label("y")])
+ s1 = select(func.foo("hoho").label("x"))
+ s2 = select(func.foo("Bar").label("y"))
stmt = union(s1, s2).order_by("x")
self.assert_compile(
stmt,
adapter = sql_util.ColumnAdapter(ta, anonymize_labels=True)
s1 = (
- select([adapter.columns[expr] for expr in exprs])
+ select(*[adapter.columns[expr] for expr in exprs])
.apply_labels()
.order_by("myid", "t1name", "x")
)
adapter = sql_util.ColumnAdapter(ta)
s1 = (
- select([adapter.columns[expr] for expr in exprs])
+ select(*[adapter.columns[expr] for expr in exprs])
.apply_labels()
.order_by("myid", "t1name", "x")
)
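Where the columns to select already live in a Python sequence, as with the adapted expressions above, the new calling form requires unpacking the sequence with ``*`` rather than passing the list itself. A minimal sketch::

    from sqlalchemy import column, select

    cols = [column("a"), column("b"), column("c")]

    # 1.3:  select(cols)
    # 1.4:  unpack the sequence positionally
    stmt = select(*cols)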
table = self._fixture()
self.assert_compile(
- select([table]),
+ select(table),
"SELECT test_table.x, lower(test_table.y) AS y FROM test_table",
)
def test_anonymous_expr(self):
table = self._fixture()
self.assert_compile(
- select([cast(table.c.y, String)]),
+ select(cast(table.c.y, String)),
"SELECT CAST(test_table.y AS VARCHAR) AS y FROM test_table",
)
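The ``lower(test_table.y)`` wrapping in these assertions comes from a fixture type that defines SQL-level column and bind expressions; the fixture itself is not shown here, but a type of the same general shape can be sketched as follows (the ``LowerString`` name is illustrative only)::

    from sqlalchemy import String, func
    from sqlalchemy.types import TypeDecorator

    class LowerString(TypeDecorator):
        """Illustrative type applying lower() at the SQL level."""

        impl = String

        def bind_expression(self, bindvalue):
            # wraps bound parameters as they are rendered in SQL
            return func.lower(bindvalue)

        def column_expression(self, col):
            # wraps the column when it appears in the columns clause
            return func.lower(col)

Used as ``Column("y", LowerString(50))``, a ``select()`` against the table renders ``lower(test_table.y)`` in the columns clause and ``lower(:y_1)`` for the bound parameter, similar to the assertions above.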
table = self._fixture()
self.assert_compile(
- select([table]).apply_labels(),
+ select(table).apply_labels(),
"SELECT test_table.x AS test_table_x, "
"lower(test_table.y) AS test_table_y FROM test_table",
)
def test_select_cols_use_labels_result_map_targeting(self):
table = self._fixture()
- compiled = select([table]).apply_labels().compile()
+ compiled = select(table).apply_labels().compile()
assert table.c.y in compiled._create_result_map()["test_table_y"][1]
assert table.c.x in compiled._create_result_map()["test_table_x"][1]
def test_select_binds(self):
table = self._fixture()
self.assert_compile(
- select([table]).where(table.c.y == "hi"),
+ select(table).where(table.c.y == "hi"),
"SELECT test_table.x, lower(test_table.y) AS y FROM "
"test_table WHERE test_table.y = lower(:y_1)",
)
# 'x' is straight String
self.assert_compile(
- select([table.c.x]).where(table.c.x == "hi"),
+ select(table.c.x).where(table.c.x == "hi"),
"SELECT dialect_colexpr(test_table.x) AS x "
"FROM test_table WHERE test_table.x = dialect_bind(:x_1)",
dialect=dialect,
table = self._type_decorator_inside_fixture()
self.assert_compile(
- select([table]).where(table.c.y == "hi"),
+ select(table).where(table.c.y == "hi"),
"SELECT test_table.x, inside_colexpr(test_table.y) AS y "
"FROM test_table WHERE test_table.y = inside_bind(:y_1)",
)
# implementation supersedes that, which is the same as with other
# processor functions
self.assert_compile(
- select([table]).where(table.c.y == "hi"),
+ select(table).where(table.c.y == "hi"),
"SELECT dialect_colexpr(test_table.x) AS x, "
"dialect_colexpr(test_table.y) AS y FROM test_table "
"WHERE test_table.y = dialect_bind(:y_1)",
table = self._type_decorator_outside_fixture()
self.assert_compile(
- select([table]).where(table.c.y == "hi"),
+ select(table).where(table.c.y == "hi"),
"SELECT test_table.x, outside_colexpr(test_table.y) AS y "
"FROM test_table WHERE test_table.y = outside_bind(:y_1)",
)
# for "outer", the MyString isn't calling the "impl" functions,
# so we don't get the "impl"
self.assert_compile(
- select([table]).where(table.c.y == "hi"),
+ select(table).where(table.c.y == "hi"),
"SELECT dialect_colexpr(test_table.x) AS x, "
"outside_colexpr(test_table.y) AS y "
"FROM test_table WHERE test_table.y = outside_bind(:y_1)",
table = self._type_decorator_both_fixture()
self.assert_compile(
- select([table]).where(table.c.y == "hi"),
+ select(table).where(table.c.y == "hi"),
"SELECT test_table.x, "
"outside_colexpr(inside_colexpr(test_table.y)) AS y "
"FROM test_table WHERE "
# implementation supersedes that, which is the same as with other
# processor functions
self.assert_compile(
- select([table]).where(table.c.y == "hi"),
+ select(table).where(table.c.y == "hi"),
"SELECT dialect_colexpr(test_table.x) AS x, "
"outside_colexpr(dialect_colexpr(test_table.y)) AS y "
"FROM test_table WHERE "
table = self._variant_fixture(self._type_decorator_both_fixture())
self.assert_compile(
- select([table]).where(table.c.y == "hi"),
+ select(table).where(table.c.y == "hi"),
"SELECT test_table.x, "
"outside_colexpr(inside_colexpr(test_table.y)) AS y "
"FROM test_table WHERE "
def test_compound_select(self):
table = self._fixture()
- s1 = select([table]).where(table.c.y == "hi")
- s2 = select([table]).where(table.c.y == "there")
+ s1 = select(table).where(table.c.y == "hi")
+ s2 = select(table).where(table.c.y == "there")
self.assert_compile(
union(s1, s2),
def test_select_of_compound_select(self):
table = self._fixture()
- s1 = select([table]).where(table.c.y == "hi")
- s2 = select([table]).where(table.c.y == "there")
+ s1 = select(table).where(table.c.y == "hi")
+ s2 = select(table).where(table.c.y == "there")
self.assert_compile(
union(s1, s2).alias().select(),
# conversion back to upper
eq_(
connection.execute(
- select([self.tables.test_table]).order_by(
+ select(self.tables.test_table).order_by(
self.tables.test_table.c.y
)
).fetchall(),
testing.db.execute(
self.tables.test_table.insert(), {"x": "X1", "y": "Y1"}
)
- row = testing.db.execute(select([self.tables.test_table])).first()
+ row = testing.db.execute(select(self.tables.test_table)).first()
eq_(row._mapping[self.tables.test_table.c.y], "Y1")
def test_targeting_by_string(self):
testing.db.execute(
self.tables.test_table.insert(), {"x": "X1", "y": "Y1"}
)
- row = testing.db.execute(select([self.tables.test_table])).first()
+ row = testing.db.execute(select(self.tables.test_table)).first()
eq_(row._mapping["y"], "Y1")
def test_targeting_apply_labels(self):
self.tables.test_table.insert(), {"x": "X1", "y": "Y1"}
)
row = testing.db.execute(
- select([self.tables.test_table]).apply_labels()
+ select(self.tables.test_table).apply_labels()
).first()
eq_(row._mapping[self.tables.test_table.c.y], "Y1")
)
row = testing.db.execute(
select(
- [
- self.tables.test_table.c.x.label("xbar"),
- self.tables.test_table.c.y.label("ybar"),
- ]
+ self.tables.test_table.c.x.label("xbar"),
+ self.tables.test_table.c.y.label("ybar"),
)
).first()
eq_(row._mapping[self.tables.test_table.c.y], "Y1")
self._data_fixture()
stmt = (
- select([users.c.user_id, users.c.goofy8])
+ select(users.c.user_id, users.c.goofy8)
.where(users.c.goofy8.in_([15, 9]))
.order_by(users.c.user_id)
)
self._data_fixture()
stmt = (
- select([users.c.user_id, users.c.goofy8])
+ select(users.c.user_id, users.c.goofy8)
.where(users.c.goofy8.in_(bindparam("goofy", expanding=True)))
.order_by(users.c.user_id)
)
return "HI->%s<-THERE" % value
self.assert_compile(
- select([literal("test", MyType)]),
+ select(literal("test", MyType)),
"SELECT 'HI->test<-THERE' AS anon_1",
dialect="default",
literal_binds=True,
return "HI->%s<-THERE" % value
self.assert_compile(
- select([literal("test", MyType)]),
+ select(literal("test", MyType)),
"SELECT 'HI->test<-THERE' AS anon_1",
dialect="default",
literal_binds=True,
conn.execute(t.insert().values(data=coerce_fn("d1", MyType)))
eq_(
- conn.execute(select([coerce_fn(t.c.data, MyType)])).fetchall(),
+ conn.execute(select(coerce_fn(t.c.data, MyType))).fetchall(),
[("BIND_INd1BIND_OUT",)],
)
conn.execute(t.insert().values(data=coerce_fn(MyObj(), MyType)))
eq_(
- conn.execute(select([coerce_fn(t.c.data, MyType)])).fetchall(),
+ conn.execute(select(coerce_fn(t.c.data, MyType))).fetchall(),
[("BIND_INTHISISMYOBJBIND_OUT",)],
)
eq_(
conn.execute(
- select([t.c.data, coerce_fn(t.c.data, MyType)])
+ select(t.c.data, coerce_fn(t.c.data, MyType))
).fetchall(),
[("BIND_INd1", "BIND_INd1BIND_OUT")],
)
eq_(
conn.execute(
- select([t.c.data.label("x"), coerce_fn(t.c.data, MyType)])
+ select(t.c.data.label("x"), coerce_fn(t.c.data, MyType))
.alias()
.select()
).fetchall(),
# coerce on left side
eq_(
conn.execute(
- select([t.c.data, coerce_fn(t.c.data, MyType)]).where(
+ select(t.c.data, coerce_fn(t.c.data, MyType)).where(
coerce_fn(t.c.data, MyType) == "d1"
)
).fetchall(),
# coerce on right side
eq_(
conn.execute(
- select([t.c.data, coerce_fn(t.c.data, MyType)]).where(
+ select(t.c.data, coerce_fn(t.c.data, MyType)).where(
t.c.data == coerce_fn("d1", MyType)
)
).fetchall(),
conn.execute(t.insert().values(data=coerce_fn("d1", MyType)))
eq_(
conn.execute(
- select([t.c.data, coerce_fn(t.c.data, MyType)]).where(
+ select(t.c.data, coerce_fn(t.c.data, MyType)).where(
t.c.data == coerce_fn(None, MyType)
)
).fetchall(),
eq_(
conn.execute(
- select([t.c.data, coerce_fn(t.c.data, MyType)]).where(
+ select(t.c.data, coerce_fn(t.c.data, MyType)).where(
coerce_fn(t.c.data, MyType) == None
)
).fetchall(), # noqa
eq_(
conn.execute(
- select([t.c.data, coerce_fn(MyFoob(), MyType)])
+ select(t.c.data, coerce_fn(MyFoob(), MyType))
).fetchall(),
[("BIND_INd1", "BIND_INd1BIND_OUT")],
)
t = self.tables.t
conn.execute(t.insert().values(data=coerce_fn("d1", MyType)))
- stmt = select([t.c.data, coerce_fn(t.c.data, MyType)])
+ stmt = select(t.c.data, coerce_fn(t.c.data, MyType))
def col_to_bind(col):
if col is t.c.data:
conn.execute(t.insert().values(data=coerce_fn("d1", MyType)))
stmt = select(
- [
- bindparam(None, "x", String(50), unique=True),
- coerce_fn(
- bindparam(None, "x", String(50), unique=True), MyType
- ),
- ]
+ bindparam(None, "x", String(50), unique=True),
+ coerce_fn(bindparam(None, "x", String(50), unique=True), MyType),
)
eq_(
# when cast() is given an already typed value,
# the type does not take effect on the value itself.
eq_(
- connection.scalar(select([coerce_fn(literal("d1"), MyType)])),
+ connection.scalar(select(coerce_fn(literal("d1"), MyType))),
"d1BIND_OUT",
)
)
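The ``coerce_fn`` in these parametrized tests stands for a coercion construct such as :func:`.cast` or :func:`.type_coerce` (the comment above refers to ``cast()``). The two differ in that ``cast()`` emits a SQL CAST expression, while ``type_coerce()`` changes only the Python-side type handling and leaves the SQL untouched, e.g.::

    from sqlalchemy import String, cast, column, type_coerce

    data = column("data")

    print(cast(data, String))         # CAST(data AS VARCHAR)
    print(type_coerce(data, String))  # data -- SQL is unchanged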
eq_(
- connection.execute(
- select([coerce_fn(t.c.data, MyType)])
- ).fetchall(),
+ connection.execute(select(coerce_fn(t.c.data, MyType))).fetchall(),
[("BIND_INd1BIND_OUT",)],
)
conn.execute(t.insert(), x="foo")
- eq_(conn.scalar(select([t.c.x]).where(t.c.x == "foo")), "fooUTWO")
+ eq_(conn.scalar(select(t.c.x).where(t.c.x == "foo")), "fooUTWO")
@testing.only_on("sqlite")
@testing.provide_metadata
eq_(
conn.scalar(
- select([t.c.x]).where(
+ select(t.c.x).where(
t.c.x
== datetime.datetime(2015, 4, 18, 10, 15, 17, 1059)
)
eq_(
connection.execute(
- select(["foo" + enum_table.c.someenum]).order_by(
- enum_table.c.id
- )
+ select("foo" + enum_table.c.someenum).order_by(enum_table.c.id)
).fetchall(),
[("footwo",), ("footwo",), ("fooone",)],
)
non_native_enum_table = self.tables.non_native_enum_table
connection.execute(enum_table.insert(), {"id": 1, "someenum": None})
- eq_(connection.scalar(select([enum_table.c.someenum])), None)
+ eq_(connection.scalar(select(enum_table.c.someenum)), None)
connection.execute(
non_native_enum_table.insert(), {"id": 1, "someenum": None}
)
- eq_(
- connection.scalar(select([non_native_enum_table.c.someenum])), None
- )
+ eq_(connection.scalar(select(non_native_enum_table.c.someenum)), None)
@testing.requires.enforces_check_constraints
def test_check_constraint(self, connection):
"'four' is not among the defined enum values. "
"Enum name: None. Possible values: one, two, three",
conn.scalar,
- select([self.tables.non_native_enum_table.c.someotherenum]),
+ select(self.tables.non_native_enum_table.c.someotherenum),
)
def test_non_native_round_trip(self, connection):
eq_(
connection.execute(
select(
- [
- non_native_enum_table.c.id,
- non_native_enum_table.c.someenum,
- ]
+ non_native_enum_table.c.id,
+ non_native_enum_table.c.someenum,
).order_by(non_native_enum_table.c.id)
).fetchall(),
[(1, "two"), (2, "two"), (3, "one")],
connection.execute(binary_table.insert(), data=data)
eq_(
connection.scalar(
- select([func.count("*")])
+ select(func.count("*"))
.select_from(binary_table)
.where(binary_table.c.data == data)
),
@testing.requires.binary_literals
def test_literal_roundtrip(self, connection):
- compiled = select([cast(literal(util.b("foo")), LargeBinary)]).compile(
+ compiled = select(cast(literal(util.b("foo")), LargeBinary)).compile(
dialect=testing.db.dialect, compile_kwargs={"literal_binds": True}
)
result = connection.execute(compiled)
eq_(
connection.execute(
select(
- [
- test_table.c.id,
- test_table.c.data,
- test_table.c.atimestamp,
- ]
+ test_table.c.id,
+ test_table.c.data,
+ test_table.c.atimestamp,
).where(expr),
{"thedate": datetime.date(2007, 10, 15)},
).fetchall(),
assert expr.right.type.__class__ is MyTypeDec
eq_(
- connection.execute(select([expr.label("foo")])).scalar(),
+ connection.execute(select(expr.label("foo"))).scalar(),
"BIND_INfooBIND_INhiBIND_OUT",
)
is_(expr.type.__class__, MyTypeDec)
eq_(
- connection.execute(select([expr.label("foo")])).scalar(),
+ connection.execute(select(expr.label("foo"))).scalar(),
"BIND_INfooBIND_IN6BIND_OUT",
)
eq_(expr.type, types.NULLTYPE)
def test_distinct(self, connection):
- s = select([distinct(test_table.c.avalue)])
+ s = select(distinct(test_table.c.avalue))
eq_(connection.execute(s).scalar(), 25)
- s = select([test_table.c.avalue.distinct()])
+ s = select(test_table.c.avalue.distinct())
eq_(connection.execute(s).scalar(), 25)
assert distinct(test_table.c.data).type == test_table.c.data.type
)
eq_(
- conn.scalar(select([boolean_table.c.unconstrained_value])),
- True,
+ conn.scalar(select(boolean_table.c.unconstrained_value)), True,
)
def test_bind_processor_coercion_native_true(self):
u = update(
table1,
values={
- table1.c.name: select(
- [mt.c.name], mt.c.myid == table1.c.myid
- ).scalar_subquery()
+ table1.c.name: select(mt.c.name)
+ .where(mt.c.myid == table1.c.myid)
+ .scalar_subquery()
},
)
self.assert_compile(
table2 = self.tables.myothertable
# test against a regular constructed subquery
- s = select(
- [table2], table2.c.otherid == table1.c.myid
- ).scalar_subquery()
+ s = (
+ select(table2)
+ .where(table2.c.otherid == table1.c.myid)
+ .scalar_subquery()
+ )
u = update(table1, table1.c.name == "jack", values={table1.c.name: s})
self.assert_compile(
u,
table2 = self.tables.myothertable
# test a non-correlated WHERE clause
- s = select([table2.c.othername], table2.c.otherid == 7)
+ s = select(table2.c.othername).where(table2.c.otherid == 7)
u = update(table1, table1.c.name == s.scalar_subquery())
self.assert_compile(
u,
table2 = self.tables.myothertable
# test one that is actually correlated...
- s = select([table2.c.othername], table2.c.otherid == table1.c.myid)
+ s = select(table2.c.othername).where(table2.c.otherid == table1.c.myid)
u = table1.update(table1.c.name == s.scalar_subquery())
self.assert_compile(
u,
Column(
"col2",
Integer,
- onupdate=select([func.coalesce(func.max(foo.c.id))]),
+ onupdate=select(func.coalesce(func.max(foo.c.id))),
),
Column("col3", String(30)),
)
cte = (
q.update().where(q.c.z == 1).values(x=7).returning(q.c.z).cte("c")
)
- stmt = select([p.c.s, cte.c.z]).where(p.c.s == cte.c.z)
+ stmt = select(p.c.s, cte.c.z).where(p.c.s == cte.c.z)
dialect = default.StrCompileDialect()
dialect.paramstyle = "qmark"
"""
table1 = self.tables.mytable
table2 = self.tables.myothertable
- sel = select([table2]).where(table2.c.otherid == 5).alias()
+ sel = select(table2).where(table2.c.otherid == 5).alias()
upd = (
table1.update()
.where(table1.c.name == sel.c.othername)
Column("extra", String(45)),
)
- subset_select = select([common.c.id, common.c.data]).alias()
+ subset_select = select(common.c.id, common.c.data).alias()
eq_(set(sql_util.find_tables(subset_select)), {common})
)
calias = common.alias()
- subset_select = select([common.c.id, calias.c.data]).subquery()
+ subset_select = select(common.c.id, calias.c.data).subquery()
eq_(
set(sql_util.find_tables(subset_select, include_aliases=True)),
(column("q").label(None).desc().label(None), [column("q")]),
("foo", []), # textual label reference
(
- select([column("q")]).scalar_subquery().label(None),
- [select([column("q")]).scalar_subquery().label(None)],
+ select(column("q")).scalar_subquery().label(None),
+ [select(column("q")).scalar_subquery().label(None)],
),
(
- select([column("q")]).scalar_subquery().label(None).desc(),
- [select([column("q")]).scalar_subquery().label(None)],
+ select(column("q")).scalar_subquery().label(None).desc(),
+ [select(column("q")).scalar_subquery().label(None)],
),
)
def test_unwrap_order_by(self, expr, expected):
name="Spaces and Cases",
).data([(1, "textA", 99), (2, "textB", 88)])
self.assert_compile(
- select([v1]),
+ select(v1),
'SELECT "Spaces and Cases"."CaseSensitive", '
'"Spaces and Cases"."has spaces" FROM '
"(VALUES (:param_1, :param_2, :param_3), "
def test_bound_parameters(self, literal_parameter_fixture):
literal_parameter_fixture = literal_parameter_fixture(False)
- stmt = select([literal_parameter_fixture])
+ stmt = select(literal_parameter_fixture)
self.assert_compile(
stmt,
def test_literal_parameters(self, literal_parameter_fixture):
literal_parameter_fixture = literal_parameter_fixture(True)
- stmt = select([literal_parameter_fixture])
+ stmt = select(literal_parameter_fixture)
self.assert_compile(
stmt,
values = Values(
column("column1", Integer), column("column2", Integer),
).data([(1, 1), (2, 1), (3, 2), (3, 3)])
- stmt = select([people, values]).select_from(
+ stmt = select(people, values).select_from(
people.join(values, values.c.column2 == people.c.people_id)
)
self.assert_compile(
column("bookcase_owner_id", Integer),
name="bookcases",
).data([(1, 1), (2, 1), (3, 2), (3, 3)])
- stmt = select([people, values]).select_from(
+ stmt = select(people, values).select_from(
people.join(
values, values.c.bookcase_owner_id == people.c.people_id
)
.data([(1, 1), (2, 1), (3, 2), (3, 3)])
.alias("bookcases")
)
- stmt = select([people, values]).select_from(
+ stmt = select(people, values).select_from(
people.join(
values, values.c.bookcase_owner_id == people.c.people_id
)
).data([(1, 1), (2, 1), (3, 2), (3, 3)])
values = alias(values, "bookcases")
- stmt = select([people, values]).select_from(
+ stmt = select(people, values).select_from(
people.join(
values, values.c.bookcase_owner_id == people.c.people_id
)
.data([(1, 1), (2, 1), (3, 2), (3, 3)])
.lateral()
)
- stmt = select([people, values]).select_from(
- people.join(values, true())
- )
+ stmt = select(people, values).select_from(people.join(values, true()))
self.assert_compile(
stmt,
"SELECT people.people_id, people.age, people.name, "
column("bookcase_owner_id", Integer),
name="bookcases",
).data([(1, 1), (2, 1), (3, 2), (3, 3)])
- stmt = select([people, values])
+ stmt = select(people, values)
with testing.expect_warnings(
r"SELECT statement has a cartesian product between FROM "
column("bookcase_id", Integer),
column("bookcase_owner_id", Integer),
).data([(1, 1), (2, 1), (3, 2), (3, 3)])
- stmt = select([people, values])
+ stmt = select(people, values)
with testing.expect_warnings(
r"SELECT statement has a cartesian product between FROM "