Implement support for functions as FROM with columns clause support
author     Mike Bayer <mike_mp@zzzcomputing.com>
Sun, 17 Jan 2021 18:35:02 +0000 (13:35 -0500)
committer  Mike Bayer <mike_mp@zzzcomputing.com>
Wed, 3 Feb 2021 20:52:17 +0000 (15:52 -0500)
Implemented support for "table valued functions" along with additional
syntaxes supported by PostgreSQL, one of the most commonly requested
features. Table valued functions are SQL functions that return lists of
values or rows, and are prevalent in PostgreSQL in the area of JSON
functions, where the "table value" is commonly referred to as the
"record" datatype. Table valued functions are also supported by Oracle and
SQL Server.

Moved from I5b093b72533ef695293e737eb75850b9713e5e03 due
to accidental push

Fixes: #3566
Change-Id: Iea36d04c80a5ed3509dcdd9ebf0701687143fef5

32 files changed:
doc/build/changelog/unreleased_14/3566.rst [new file with mode: 0644]
doc/build/core/functions.rst
doc/build/core/selectable.rst
doc/build/dialects/postgresql.rst
doc/build/orm/join_conditions.rst
doc/build/tutorial/data.rst
lib/sqlalchemy/dialects/oracle/base.py
lib/sqlalchemy/dialects/postgresql/array.py
lib/sqlalchemy/dialects/postgresql/base.py
lib/sqlalchemy/sql/base.py
lib/sqlalchemy/sql/coercions.py
lib/sqlalchemy/sql/compiler.py
lib/sqlalchemy/sql/elements.py
lib/sqlalchemy/sql/expression.py
lib/sqlalchemy/sql/functions.py
lib/sqlalchemy/sql/roles.py
lib/sqlalchemy/sql/selectable.py
lib/sqlalchemy/sql/sqltypes.py
lib/sqlalchemy/sql/traversals.py
lib/sqlalchemy/sql/type_api.py
lib/sqlalchemy/sql/visitors.py
lib/sqlalchemy/testing/requirements.py
lib/sqlalchemy/testing/suite/test_select.py
test/dialect/mssql/test_query.py
test/dialect/oracle/test_compiler.py
test/dialect/oracle/test_dialect.py
test/dialect/postgresql/test_query.py
test/requirements.py
test/sql/test_compare.py
test/sql/test_functions.py
test/sql/test_lateral.py
test/sql/test_roles.py

diff --git a/doc/build/changelog/unreleased_14/3566.rst b/doc/build/changelog/unreleased_14/3566.rst
new file mode 100644 (file)
index 0000000..9825671
--- /dev/null
@@ -0,0 +1,34 @@
+.. change::
+    :tags: feature, sql
+    :tickets: 3566
+
+    Implemented support for "table valued functions" along with additional
+    syntaxes supported by PostgreSQL, one of the most commonly requested
+    features. Table valued functions are SQL functions that return lists of
+    values or rows, and are prevalent in PostgreSQL in the area of JSON
+    functions, where the "table value" is commonly referred to as the
+    "record" datatype. Table valued functions are also supported by Oracle and
+    SQL Server.
+
+    Features added include:
+
+    * the :meth:`_functions.FunctionElement.table_valued` modifier that creates a table-like
+      selectable object from a SQL function
+    * A :class:`_sql.TableValuedAlias` construct that renders a SQL function
+      as a named table
+    * Support for PostgreSQL's special "derived column" syntax that includes
+      column names and sometimes datatypes, such as for the
+      ``json_to_recordset`` function, using the
+      :meth:`_sql.TableValuedAlias.render_derived` method.
+    * Support for PostgreSQL's "WITH ORDINALITY" construct using the
+      :paramref:`_functions.FunctionElement.table_valued.with_ordinality` parameter
+    * Support for selecting FROM a SQL function as a column-valued scalar, a
+      syntax supported by PostgreSQL and Oracle, via the
+      :meth:`_functions.FunctionElement.column_valued` method
+    * A way to SELECT a single column from a table-valued expression without
+      using a FROM clause via the :meth:`_functions.FunctionElement.scalar_table_valued`
+      method.
+
+    .. seealso::
+
+      :ref:`tutorial_functions_table_valued` - in the :ref:`unified_tutorial`
\ No newline at end of file
index 6cfcbd3041da08fb9c05bcbe41cfc0a603f5d703..8a3c5221fd15670f46da42923f832f2891e408b6 100644 (file)
@@ -19,10 +19,12 @@ unknown to SQLAlchemy, built-in or user defined. The section here only
 describes those functions where SQLAlchemy already knows what argument and
 return types are in use.
 
+.. seealso::
+
+    :ref:`tutorial_functions` - in the :ref:`unified_tutorial`
+
 .. automodule:: sqlalchemy.sql.functions
    :members:
-   :undoc-members:
    :exclude-members: func
 
 
-
index e6ff9412e00ad0556cc0f694589d917b9bdfaa06..b6f1001149c8198b39ecfb08bc6a59180a6c26a7 100644 (file)
@@ -139,6 +139,9 @@ The classes here are generated using the constructors listed at
 .. autoclass:: TableSample
    :members:
 
+.. autoclass:: TableValuedAlias
+   :members:
+
 .. autoclass:: TextualSelect
    :members:
    :inherited-members:
index 2b062f581f16b980d19df2c7beb0ef99709da97c..34cdabc1039e89a2640a4dc0580f3d28ec9686f7 100644 (file)
@@ -5,8 +5,8 @@ PostgreSQL
 
 .. automodule:: sqlalchemy.dialects.postgresql.base
 
-PostgreSQL Data Types
----------------------
+PostgreSQL Data Types and Custom SQL Constructs
+------------------------------------------------
 
 As with all SQLAlchemy dialects, all UPPERCASE types that are known to be
 valid with PostgreSQL are importable from the top level dialect, whether
index 52ae156b24d2776d50209658df99a00850992a29..9f163307e4fe13105aed21896701580409a9dec5 100644 (file)
@@ -310,6 +310,8 @@ Will render as::
    flag to assist in the creation of :func:`_orm.relationship` constructs using
    custom operators.
 
+.. _relationship_custom_operator_sql_function:
+
 Custom operators based on SQL functions
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
index b6c8260a86113ce4b169e9564517e9d65cb33b36..1b9d946b291c33df72761442761a17fe75d51416 100644 (file)
@@ -797,6 +797,9 @@ we call upon the name ``count()`` name::
     >>> print(count_fn)
     {opensql}count(user_account.id)
 
+SQL functions are described in more detail later in this tutorial at
+:ref:`tutorial_functions`.
+
 When using aggregate functions in SQL, the GROUP BY clause is essential in that
 it allows rows to be partitioned into groups where aggregate functions will
 be applied to each group individually.  When requesting non-aggregated columns
@@ -1278,6 +1281,419 @@ clause:
     [('patrick',)]
     {opensql}ROLLBACK{stop}
 
+.. _tutorial_functions:
+
+Working with SQL Functions
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+First introduced earlier in this section at
+:ref:`tutorial_group_by_w_aggregates`, the :data:`_sql.func` object serves as a
+factory for creating new :class:`_functions.Function` objects, which when used
+in a construct like :func:`_sql.select`, produce a SQL function display,
+typically consisting of a name, some parenthesis (although not always), and
+possibly some arguments. Examples of typical SQL functions include:
+
+* the ``count()`` function, an aggregate function which counts how many
+  rows are returned:
+
+  .. sourcecode:: pycon+sql
+
+      >>> print(select(func.count()).select_from(user_table))
+      SELECT count(*) AS count_1
+      FROM user_account
+
+  ..
+
+* the ``lower()`` function, a string function that converts a string to lower
+  case:
+
+  .. sourcecode:: pycon+sql
+
+      >>> print(select(func.lower("A String With Much UPPERCASE")))
+      SELECT lower(:lower_2) AS lower_1
+
+  ..
+
+* the ``now()`` function, which provides for the current date and time; as this
+  is a common function, SQLAlchemy knows how to render this differently for each
+  backend, in the case of SQLite using the CURRENT_TIMESTAMP function:
+
+  .. sourcecode:: pycon+sql
+
+      >>> stmt = select(func.now())
+      >>> with engine.connect() as conn:
+      ...     result = conn.execute(stmt)
+      ...     print(result.all())
+      {opensql}BEGIN (implicit)
+      SELECT CURRENT_TIMESTAMP AS now_1
+      [...] ()
+      [(datetime.datetime(...),)]
+      ROLLBACK
+
+  ..
+
+As most database backends feature dozens if not hundreds of different SQL
+functions, :data:`_sql.func` tries to be as liberal as possible in what it
+accepts. Any name that is accessed from this namespace is automatically
+considered to be a SQL function that will render in a generic way::
+
+    >>> print(select(func.some_crazy_function(user_table.c.name, 17)))
+    SELECT some_crazy_function(user_account.name, :some_crazy_function_2) AS some_crazy_function_1
+    FROM user_account
+
+At the same time, a relatively small set of extremely common SQL functions such
+as :class:`_functions.count`, :class:`_functions.now`, :class:`_functions.max`,
+:class:`_functions.concat` include pre-packaged versions of themselves which
+provide for proper typing information as well as backend-specific SQL
+generation in some cases.  The example below contrasts the SQL generation
+that occurs for the PostgreSQL dialect compared to the Oracle dialect for
+the :class:`_functions.now` function::
+
+    >>> from sqlalchemy.dialects import postgresql
+    >>> print(select(func.now()).compile(dialect=postgresql.dialect()))
+    SELECT now() AS now_1
+
+    >>> from sqlalchemy.dialects import oracle
+    >>> print(select(func.now()).compile(dialect=oracle.dialect()))
+    SELECT CURRENT_TIMESTAMP AS now_1 FROM DUAL
+
+Functions Have Return Types
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+As functions are column expressions, they also have
+SQL :ref:`datatypes <types_toplevel>` that describe the data type of
+a generated SQL expression.  We refer to these types here as "SQL return types",
+in reference to the type of SQL value that is returned by the function
+in the context of a database-side SQL expression,
+as opposed to the "return type" of a Python function.
+
+The SQL return type of any SQL function may be accessed, typically for
+debugging purposes, by referring to the :attr:`_functions.Function.type`
+attribute::
+
+    >>> func.now().type
+    DateTime()
+
+These SQL return types are significant when making
+use of the function expression in the context of a larger expression; that is,
+math operators will work better when the datatype of the expression is
+something like :class:`_types.Integer` or :class:`_types.Numeric`, while JSON
+accessors will only work when using a type such as
+:class:`_types.JSON`.  Certain classes of functions return entire rows
+instead of column values, in which case there is a need to refer to specific
+columns; such functions are referred to
+as :ref:`table valued functions <tutorial_functions_table_valued>`.
+
+The SQL return type of the function may also be significant when executing a
+statement and getting rows back, for those cases where SQLAlchemy has to apply
+result-set processing. A prime example of this are date-related functions on
+SQLite, where SQLAlchemy's :class:`_types.DateTime` and related datatypes take
+on the role of converting from string values to Python ``datetime()`` objects
+as result rows are received.
+
+To apply a specific type to a function we're creating, we pass it using the
+:paramref:`_functions.Function.type_` parameter; the type argument may be
+either a :class:`_types.TypeEngine` class or an instance.  In the example
+below we pass the :class:`_types.JSON` class to generate the PostgreSQL
+``json_object()`` function, noting that the SQL return type will be of
+type JSON::
+
+    >>> from sqlalchemy import JSON
+    >>> function_expr = func.json_object('{a, 1, b, "def", c, 3.5}', type_=JSON)
+
+By creating our JSON function with the :class:`_types.JSON` datatype, the
+SQL expression object takes on JSON-related features, such as that of accessing
+elements::
+
+    >>> stmt = select(function_expr["def"])
+    >>> print(stmt)
+    SELECT json_object(:json_object_1)[:json_object_2] AS anon_1
+
+Built-in Functions Have Pre-Configured Return Types
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For common aggregate functions like :class:`_functions.count`,
+:class:`_functions.max`, :class:`_functions.min` as well as a very small number
+of date functions like :class:`_functions.now` and string functions like
+:class:`_functions.concat`, the SQL return type is set up appropriately,
+sometimes based on usage. The :class:`_functions.max` function and similar
+aggregate filtering functions will set up the SQL return type based on the
+argument given::
+
+    >>> m1 = func.max(Column("some_int", Integer))
+    >>> m1.type
+    Integer()
+
+    >>> m2 = func.max(Column("some_str", String))
+    >>> m2.type
+    String()
+
+Date and time functions typically correspond to SQL expressions described by
+:class:`_types.DateTime`, :class:`_types.Date` or :class:`_types.Time`::
+
+    >>> func.now().type
+    DateTime()
+    >>> func.current_date().type
+    Date()
+
+A known string function such as :class:`_functions.concat`
+will know that a SQL expression would be of type :class:`_types.String`::
+
+    >>> func.concat("x", "y").type
+    String()
+
+However, the vast majority of SQL functions are not present in SQLAlchemy's
+very small list of known functions.  For example,
+while there is typically no issue using SQL functions ``func.lower()``
+and ``func.upper()`` to convert the casing of strings, SQLAlchemy doesn't
+actually know about these functions, so they have a "null" SQL return type::
+
+    >>> func.upper("lowercase").type
+    NullType()
+
+For simple functions like ``upper`` and ``lower``, the issue is not usually
+significant, as string values may be received from the database without any
+special type handling on the SQLAlchemy side, and SQLAlchemy's type
+coercion rules can often correctly guess intent as well; the Python ``+``
+operator for example will be correctly interpreted as the string concatenation
+operator based on looking at both sides of the expression::
+
+    >>> print(select(func.upper("lowercase") + " suffix"))
+    SELECT upper(:upper_1) || :upper_2 AS anon_1
+
+Overall, the scenarios where the
+:paramref:`_functions.Function.type_` parameter is likely necessary are as
+follows; a brief sketch illustrating the second scenario appears after the
+list:
+
+1. the function is not already a SQLAlchemy built-in function; this can be
+   evidenced by creating the function and observing the :attr:`_functions.Function.type`
+   attribute, that is::
+
+      >>> func.count().type
+      Integer()
+
+   ..
+
+   vs.::
+
+      >>> func.json_object('{"a", "b"}').type
+      NullType()
+
+2. Function-aware expression support is needed; this most typically refers to
+   special operators related to datatypes such as :class:`_types.JSON` or
+   :class:`_types.ARRAY`
+
+3. Result value processing is needed, which may include types such as
+   :class:`_types.DateTime`, :class:`_types.Boolean`, :class:`_types.Enum`,
+   or again special datatypes such as :class:`_types.JSON`,
+   :class:`_types.ARRAY`.
+
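+As a brief sketch of the second scenario above, the example below assumes a
+hypothetical SQL function named ``my_array_fn`` (the name is made up purely
+for illustration) that returns an array on the database side; passing an
+:class:`_types.ARRAY` instance as the ``type_`` enables array-style index
+access on the resulting expression::
+
+    from sqlalchemy import ARRAY, Integer, func, select
+
+    # ARRAY(Integer) as the SQL return type enables the [index] operator
+    # on the resulting column expression
+    expr = func.my_array_fn("some_argument", type_=ARRAY(Integer))
+    stmt = select(expr[1])
+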
+.. _tutorial_window_functions:
+
+Using Window Functions
+~~~~~~~~~~~~~~~~~~~~~~
+
+A window function is a special use of a SQL aggregate function which calculates
+the aggregate value over the rows being returned in a group as the individual
+result rows are processed.  Whereas a function like ``MAX()`` will give you
+the highest value of a column within a set of rows, using the same function
+as a "window function" will given you the highest value for each row,
+*as of that row*.
+
+In SQL, window functions allow one to specify the rows over which the
+function should be applied, a "partition" value which considers the window
+over different sub-sets of rows, and an "order by" expression which importantly
+indicates the order in which rows should be applied to the aggregate function.
+
+In SQLAlchemy, all SQL functions generated by the :data:`_sql.func` namespace
+include a method :meth:`_functions.FunctionElement.over` which
+grants the window function, or "OVER", syntax; the construct produced
+is the :class:`_sql.Over` construct.
+
+A common function used with window functions is the ``row_number()`` function
+which simply counts rows. We may partition this row count against user name to
+number the email addresses of individual users:
+
+.. sourcecode:: pycon+sql
+
+    >>> stmt = select(
+    ...     func.row_number().over(partition_by=user_table.c.name),
+    ...     user_table.c.name,
+    ...     address_table.c.email_address
+    ... ).select_from(user_table).join(address_table)
+    >>> with engine.connect() as conn:
+    ...     result = conn.execute(stmt)
+    ...     print(result.all())
+    {opensql}BEGIN (implicit)
+    SELECT row_number() OVER (PARTITION BY user_account.name) AS anon_1,
+    user_account.name, address.email_address
+    FROM user_account JOIN address ON user_account.id = address.user_id
+    [...] ()
+    [(1, 'sandy', 'sandy@sqlalchemy.org'), (2, 'sandy', 'sandy@squirrelpower.org'), (1, 'spongebob', 'spongebob@sqlalchemy.org')]
+    ROLLBACK
+
+Above, the :paramref:`_functions.FunctionElement.over.partition_by` parameter
+is used so that the ``PARTITION BY`` clause is rendered within the OVER clause.
+We also may make use of the ``ORDER BY`` clause using :paramref:`_functions.FunctionElement.over.order_by`:
+
+.. sourcecode:: pycon+sql
+
+    >>> stmt = select(
+    ...     func.count().over(order_by=user_table.c.name),
+    ...     user_table.c.name,
+    ...     address_table.c.email_address).select_from(user_table).join(address_table)
+    >>> with engine.connect() as conn:
+    ...     result = conn.execute(stmt)
+    ...     print(result.all())
+    {opensql}BEGIN (implicit)
+    SELECT count(*) OVER (ORDER BY user_account.name) AS anon_1,
+    user_account.name, address.email_address
+    FROM user_account JOIN address ON user_account.id = address.user_id
+    [...] ()
+    [(2, 'sandy', 'sandy@sqlalchemy.org'), (2, 'sandy', 'sandy@squirrelpower.org'), (3, 'spongebob', 'spongebob@sqlalchemy.org')]
+    ROLLBACK
+
+Further options for window functions include usage of ranges; see
+:func:`_expression.over` for more examples.
+
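+As a minimal sketch of a row-based frame, assuming the same ``user_table``
+used throughout this tutorial, the ``rows=(-2, 0)`` tuple below requests a
+frame spanning the two preceding rows through the current row::
+
+    # moving sum over a frame of at most three rows
+    stmt = select(
+        func.sum(user_table.c.id).over(
+            order_by=user_table.c.name, rows=(-2, 0)
+        ),
+        user_table.c.name,
+    )
+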
+.. tip::
+
+   It's important to note that the :meth:`_functions.FunctionElement.over`
+   method only applies to those SQL functions which are in fact aggregate
+   functions or dedicated window functions such as ``row_number()``; while the
+   :class:`_sql.Over` construct will happily render itself for any SQL
+   function given, the database will reject the expression if the function
+   itself is not a SQL aggregate or window function.
+
+.. _tutorial_functions_within_group:
+
+Special Modifiers WITHIN GROUP, FILTER
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The "WITHIN GROUP" SQL syntax is used in conjunction with an "ordered set"
+or a "hypothetical set" aggregate
+function.  Common "ordered set" functions include ``percentile_cont()``
+and ``rank()``.  SQLAlchemy includes built in implementations
+:class:`_functions.rank`, :class:`_functions.dense_rank`,
+:class:`_functions.mode`, :class:`_functions.percentile_cont` and
+:class:`_functions.percentile_disc` which include a :meth:`_functions.FunctionElement.within_group`
+method::
+
+    >>> print(
+    ...     func.unnest(
+    ...         func.percentile_disc([0.25,0.5,0.75,1]).within_group(user_table.c.name)
+    ...     )
+    ... )
+    unnest(percentile_disc(:percentile_disc_1) WITHIN GROUP (ORDER BY user_account.name))
+
+"FILTER" is supported by some backends to limit the range of an aggregate function to a
+particular subset of rows compared to the total range of rows returned, available
+using the :meth:`_functions.FunctionElement.filter` method::
+
+    >>> stmt = select(
+    ...     func.count(address_table.c.email_address).filter(user_table.c.name == 'sandy'),
+    ...     func.count(address_table.c.email_address).filter(user_table.c.name == 'spongebob')
+    ... ).select_from(user_table).join(address_table)
+    >>> with engine.connect() as conn:
+    ...     result = conn.execute(stmt)
+    ...     print(result.all())
+    {opensql}BEGIN (implicit)
+    SELECT count(address.email_address) FILTER (WHERE user_account.name = ?) AS anon_1,
+    count(address.email_address) FILTER (WHERE user_account.name = ?) AS anon_2
+    FROM user_account JOIN address ON user_account.id = address.user_id
+    [...] ('sandy', 'spongebob')
+    [(2, 1)]
+    ROLLBACK
+
+.. _tutorial_functions_table_valued:
+
+Table-Valued Functions
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Table-valued SQL functions support a scalar representation that contains named
+sub-elements. Often used for JSON and ARRAY-oriented functions as well as
+functions like ``generate_series()``, the table-valued function is specified in
+the FROM clause, and is then referred to as a table, or sometimes even as
+a column. Functions of this form are prominent within the PostgreSQL database;
+however, some forms of table valued functions are also supported by SQLite,
+Oracle, and SQL Server.
+
+.. seealso::
+
+    :ref:`postgresql_table_valued_overview` - in the :ref:`postgresql_toplevel` documentation.
+
+    While many databases support table valued and other special
+    forms, PostgreSQL tends to be where there is the most demand for these
+    features.   See this section for additional examples of PostgreSQL
+    syntaxes as well as additional features.
+
+SQLAlchemy provides the :meth:`_functions.FunctionElement.table_valued` method
+as the basic "table valued function" construct, which will convert a
+:data:`_sql.func` object into a FROM clause containing a series of named
+columns, based on string names passed positionally. This returns a
+:class:`_sql.TableValuedAlias` object, which is a function-enabled
+:class:`_sql.Alias` construct that may be used as any other FROM clause as
+introduced at :ref:`tutorial_using_aliases`. Below we illustrate the
+``json_each()`` function, which while common on PostgreSQL is also supported by
+modern versions of SQLite::
+
+    >>> onetwothree = func.json_each('["one", "two", "three"]').table_valued("value")
+    >>> stmt = select(onetwothree).where(onetwothree.c.value.in_(["two", "three"]))
+    >>> with engine.connect() as conn:
+    ...     result = conn.execute(stmt)
+    ...     print(result.all())
+    {opensql}BEGIN (implicit)
+    SELECT anon_1.value
+    FROM json_each(?) AS anon_1
+    WHERE anon_1.value IN (?, ?)
+    [...] ('["one", "two", "three"]', 'two', 'three')
+    [('two',), ('three',)]
+    ROLLBACK
+
+Above, we used the ``json_each()`` JSON function supported by SQLite and
+PostgreSQL to generate a table valued expression with a single column referred
+to as ``value``, and then selected two of its three rows.
+
+.. seealso::
+
+    :ref:`postgresql_table_valued` - in the :ref:`postgresql_toplevel` documentation -
+    this section will detail additional syntaxes such as special column derivations
+    and "WITH ORDINALITY" that are known to work with PostgreSQL.
+
+.. _tutorial_functions_column_valued:
+
+Column Valued Functions - Table Valued Function as a Scalar Column
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+A special syntax supported by PostgreSQL and Oracle is that of referring
+to a function in the FROM clause, which then delivers itself as a
+single column in the columns clause of a SELECT statement or other column
+expression context.  PostgreSQL makes great use of this syntax for such
+functions as ``json_array_elements()``, ``json_object_keys()``,
+``json_each_text()``, ``json_each()``, etc.
+
+SQLAlchemy refers to this as a "column valued" function; it is available
+by applying the :meth:`_functions.FunctionElement.column_valued` modifier
+to a :class:`_functions.Function` construct::
+
+    >>> from sqlalchemy import select, func
+    >>> stmt = select(func.json_array_elements('["one", "two"]').column_valued("x"))
+    >>> print(stmt)
+    SELECT x
+    FROM json_array_elements(:json_array_elements_1) AS x
+
+The "column valued" form is also supported by the Oracle dialect, where
+it is usable for custom SQL functions::
+
+    >>> from sqlalchemy.dialects import oracle
+    >>> stmt = select(func.scalar_strings(5).column_valued("s"))
+    >>> print(stmt.compile(dialect=oracle.dialect()))
+    SELECT COLUMN_VALUE s
+    FROM TABLE (scalar_strings(:scalar_strings_1)) s
+
+
+.. seealso::
+
+    :ref:`postgresql_column_valued` - in the :ref:`postgresql_toplevel` documentation.
+
 
 .. rst-class:: core-header, orm-addin
 
index 6c381a2eca8dbab646720a1bd1c76841c939a04f..9344abeeefe475060d2b8cd4751533c70eecb848 100644 (file)
@@ -924,6 +924,19 @@ class OracleCompiler(compiler.SQLCompiler):
         else:
             return ""
 
+    def visit_function(self, func, **kw):
+        text = super(OracleCompiler, self).visit_function(func, **kw)
+        if kw.get("asfrom", False):
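+            # when the function is used as a FROM element, Oracle requires
+            # the TABLE() wrapper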
+            text = "TABLE (%s)" % func
+        return text
+
+    def visit_table_valued_column(self, element, **kw):
+        text = super(OracleCompiler, self).visit_table_valued_column(
+            element, **kw
+        )
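+        # Oracle names the scalar value delivered by a TABLE() expression
+        # with the implicit COLUMN_VALUE column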
+        text = "COLUMN_VALUE " + text
+        return text
+
     def default_from(self):
         """Called when a ``SELECT`` statement has no froms,
         and no ``FROM`` clause is to be appended.
index ad71db89ee427c08bcd859a578eebb6bc6d73e82..91bb89ea949b3a56a6d5f5e3e428eb750a92fa3f 100644 (file)
@@ -96,6 +96,8 @@ class array(expression.ClauseList, expression.ColumnElement):
 
     __visit_name__ = "array"
 
+    stringify_dialect = "postgresql"
+
     def __init__(self, clauses, **kw):
         clauses = [
             coercions.expect(roles.ExpressionElementRole, c) for c in clauses
index 08d7cd9e5821e28ddc5a4000f59090ae153c36cb..f067e6537e3efe3966948dc408217a33938301c3 100644 (file)
@@ -1038,26 +1038,157 @@ dialect in conjunction with the :class:`_schema.Table` construct:
     `PostgreSQL CREATE TABLE options
     <http://www.postgresql.org/docs/current/static/sql-createtable.html>`_
 
-Table values, Row and Tuple objects
------------------------------------
+.. _postgresql_table_valued_overview:
 
-Row Types
-^^^^^^^^^
+Table values, Table and Column valued functions, Row and Tuple objects
+-----------------------------------------------------------------------
+
+PostgreSQL makes great use of modern SQL forms such as table-valued functions,
+tables and rows as values.   These constructs are commonly used as part
+of PostgreSQL's support for complex datatypes such as JSON, ARRAY, and other
+datatypes.  SQLAlchemy's SQL expression language has native support for
+most table-valued and row-valued forms.
+
+.. _postgresql_table_valued:
+
+Table-Valued Functions
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Many PostgreSQL built-in functions are intended to be used in the FROM clause
+of a SELECT statement, and are capable of returning table rows or sets of table
+rows. A large portion of PostgreSQL's JSON functions for example such as
+``json_array_elements()``, ``json_object_keys()``, ``json_each_text()``,
+``json_each()``, ``json_to_record()``, ``json_populate_recordset()`` use such
+forms. These classes of SQL function calling forms in SQLAlchemy are available
+using the :meth:`_functions.FunctionElement.table_valued` method in conjunction
+with :class:`_functions.Function` objects generated from the :data:`_sql.func`
+namespace.
 
-Built-in support for rendering a ``ROW`` is not available yet, however the
-:func:`_expression.tuple_` may be used in its place. Another alternative is
-to use the :attr:`_sa.func` generator with ``func.ROW`` ::
+Examples from PostgreSQL's reference documentation follow below:
 
-    table.select().where(
-        tuple_(table.c.id, table.c.fk) > (1,2)
-    ).where(func.ROW(table.c.id, table.c.fk) < func.ROW(3, 7))
+* ``json_each()``::
 
-Will generate the row-wise comparison::
+    >>> from sqlalchemy import select, func
+    >>> stmt = select(func.json_each('{"a":"foo", "b":"bar"}').table_valued("key", "value"))
+    >>> print(stmt)
+    SELECT anon_1.key, anon_1.value
+    FROM json_each(:json_each_1) AS anon_1
 
-    SELECT *
-    FROM table
-    WHERE (id, fk) > (1, 2)
-    AND ROW(id, fk) < ROW(3, 7)
+* ``json_populate_record()``::
+
+    >>> from sqlalchemy import select, func, literal_column
+    >>> stmt = select(
+    ...     func.json_populate_record(
+    ...         literal_column("null::myrowtype"),
+    ...         '{"a":1,"b":2}'
+    ...     ).table_valued("a", "b", name="x")
+    ... )
+    >>> print(stmt)
+    SELECT x.a, x.b
+    FROM json_populate_record(null::myrowtype, :json_populate_record_1) AS x
+
+* ``json_to_record()`` - this form uses a PostgreSQL specific form of derived
+  columns in the alias, where we may make use of :func:`_sql.column` elements with
+  types to produce them.  The :meth:`_functions.FunctionElement.table_valued`
+  method produces a :class:`_sql.TableValuedAlias` construct, and the
+  :meth:`_sql.TableValuedAlias.render_derived` method sets up the derived
+  columns specification::
+
+    >>> from sqlalchemy import select, func, column, Integer, Text
+    >>> stmt = select(
+    ...     func.json_to_record('{"a":1,"b":[1,2,3],"c":"bar"}').table_valued(
+    ...         column("a", Integer), column("b", Text), column("d", Text),
+    ...     ).render_derived(name="x", with_types=True)
+    ... )
+    >>> print(stmt)
+    SELECT x.a, x.b, x.d
+    FROM json_to_record(:json_to_record_1) AS x(a INTEGER, b TEXT, d TEXT)
+
+* ``WITH ORDINALITY`` - part of the SQL standard, ``WITH ORDINALITY`` adds an
+  ordinal counter to the output of a function and is accepted by a limited set
+  of PostgreSQL functions including ``unnest()`` and ``generate_series()``. The
+  :meth:`_functions.FunctionElement.table_valued` method accepts a keyword
+  parameter ``with_ordinality`` for this purpose, which accepts the string name
+  that will be applied to the "ordinality" column::
+
+    >>> from sqlalchemy import select, func
+    >>> stmt = select(
+    ...     func.generate_series(4, 1, -1).table_valued("value", with_ordinality="ordinality")
+    ... )
+    >>> print(stmt)
+    SELECT anon_1.value, anon_1.ordinality
+    FROM generate_series(:generate_series_1, :generate_series_2, :generate_series_3) WITH ORDINALITY AS anon_1
+
+.. versionadded:: 1.4.0b2
+
+.. seealso::
+
+    :ref:`tutorial_functions_table_valued` - in the :ref:`unified_tutorial`
+
+.. _postgresql_column_valued:
+
+Column Valued Functions
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Similar to the table valued function, a column valued function is present
+in the FROM clause, but delivers itself to the columns clause as a single
+scalar value.  PostgreSQL functions such as ``json_array_elements()``,
+``unnest()`` and ``generate_series()`` may use this form. Column valued functions are available using the
+:meth:`_functions.FunctionElement.column_valued` method of :class:`_functions.FunctionElement`:
+
+* ``json_array_elements()``::
+
+    >>> from sqlalchemy import select, func
+    >>> stmt = select(func.json_array_elements('["one", "two"]').column_valued("x"))
+    >>> print(stmt)
+    SELECT x
+    FROM json_array_elements(:json_array_elements_1) AS x
+
+* ``unnest()`` - in order to generate a PostgreSQL ARRAY literal, the
+  :func:`_postgresql.array` construct may be used::
+
+
+    >>> from sqlalchemy.dialects.postgresql import array
+    >>> from sqlalchemy import select, func
+    >>> stmt = select(func.unnest(array([1, 2])).column_valued())
+    >>> print(stmt)
+    SELECT anon_1
+    FROM unnest(ARRAY[%(param_1)s, %(param_2)s]) AS anon_1
+
+  The function can of course be used against an existing table-bound column
+  that's of type :class:`_types.ARRAY`::
+
+    >>> from sqlalchemy import table, column, ARRAY, Integer
+    >>> from sqlalchemy import select, func
+    >>> t = table("t", column('value', ARRAY(Integer)))
+    >>> stmt = select(func.unnest(t.c.value).column_valued("unnested_value"))
+    >>> print(stmt)
+    SELECT unnested_value
+    FROM unnest(t.value) AS unnested_value
+
+.. seealso::
+
+    :ref:`tutorial_functions_column_valued` - in the :ref:`unified_tutorial`
+
+
+Row Types
+^^^^^^^^^
+
+There is no built-in construct for rendering a ``ROW``; it may however be
+approximated using ``func.ROW`` with the :attr:`_sa.func` namespace, or by
+using the :func:`_sql.tuple_` construct::
+
+    >>> from sqlalchemy import table, column, func, tuple_
+    >>> t = table("t", column("id"), column("fk"))
+    >>> stmt = t.select().where(
+    ...     tuple_(t.c.id, t.c.fk) > (1,2)
+    ... ).where(
+    ...     func.ROW(t.c.id, t.c.fk) < func.ROW(3, 7)
+    ... )
+    >>> print(stmt)
+    SELECT t.id, t.fk
+    FROM t
+    WHERE (t.id, t.fk) > (:param_1, :param_2) AND ROW(t.id, t.fk) < ROW(:ROW_1, :ROW_2)
 
 .. seealso::
 
@@ -1067,20 +1198,27 @@ Will generate the row-wise comparison::
     `PostgreSQL Row Constructor Comparison
     <https://www.postgresql.org/docs/current/functions-comparisons.html#ROW-WISE-COMPARISON>`_
 
-Table Types
-^^^^^^^^^^^
+Table Types passed to Functions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-PostgreSQL also supports passing a table as an argument to a function. This
-is not available yet in sqlalchemy, however the
-:func:`_expression.literal_column` function with the name of the table may be
-used in its place::
+PostgreSQL supports passing a table as an argument to a function, which it
+refers to as a "record" type. SQLAlchemy :class:`_sql.FromClause` objects
+such as :class:`_schema.Table` support this special form using the
+:meth:`_sql.FromClause.table_valued` method, which is comparable to the
+:meth:`_functions.FunctionElement.table_valued` method except that the collection
+of columns is already established by that of the :class:`_sql.FromClause`
+itself::
 
-    select('*').select_from(func.my_function(literal_column('my_table')))
 
-Will generate the SQL::
+    >>> from sqlalchemy import table, column, func, select
+    >>> a = table("a", column("id"), column("x"), column("y"))
+    >>> stmt = select(func.row_to_json(a.table_valued()))
+    >>> print(stmt)
+    SELECT row_to_json(a) AS row_to_json_1
+    FROM a
+
+.. versionadded:: 1.4.0b2
 
-    SELECT *
-    FROM my_function(my_table)
 
 ARRAY Types
 -----------
index fe9455fb96f3cc88ca4340fd86c008a088d2a925..c76c9ae148989d0c6adb34659c5cdb39566e9672 100644 (file)
@@ -935,13 +935,11 @@ class Executable(roles.CoerceTextStatementRole, Generative):
 
     @util.deprecated_20(
         ":meth:`.Executable.scalar`",
-        alternative="All statement execution in SQLAlchemy 2.0 is performed "
-        "by the :meth:`_engine.Connection.execute` method of "
+        alternative="Scalar execution in SQLAlchemy 2.0 is performed "
+        "by the :meth:`_engine.Connection.scalar` method of "
         ":class:`_engine.Connection`, "
-        "or in the ORM by the :meth:`.Session.execute` method of "
-        ":class:`.Session`; the :meth:`_future.Result.scalar` "
-        "method can then be "
-        "used to return a scalar result.",
+        "or in the ORM by the :meth:`.Session.scalar` method of "
+        ":class:`.Session`.",
     )
     def scalar(self, *multiparams, **params):
         """Compile and execute this :class:`.Executable`, returning the
index 3b972be41d39b1862b60dbec4fabadad51e5a6ab..d4b208c56d1b4cf453879b7a1b9b3e1e608302a0 100644 (file)
@@ -437,6 +437,20 @@ class ExpressionElementImpl(_ColumnCoercions, RoleImpl):
             except exc.ArgumentError as err:
                 self._raise_for_expected(element, err=err)
 
+    def _raise_for_expected(self, element, argname=None, resolved=None, **kw):
+        if isinstance(element, roles.AnonymizedFromClauseRole):
+            advice = (
+                "To create a "
+                "column expression from a FROM clause row "
+                "as a whole, use the .table_valued() method."
+            )
+        else:
+            advice = None
+
+        return super(ExpressionElementImpl, self)._raise_for_expected(
+            element, argname=argname, resolved=resolved, advice=advice, **kw
+        )
+
 
 class BinaryElementImpl(ExpressionElementImpl, RoleImpl):
 
@@ -603,6 +617,13 @@ class ColumnArgumentOrKeyImpl(_ReturnsStringKey, RoleImpl):
     __slots__ = ()
 
 
+class StrAsPlainColumnImpl(_CoerceLiterals, RoleImpl):
+    __slots__ = ()
+
+    def _text_coercion(self, element, argname=None):
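+        # a plain string is coerced to an un-typed ColumnClause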
+        return elements.ColumnClause(element)
+
+
 class ByOfImpl(_CoerceLiterals, _ColumnCoercions, RoleImpl, roles.ByOfRole):
 
     __slots__ = ()
index 3925b251d7f6127ae8d5f89f4ac4a6f82b560b58..696b38e649ae3a38862d936b1d48574d3dfbabdd 100644 (file)
@@ -494,6 +494,12 @@ class Compiled(object):
         else:
             raise exc.ObjectNotExecutableError(self.statement)
 
+    def visit_unsupported_compilation(self, element, err):
+        util.raise_(
+            exc.UnsupportedCompilationError(self, type(element)),
+            replace_context=err,
+        )
+
     @property
     def sql_compiler(self):
         """Return a Compiled that is capable of processing SQL expressions.
@@ -1718,13 +1724,18 @@ class SQLCompiler(Compiled):
             extract.expr._compiler_dispatch(self, **kwargs),
         )
 
+    def visit_scalar_function_column(self, element, **kw):
+        compiled_fn = self.visit_function(element.fn, **kw)
+        compiled_col = self.visit_column(element, **kw)
+        return "(%s).%s" % (compiled_fn, compiled_col)
+
     def visit_function(self, func, add_to_result_map=None, **kwargs):
         if add_to_result_map is not None:
             add_to_result_map(func.name, func.name, (), func.type)
 
         disp = getattr(self, "visit_%s_func" % func.name.lower(), None)
         if disp:
-            return disp(func, **kwargs)
+            text = disp(func, **kwargs)
         else:
             name = FUNCTIONS.get(func.__class__, None)
             if name:
@@ -1739,7 +1750,7 @@ class SQLCompiler(Compiled):
                     else name
                 )
                 name = name + "%(expr)s"
-            return ".".join(
+            text = ".".join(
                 [
                     (
                         self.preparer.quote(tok)
@@ -1752,6 +1763,10 @@ class SQLCompiler(Compiled):
                 + [name]
             ) % {"expr": self.function_argspec(func, **kwargs)}
 
+        if func._with_ordinality:
+            text += " WITH ORDINALITY"
+        return text
+
     def visit_next_value_func(self, next_value, **kw):
         return self.visit_sequence(next_value.sequence)
 
@@ -2533,6 +2548,15 @@ class SQLCompiler(Compiled):
             else:
                 return self.preparer.format_alias(cte, cte_name)
 
+    def visit_table_valued_alias(self, element, **kw):
+        if element._is_lateral:
+            return self.visit_lateral(element, **kw)
+        else:
+            return self.visit_alias(element, **kw)
+
+    def visit_table_valued_column(self, element, **kw):
+        return self.visit_column(element, **kw)
+
     def visit_alias(
         self,
         alias,
@@ -2584,6 +2608,24 @@ class SQLCompiler(Compiled):
             ret = inner + self.get_render_as_alias_suffix(
                 self.preparer.format_alias(alias, alias_name)
             )
+
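+            # a TableValuedAlias with render_derived() applied gets a derived
+            # column specification appended, e.g. "(a INTEGER, b TEXT)" or
+            # "(a, b)" depending on whether types were requested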
+            if alias._supports_derived_columns and alias._render_derived:
+                ret += "(%s)" % (
+                    ", ".join(
+                        "%s%s"
+                        % (
+                            col.name,
+                            " %s"
+                            % self.dialect.type_compiler.process(
+                                col.type, **kwargs
+                            )
+                            if alias._render_derived_w_types
+                            else "",
+                        )
+                        for col in alias.c
+                    )
+                )
+
             if fromhints and alias in fromhints:
                 ret = self.format_from_hint_text(
                     ret, alias, fromhints[alias], iscrud
@@ -2600,9 +2642,9 @@ class SQLCompiler(Compiled):
         kw["subquery"] = True
         return self.visit_alias(subquery, **kw)
 
-    def visit_lateral(self, lateral, **kw):
+    def visit_lateral(self, lateral_, **kw):
         kw["lateral"] = True
-        return "LATERAL %s" % self.visit_alias(lateral, **kw)
+        return "LATERAL %s" % self.visit_alias(lateral_, **kw)
 
     def visit_tablesample(self, tablesample, asfrom=False, **kw):
         text = "%s TABLESAMPLE %s" % (
@@ -3737,6 +3779,20 @@ class StrSQLCompiler(SQLCompiler):
     def _fallback_column_name(self, column):
         return "<name unknown>"
 
+    @util.preload_module("sqlalchemy.engine.url")
+    def visit_unsupported_compilation(self, element, err, **kw):
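+        # if the element names a specific dialect for stringification
+        # (e.g. a postgresql-only construct), use that dialect's compiler
+        # rather than failing with UnsupportedCompilationError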
+        if element.stringify_dialect != "default":
+            url = util.preloaded.engine_url
+            dialect = url.URL.create(element.stringify_dialect).get_dialect()()
+
+            compiler = dialect.statement_compiler(dialect, None)
+            if not isinstance(compiler, StrSQLCompiler):
+                return compiler.process(element)
+
+        return super(StrSQLCompiler, self).visit_unsupported_compilation(
+            element, err
+        )
+
     def visit_getitem_binary(self, binary, operator, **kw):
         return "%s[%s]" % (
             self.process(binary.left, **kw),
index a9f21cd5f8405a2b22238874b083b2078d620c0e..1bdef1932ef27e5703c923d8d7102ae2f46ea139 100644 (file)
@@ -3856,6 +3856,8 @@ class Over(ColumnElement):
 
         .. seealso::
 
+            :ref:`tutorial_window_functions` - in the :ref:`unified_tutorial`
+
             :data:`.expression.func`
 
             :func:`_expression.within_group`
@@ -4007,6 +4009,9 @@ class WithinGroup(ColumnElement):
 
         .. seealso::
 
+            :ref:`tutorial_functions_within_group` - in the
+            :ref:`unified_tutorial`
+
             :data:`.expression.func`
 
             :func:`_expression.over`
@@ -4104,8 +4109,10 @@ class FunctionFilter(ColumnElement):
 
         .. seealso::
 
-            :meth:`.FunctionElement.filter`
+            :ref:`tutorial_functions_within_group` - in the
+            :ref:`unified_tutorial`
 
+            :meth:`.FunctionElement.filter`
 
         """
         self.func = func
@@ -4317,11 +4324,82 @@ class Label(roles.LabeledColumnExprRole, ColumnElement):
         return self.key, e
 
 
+class NamedColumn(ColumnElement):
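+    # common base for column expressions that carry a string name, such as
+    # ColumnClause and TableValuedColumn; factors out the naming, labeling
+    # and proxying behavior previously local to ColumnClause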
+    is_literal = False
+    table = None
+
+    def _compare_name_for_result(self, other):
+        return (hasattr(other, "name") and self.name == other.name) or (
+            hasattr(other, "_label") and self._label == other._label
+        )
+
+    @util.memoized_property
+    def description(self):
+        if util.py3k:
+            return self.name
+        else:
+            return self.name.encode("ascii", "backslashreplace")
+
+    @HasMemoized.memoized_attribute
+    def _key_label(self):
+        if self.key != self.name:
+            return self._gen_label(self.key)
+        else:
+            return self._label
+
+    @HasMemoized.memoized_attribute
+    def _label(self):
+        return self._gen_label(self.name)
+
+    @HasMemoized.memoized_attribute
+    def _render_label_in_columns_clause(self):
+        return True
+
+    def _gen_label(self, name, dedupe_on_key=True):
+        return name
+
+    def _bind_param(self, operator, obj, type_=None, expanding=False):
+        return BindParameter(
+            self.key,
+            obj,
+            _compared_to_operator=operator,
+            _compared_to_type=self.type,
+            type_=type_,
+            unique=True,
+            expanding=expanding,
+        )
+
+    def _make_proxy(
+        self,
+        selectable,
+        name=None,
+        name_is_truncatable=False,
+        disallow_is_literal=False,
+        **kw
+    ):
+        c = ColumnClause(
+            coercions.expect(roles.TruncatedLabelRole, name or self.name)
+            if name_is_truncatable
+            else (name or self.name),
+            type_=self.type,
+            _selectable=selectable,
+            is_literal=False,
+        )
+        c._propagate_attrs = selectable._propagate_attrs
+        if name is None:
+            c.key = self.key
+        c._proxies = [self]
+        if selectable._is_clone_of is not None:
+            c._is_clone_of = selectable._is_clone_of.columns.get(c.key)
+        return c.key, c
+
+
 class ColumnClause(
     roles.DDLReferredColumnRole,
     roles.LabeledColumnExprRole,
+    roles.StrAsPlainColumnRole,
     Immutable,
-    ColumnElement,
+    NamedColumn,
 ):
     """Represents a column expression from any textual string.
 
@@ -4360,6 +4438,9 @@ class ColumnClause(
 
     """
 
+    table = None
+    is_literal = False
+
     __visit_name__ = "column"
 
     _traverse_internals = [
@@ -4470,27 +4551,6 @@ class ColumnClause(
         self.type = type_api.to_instance(type_)
         self.is_literal = is_literal
 
-    def _compare_name_for_result(self, other):
-        if (
-            self.is_literal
-            or self.table is None
-            or self.table._is_textual
-            or not hasattr(other, "proxy_set")
-            or (
-                isinstance(other, ColumnClause)
-                and (
-                    other.is_literal
-                    or other.table is None
-                    or other.table._is_textual
-                )
-            )
-        ):
-            return (hasattr(other, "name") and self.name == other.name) or (
-                hasattr(other, "_label") and self._label == other._label
-            )
-        else:
-            return other.proxy_set.intersection(self.proxy_set)
-
     def get_children(self, column_tables=False, **kw):
         # override base get_children() to not return the Table
         # or selectable that is parent to this column.  Traversals
@@ -4505,24 +4565,6 @@ class ColumnClause(
         else:
             return []
 
-    @util.memoized_property
-    def description(self):
-        if util.py3k:
-            return self.name
-        else:
-            return self.name.encode("ascii", "backslashreplace")
-
-    @HasMemoized.memoized_attribute
-    def _key_label(self):
-        if self.key != self.name:
-            return self._gen_label(self.key)
-        else:
-            return self._label
-
-    @HasMemoized.memoized_attribute
-    def _label(self):
-        return self._gen_label(self.name)
-
     @HasMemoized.memoized_attribute
     def _render_label_in_columns_clause(self):
         return self.table is not None
@@ -4531,6 +4573,27 @@ class ColumnClause(
     def _ddl_label(self):
         return self._gen_label(self.name, dedupe_on_key=False)
 
+    def _compare_name_for_result(self, other):
+        if (
+            self.is_literal
+            or self.table is None
+            or self.table._is_textual
+            or not hasattr(other, "proxy_set")
+            or (
+                isinstance(other, ColumnClause)
+                and (
+                    other.is_literal
+                    or other.table is None
+                    or other.table._is_textual
+                )
+            )
+        ):
+            return (hasattr(other, "name") and self.name == other.name) or (
+                hasattr(other, "_label") and self._label == other._label
+            )
+        else:
+            return other.proxy_set.intersection(self.proxy_set)
+
     def _gen_label(self, name, dedupe_on_key=True):
         t = self.table
         if self.is_literal:
@@ -4575,17 +4638,6 @@ class ColumnClause(
         else:
             return name
 
-    def _bind_param(self, operator, obj, type_=None, expanding=False):
-        return BindParameter(
-            self.key,
-            obj,
-            _compared_to_operator=operator,
-            _compared_to_type=self.type,
-            type_=type_,
-            unique=True,
-            expanding=expanding,
-        )
-
     def _make_proxy(
         self,
         selectable,
@@ -4627,6 +4679,25 @@ class ColumnClause(
         return c.key, c
 
 
+class TableValuedColumn(NamedColumn):
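+    # column expression representing a function alias used as a scalar
+    # column, e.g. as produced by FunctionElement.column_valued()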
+    __visit_name__ = "table_valued_column"
+
+    _traverse_internals = [
+        ("name", InternalTraversal.dp_anon_name),
+        ("type", InternalTraversal.dp_type),
+        ("scalar_alias", InternalTraversal.dp_clauseelement),
+    ]
+
+    def __init__(self, scalar_alias, type_):
+        self.scalar_alias = scalar_alias
+        self.key = self.name = scalar_alias.name
+        self.type = type_
+
+    @property
+    def _from_objects(self):
+        return [self.scalar_alias]
+
+
 class CollationClause(ColumnElement):
     __visit_name__ = "collation"
 
index 3eb1443a34393ca8024c61277723b6b5acda4752..46f13fedda96acc6267bfc7827421b7a7f938b6e 100644 (file)
@@ -34,6 +34,7 @@ __all__ = [
     "Select",
     "Selectable",
     "TableClause",
+    "TableValuedAlias",
     "Update",
     "Values",
     "alias",
@@ -172,6 +173,7 @@ from .selectable import Subquery  # noqa
 from .selectable import subquery  # noqa
 from .selectable import TableClause  # noqa
 from .selectable import TableSample  # noqa
+from .selectable import TableValuedAlias  # noqa
 from .selectable import TextAsFrom  # noqa
 from .selectable import TextualSelect  # noqa
 from .selectable import Values  # noqa
index a9ea98d04779eb792344a77a4d270669f7e98e17..78f7ead2ebc8e77e454c2e8eca5b497503815326 100644 (file)
@@ -17,6 +17,7 @@ from . import sqltypes
 from . import util as sqlutil
 from .base import ColumnCollection
 from .base import Executable
+from .base import Generative
 from .base import HasMemoized
 from .elements import _type_from_args
 from .elements import BinaryExpression
@@ -28,11 +29,12 @@ from .elements import Extract
 from .elements import FunctionFilter
 from .elements import Grouping
 from .elements import literal_column
+from .elements import NamedColumn
 from .elements import Over
 from .elements import WithinGroup
-from .selectable import Alias
 from .selectable import FromClause
 from .selectable import Select
+from .selectable import TableValuedAlias
 from .visitors import InternalTraversal
 from .visitors import TraversibleType
 from .. import util
@@ -63,7 +65,7 @@ def register_function(identifier, fn, package="_default"):
     reg[identifier] = fn
 
 
-class FunctionElement(Executable, ColumnElement, FromClause):
+class FunctionElement(Executable, ColumnElement, FromClause, Generative):
     """Base for SQL function-oriented constructs.
 
     .. seealso::
@@ -80,11 +82,17 @@ class FunctionElement(Executable, ColumnElement, FromClause):
 
     """
 
-    _traverse_internals = [("clause_expr", InternalTraversal.dp_clauseelement)]
+    _traverse_internals = [
+        ("clause_expr", InternalTraversal.dp_clauseelement),
+        ("_with_ordinality", InternalTraversal.dp_boolean),
+        ("_table_value_type", InternalTraversal.dp_has_cache_key),
+    ]
 
     packagenames = ()
 
     _has_args = False
+    _with_ordinality = False
+    _table_value_type = None
 
     def __init__(self, *clauses, **kwargs):
         r"""Construct a :class:`.FunctionElement`.
@@ -123,27 +131,169 @@ class FunctionElement(Executable, ColumnElement, FromClause):
             self, multiparams, params, execution_options
         )
 
+    def scalar_table_valued(self, name, type_=None):
+        """Return a column expression that's against this
+        :class:`_functions.FunctionElement` as a scalar
+        table-valued expression.
+
+        The returned expression is similar to that returned by a single column
+        accessed off of a :meth:`_functions.FunctionElement.table_valued`
+        construct, except no FROM clause is generated; the function is rendered
+        in a similar way to a scalar subquery.
+
+        E.g.::
+
+            >>> from sqlalchemy import func, select
+            >>> fn = func.jsonb_each("{'k', 'v'}").scalar_table_valued("key")
+            >>> print(select(fn))
+            SELECT (jsonb_each(:jsonb_each_1)).key
+
+        .. versionadded:: 1.4.0b2
+
+        .. seealso::
+
+            :meth:`_functions.FunctionElement.table_valued`
+
+            :meth:`_functions.FunctionElement.alias`
+
+            :meth:`_functions.FunctionElement.column_valued`
+
+        """  # noqa E501
+
+        return ScalarFunctionColumn(self, name, type_)
+
+    def table_valued(self, *expr, **kw):
+        """Return a :class:`_sql.TableValuedAlias` representation of this
+        :class:`_functions.FunctionElement` with table-valued expressions added.
+
+        e.g.::
+
+            >>> fn = (
+            ...     func.generate_series(1, 5).
+            ...     table_valued("value", "start", "stop", "step")
+            ... )
+
+            >>> print(select(fn))
+            SELECT anon_1.value, anon_1.start, anon_1.stop, anon_1.step
+            FROM generate_series(:generate_series_1, :generate_series_2) AS anon_1
+
+            >>> print(select(fn.c.value, fn.c.stop).where(fn.c.value > 2))
+            SELECT anon_1.value, anon_1.stop
+            FROM generate_series(:generate_series_1, :generate_series_2) AS anon_1
+            WHERE anon_1.value > :value_1
+
+        A WITH ORDINALITY expression may be generated by passing the keyword
+        argument "with_ordinality"::
+
+            >>> fn = func.generate_series(4, 1, -1).table_valued("gen", with_ordinality="ordinality")
+            >>> print(select(fn))
+            SELECT anon_1.gen, anon_1.ordinality
+            FROM generate_series(:generate_series_1, :generate_series_2, :generate_series_3) WITH ORDINALITY AS anon_1
+
+        :param \*expr: A series of string column names that will be added to the
+         ``.c`` collection of the resulting :class:`_sql.TableValuedAlias`
+         construct as columns.  :func:`_sql.column` objects with or without
+         datatypes may also be used.
+
+        :param name: optional name to assign to the alias name that's generated.
+         If omitted, a unique anonymizing name is used.
+
+        :param with_ordinality: string name that when present results in the
+         ``WITH ORDINALITY`` clause being added to the alias, and the given
+         string name will be added as a column to the .c collection
+         of the resulting :class:`_sql.TableValuedAlias`.
+
+        .. versionadded:: 1.4.0b2
+
+        .. seealso::
+
+            :ref:`tutorial_functions_table_valued` - in the :ref:`unified_tutorial`
+
+            :ref:`postgresql_table_valued` - in the :ref:`postgresql_toplevel` documentation
+
+            :meth:`_functions.FunctionElement.scalar_table_valued` - variant of
+            :meth:`_functions.FunctionElement.table_valued` which delivers the
+            complete table valued expression as a scalar column expression
+
+            :meth:`_functions.FunctionElement.column_valued`
+
+            :meth:`_sql.TableValuedAlias.render_derived` - renders the alias
+            using a derived column clause, e.g. ``AS name(col1, col2, ...)``
+
+        """  # noqa 501
+
+        new_func = self._generate()
+
+        with_ordinality = kw.pop("with_ordinality", None)
+        name = kw.pop("name", None)
+
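+        # the with_ordinality name becomes an additional column in the
+        # resulting .c collection, and flags the compiler to render
+        # WITH ORDINALITY after the function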
+        if with_ordinality:
+            expr += (with_ordinality,)
+            new_func._with_ordinality = True
+
+        new_func.type = new_func._table_value_type = sqltypes.TableValueType(
+            *expr
+        )
+
+        return new_func.alias(name=name)
+
+    def column_valued(self, name=None):
+        """Return this :class:`_functions.FunctionElement` as a column expression that
+        selects from itself as a FROM clause.
+
+        E.g.::
+
+            >>> from sqlalchemy import select, func
+            >>> gs = func.generate_series(1, 5, -1).column_valued()
+            >>> print(select(gs))
+            SELECT anon_1
+            FROM generate_series(:generate_series_1, :generate_series_2, :generate_series_3) AS anon_1
+
+        This is shorthand for::
+
+            gs = func.generate_series(1, 5, -1).alias().column
+
+
+        .. seealso::
+
+            :ref:`tutorial_functions_column_valued` - in the :ref:`unified_tutorial`
+
+            :ref:`postgresql_column_valued` - in the :ref:`postgresql_toplevel` documentation
+
+            :meth:`_functions.FunctionElement.table_valued`
+
+        """  # noqa 501
+
+        return self.alias(name=name).column
+
     @property
     def columns(self):
         r"""The set of columns exported by this :class:`.FunctionElement`.
 
-        Function objects currently have no result column names built in;
-        this method returns a single-element column collection with
-        an anonymously named column.
+        This is a placeholder collection that allows the function to be
+        placed in the FROM clause of a statement::
 
-        An interim approach to providing named columns for a function
-        as a FROM clause is to build a :func:`_expression.select` with the
-        desired columns::
+            >>> from sqlalchemy import column, select, func
+            >>> stmt = select(column('x'), column('y')).select_from(func.myfunction())
+            >>> print(stmt)
+            SELECT x, y FROM myfunction()
 
-            from sqlalchemy.sql import column
+        The above form is a legacy feature that is now superseded by the
+        fully capable :meth:`_functions.FunctionElement.table_valued`
+        method; see that method for details.
 
-            stmt = select(column('x'), column('y')).\
-                select_from(func.myfunction())
+        .. seealso::
 
+            :meth:`_functions.FunctionElement.table_valued` - generates table-valued
+            SQL function expressions.
 
-        """
-        col = self.label(None)
-        return ColumnCollection(columns=[(col.key, col)])
+        """  # noqa E501
+        if self.type._is_table_value:
+            cols = self.type._elements
+        else:
+            cols = [self.label(None)]
+
+        return ColumnCollection(columns=[(col.key, col) for col in cols])
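
For comparison, a minimal sketch of the superseding
:meth:`_functions.FunctionElement.table_valued` form; the function and column
names here are hypothetical::

    from sqlalchemy import func, select

    fn = func.myfunction().table_valued("x", "y")
    print(select(fn.c.x, fn.c.y))
    # roughly: SELECT anon_1.x, anon_1.y FROM myfunction() AS anon_1
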
 
     @HasMemoized.memoized_attribute
     def clauses(self):
@@ -170,6 +320,12 @@ class FunctionElement(Executable, ColumnElement, FromClause):
 
         See :func:`_expression.over` for a full description.
 
+        .. seealso::
+
+            :func:`_expression.over`
+
+            :ref:`tutorial_window_functions` - in the :ref:`unified_tutorial`
+
         """
         return Over(
             self,
@@ -191,6 +347,12 @@ class FunctionElement(Executable, ColumnElement, FromClause):
         .. versionadded:: 1.1
 
 
+        .. seealso::
+
+            :ref:`tutorial_functions_within_group` -
+            in the :ref:`unified_tutorial`
+
+
         """
         return WithinGroup(self, *order_by)
 
@@ -213,6 +375,9 @@ class FunctionElement(Executable, ColumnElement, FromClause):
 
         .. seealso::
 
+            :ref:`tutorial_functions_within_group` -
+            in the :ref:`unified_tutorial`
+
             :class:`.FunctionFilter`
 
             :func:`.funcfilter`
@@ -226,6 +391,9 @@ class FunctionElement(Executable, ColumnElement, FromClause):
     def as_comparison(self, left_index, right_index):
         """Interpret this expression as a boolean comparison between two values.
 
+        This method is used for an ORM use case described at
+        :ref:`relationship_custom_operator_sql_function`.
+
         A hypothetical SQL function "is_equal()" which compares two values
         for equality would be written in the Core expression language as::
 
@@ -250,9 +418,8 @@ class FunctionElement(Executable, ColumnElement, FromClause):
         to manipulate the "left" and "right" sides of the ON clause of a JOIN
         expression. The purpose of this method is to provide a SQL function
         construct that can also supply this information to the ORM, when used
-        with the :paramref:`_orm.relationship.primaryjoin` parameter.
-        The return
-        value is a containment object called :class:`.FunctionAsBinary`.
+        with the :paramref:`_orm.relationship.primaryjoin` parameter. The
+        return value is a containment object called :class:`.FunctionAsBinary`.
 
         An ORM example is as follows::
 
@@ -286,6 +453,11 @@ class FunctionElement(Executable, ColumnElement, FromClause):
 
         .. versionadded:: 1.3
 
+        .. seealso::
+
+            :ref:`relationship_custom_operator_sql_function` -
+            example use within the ORM
+
         """
         return FunctionAsBinary(self, left_index, right_index)
 
@@ -305,37 +477,65 @@ class FunctionElement(Executable, ColumnElement, FromClause):
 
         return None
 
-    def alias(self, name=None, flat=False):
+    def alias(self, name=None):
         r"""Produce a :class:`_expression.Alias` construct against this
         :class:`.FunctionElement`.
 
+        .. tip::
+
+            The :meth:`_functions.FunctionElement.alias` method is part of the
+            mechanism by which "table valued" SQL functions are created.
+            However, most use cases are covered by higher level methods on
+            :class:`_functions.FunctionElement` including
+            :meth:`_functions.FunctionElement.table_valued`, and
+            :meth:`_functions.FunctionElement.column_valued`.
+
         This construct wraps the function in a named alias which
         is suitable for the FROM clause, in the style accepted for example
-        by PostgreSQL.
+        by PostgreSQL.  A column expression is also provided using the
+        special ``.column`` attribute, which may be used to refer to the
+        output of the function as a scalar value in the columns clause or
+        WHERE clause of an enclosing SELECT, on backends such as PostgreSQL.
+
+        For a full table-valued expression, use the
+        :meth:`_functions.FunctionElement.table_valued` method first to
+        establish named columns.
 
         e.g.::
 
-            from sqlalchemy.sql import column
+            >>> from sqlalchemy import func, select, column
+            >>> data_view = func.unnest([1, 2, 3]).alias("data_view")
+            >>> print(select(data_view.column))
+            SELECT data_view
+            FROM unnest(:unnest_1) AS data_view
 
-            stmt = select(column('data_view')).\
-                select_from(SomeTable).\
-                select_from(func.unnest(SomeTable.data).alias('data_view')
-            )
+        The :meth:`_functions.FunctionElement.column_valued` method provides
+        a shortcut for the above pattern::
 
-        Would produce:
+            >>> data_view = func.unnest([1, 2, 3]).column_valued("data_view")
+            >>> print(select(data_view))
+            SELECT data_view
+            FROM unnest(:unnest_1) AS data_view
 
-        .. sourcecode:: sql
+        .. versionadded:: 1.4.0b2  Added the ``.column`` accessor
 
-            SELECT data_view
-            FROM sometable, unnest(sometable.data) AS data_view
+        .. seealso::
+
+            :ref:`tutorial_functions_table_valued` -
+            in the :ref:`unified_tutorial`
+
+            :meth:`_functions.FunctionElement.table_valued`
+
+            :meth:`_functions.FunctionElement.scalar_table_valued`
+
+            :meth:`_functions.FunctionElement.column_valued`
 
-        .. versionadded:: 0.9.8 The :meth:`.FunctionElement.alias` method
-           is now supported.  Previously, this method's behavior was
-           undefined and did not behave consistently across versions.
 
         """
 
-        return Alias._construct(self, name)
+        return TableValuedAlias._construct(
+            self, name, table_value_type=self.type
+        )
 
     def select(self):
         """Produce a :func:`_expression.select` construct
@@ -351,6 +551,14 @@ class FunctionElement(Executable, ColumnElement, FromClause):
             s = s.execution_options(**self._execution_options)
         return s
 
+    @util.deprecated_20(
+        ":meth:`.FunctionElement.scalar`",
+        alternative="Scalar execution in SQLAlchemy 2.0 is performed "
+        "by the :meth:`_engine.Connection.scalar` method of "
+        ":class:`_engine.Connection`, "
+        "or in the ORM by the :meth:`.Session.scalar` method of "
+        ":class:`.Session`.",
+    )
     def scalar(self):
         """Execute this :class:`.FunctionElement` against an embedded
         'bind' and return a scalar value.
@@ -365,6 +573,14 @@ class FunctionElement(Executable, ColumnElement, FromClause):
         """
         return self.select().execute().scalar()
 
+    @util.deprecated_20(
+        ":meth:`.FunctionElement.execute`",
+        alternative="All statement execution in SQLAlchemy 2.0 is performed "
+        "by the :meth:`_engine.Connection.execute` method of "
+        ":class:`_engine.Connection`, "
+        "or in the ORM by the :meth:`.Session.execute` method of "
+        ":class:`.Session`.",
+    )
     def execute(self):
         """Execute this :class:`.FunctionElement` against an embedded
         'bind'.
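
A rough sketch of the replacements named in the two deprecation messages above,
assuming an illustrative in-memory SQLite engine::

    from sqlalchemy import create_engine, func, select

    engine = create_engine("sqlite://")
    with engine.connect() as conn:
        # replaces FunctionElement.scalar()
        value = conn.scalar(select(func.lower("ABC")))
        # replaces FunctionElement.execute()
        result = conn.execute(select(func.lower("ABC")))
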
@@ -441,6 +657,24 @@ class FunctionAsBinary(BinaryExpression):
         self.sql_function.clauses.clauses[self.right_index - 1] = value
 
 
+class ScalarFunctionColumn(NamedColumn):
+    __visit_name__ = "scalar_function_column"
+
+    _traverse_internals = [
+        ("name", InternalTraversal.dp_anon_name),
+        ("type", InternalTraversal.dp_type),
+        ("fn", InternalTraversal.dp_clauseelement),
+    ]
+
+    is_literal = False
+    table = None
+
+    def __init__(self, fn, name, type_=None):
+        self.fn = fn
+        self.name = name
+        self.type = sqltypes.to_instance(type_)
+
+
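
:class:`.ScalarFunctionColumn` is the element produced by
:meth:`_functions.FunctionElement.scalar_table_valued`; a rough sketch of the
rendering, mirroring the compile tests added later in this patch (table and
column names come from those tests)::

    from sqlalchemy import column, func, select, table

    assets_transactions = table(
        "assets_transactions", column("id"), column("contents")
    )
    stmt = select(
        assets_transactions.c.id,
        func.jsonb_each(
            assets_transactions.c.contents
        ).scalar_table_valued("key"),
    )
    # roughly: SELECT assets_transactions.id,
    #          (jsonb_each(assets_transactions.contents)).key
    #          FROM assets_transactions
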
 class _FunctionGenerator(object):
     """Generate SQL function expressions.
 
@@ -586,10 +820,9 @@ class Function(FunctionElement):
 
         func.mypackage.some_function(col1, col2)
 
-
     .. seealso::
 
-        :ref:`coretutorial_functions`
+        :ref:`tutorial_functions` - in the :ref:`unified_tutorial`
 
         :data:`.func` - namespace which produces registered or ad-hoc
         :class:`.Function` instances.
@@ -607,6 +840,23 @@ class Function(FunctionElement):
         ("type", InternalTraversal.dp_type),
     ]
 
+    type = sqltypes.NULLTYPE
+    """A :class:`_types.TypeEngine` object which refers to the SQL return
+    type represented by this SQL function.
+
+    This datatype may be configured when generating a
+    :class:`_functions.Function` object by passing the
+    :paramref:`_functions.Function.type_` parameter, e.g.::
+
+        >>> select(func.lower("some VALUE", type_=String))
+
+    The small number of built-in classes of :class:`_functions.Function` come
+    with a built-in datatype that's appropriate to the class of function and
+    its arguments. For functions that aren't known, the type defaults to the
+    "null type".
+
+    """
+
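
A brief illustration of the defaulting behavior described above; the function
names other than ``char_length`` and ``lower`` are hypothetical::

    from sqlalchemy import String, func

    func.some_unknown_fn().type             # NullType - function not known
    func.char_length("abc").type            # Integer - built-in generic function
    func.lower("ABC", type_=String()).type  # String - set explicitly via type_
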
     @util.deprecated_params(
         bind=(
             "2.0",
@@ -825,6 +1075,8 @@ class next_value(GenericFunction):
 
 
 class AnsiFunction(GenericFunction):
+    """Define a function in "ansi" format, which doesn't render parenthesis."""
+
     inherit_cache = True
 
     def __init__(self, *args, **kwargs):
@@ -856,29 +1108,61 @@ class coalesce(ReturnTypeFromArgs):
     inherit_cache = True
 
 
-class max(ReturnTypeFromArgs):  # noqa
+class max(ReturnTypeFromArgs):  # noqa A001
+    """The SQL MAX() aggregate function."""
+
     inherit_cache = True
 
 
-class min(ReturnTypeFromArgs):  # noqa
+class min(ReturnTypeFromArgs):  # noqa A001
+    """The SQL MIN() aggregate function."""
+
     inherit_cache = True
 
 
-class sum(ReturnTypeFromArgs):  # noqa
+class sum(ReturnTypeFromArgs):  # noqa A001
+    """The SQL SUM() aggregate function."""
+
     inherit_cache = True
 
 
-class now(GenericFunction):  # noqa
+class now(GenericFunction):
+    """The SQL now() datetime function.
+
+    SQLAlchemy dialects will usually render this particular function
+    in a backend-specific way, such as rendering it as ``CURRENT_TIMESTAMP``.
+
+    """
+
     type = sqltypes.DateTime
     inherit_cache = True
 
 
 class concat(GenericFunction):
+    """The SQL CONCAT() function, which concatenates strings.
+
+    E.g.::
+
+        >>> print(select(func.concat('a', 'b')))
+        SELECT concat(:concat_2, :concat_3) AS concat_1
+
+    String concatenation in SQLAlchemy is more commonly available using the
+    Python ``+`` operator with string datatypes, which will render a
+    backend-specific concatenation operator, such as::
+
+        >>> print(select(literal("a") + "b"))
+        SELECT :param_1 || :param_2 AS anon_1
+
+
+    """
+
     type = sqltypes.String
     inherit_cache = True
 
 
 class char_length(GenericFunction):
+    """The CHAR_LENGTH() SQL function."""
+
     type = sqltypes.Integer
     inherit_cache = True
 
@@ -887,6 +1171,8 @@ class char_length(GenericFunction):
 
 
 class random(GenericFunction):
+    """The RANDOM() SQL function."""
+
     _has_args = True
     inherit_cache = True
 
@@ -922,46 +1208,64 @@ class count(GenericFunction):
 
 
 class current_date(AnsiFunction):
+    """The CURRENT_DATE() SQL function."""
+
     type = sqltypes.Date
     inherit_cache = True
 
 
 class current_time(AnsiFunction):
+    """The CURRENT_TIME() SQL function."""
+
     type = sqltypes.Time
     inherit_cache = True
 
 
 class current_timestamp(AnsiFunction):
+    """The CURRENT_TIMESTAMP() SQL function."""
+
     type = sqltypes.DateTime
     inherit_cache = True
 
 
 class current_user(AnsiFunction):
+    """The CURRENT_USER() SQL function."""
+
     type = sqltypes.String
     inherit_cache = True
 
 
 class localtime(AnsiFunction):
+    """The localtime() SQL function."""
+
     type = sqltypes.DateTime
     inherit_cache = True
 
 
 class localtimestamp(AnsiFunction):
+    """The localtimestamp() SQL function."""
+
     type = sqltypes.DateTime
     inherit_cache = True
 
 
 class session_user(AnsiFunction):
+    """The SESSION_USER() SQL function."""
+
     type = sqltypes.String
     inherit_cache = True
 
 
 class sysdate(AnsiFunction):
+    """The SYSDATE() SQL function."""
+
     type = sqltypes.DateTime
     inherit_cache = True
 
 
 class user(AnsiFunction):
+    """The USER() SQL function."""
+
     type = sqltypes.String
     inherit_cache = True
 
index 2c4ff75c4e77932a2fa21a40f5498ac7760a8cb3..52743bd506f354bb8ec69eef37d5686737995bec 100644 (file)
@@ -44,6 +44,10 @@ class ColumnArgumentOrKeyRole(ColumnArgumentRole):
     _role_name = "Column expression or string key"
 
 
+class StrAsPlainColumnRole(ColumnArgumentRole):
+    _role_name = "Column expression or string key"
+
+
 class ColumnListRole(SQLRole):
     """Elements suitable for forming comma separated lists of expressions."""
 
index 1bd4f5d8121d6a389fa6723e2713a32f7f8b9480..a273e0c903d9887b5569519d4666b542818374de 100644 (file)
@@ -51,6 +51,7 @@ from .elements import ColumnClause
 from .elements import GroupedElement
 from .elements import Grouping
 from .elements import literal_column
+from .elements import TableValuedColumn
 from .elements import UnaryExpression
 from .visitors import InternalTraversal
 from .. import exc
@@ -623,6 +624,34 @@ class FromClause(roles.AnonymizedFromClauseRole, Selectable):
 
         return Alias._construct(self, name)
 
+    @util.preload_module("sqlalchemy.sql.sqltypes")
+    def table_valued(self):
+        """Return a :class:`_sql.TableValuedColumn` object for this
+        :class:`_expression.FromClause`.
+
+        A :class:`_sql.TableValuedColumn` is a :class:`_sql.ColumnElement` that
+        represents a complete row in a table. Support for this construct is
+        backend dependent, and it is rendered in various forms by backends
+        such as PostgreSQL, Oracle and SQL Server.
+
+        E.g.::
+
+            >>> from sqlalchemy import select, column, func, table
+            >>> a = table("a", column("id"), column("x"), column("y"))
+            >>> stmt = select(func.row_to_json(a.table_valued()))
+            >>> print(stmt)
+            SELECT row_to_json(a) AS row_to_json_1
+            FROM a
+
+        .. versionadded:: 1.4.0b2
+
+        .. seealso::
+
+            :ref:`tutorial_functions` - in the :ref:`unified_tutorial`
+
+        """
+        return TableValuedColumn(self, type_api.TABLEVALUE)
+
     def tablesample(self, sampling, name=None, seed=None):
         """Return a TABLESAMPLE alias of this :class:`_expression.FromClause`.
 
@@ -1519,6 +1548,8 @@ class AliasedReturnsRows(NoInit, FromClause):
     _is_from_container = True
     named_with_column = True
 
+    _supports_derived_columns = False
+
     _traverse_internals = [
         ("element", InternalTraversal.dp_clauseelement),
         ("name", InternalTraversal.dp_anon_name),
@@ -1678,6 +1709,151 @@ class Alias(roles.DMLTableRole, AliasedReturnsRows):
         ).alias(name=name, flat=flat)
 
 
+class TableValuedAlias(Alias):
+    """An alias against a "table valued" SQL function.
+
+    This construct provides for a SQL function that returns columns
+    to be used in the FROM clause of a SELECT statement.   The
+    object is generated using the :meth:`_functions.FunctionElement.table_valued`
+    method, e.g.::
+
+        >>> from sqlalchemy import select, func
+        >>> fn = func.json_array_elements_text('["one", "two", "three"]').table_valued("value")
+        >>> print(select(fn.c.value))
+        SELECT anon_1.value
+        FROM json_array_elements_text(:json_array_elements_text_1) AS anon_1
+
+    .. versionadded:: 1.4.0b2
+
+    .. seealso::
+
+        :ref:`tutorial_functions_table_valued` - in the :ref:`unified_tutorial`
+
+    """  # noqa E501
+
+    __visit_name__ = "table_valued_alias"
+
+    _supports_derived_columns = True
+    _render_derived = False
+    _render_derived_w_types = False
+
+    _traverse_internals = [
+        ("element", InternalTraversal.dp_clauseelement),
+        ("name", InternalTraversal.dp_anon_name),
+        ("_tableval_type", InternalTraversal.dp_type),
+        ("_render_derived", InternalTraversal.dp_boolean),
+        ("_render_derived_w_types", InternalTraversal.dp_boolean),
+    ]
+
+    def _init(self, selectable, name=None, table_value_type=None):
+        super(TableValuedAlias, self)._init(selectable, name=name)
+
+        self._tableval_type = (
+            type_api.TABLEVALUE
+            if table_value_type is None
+            else table_value_type
+        )
+
+    @HasMemoized.memoized_attribute
+    def column(self):
+        """Return a column expression representing this
+        :class:`_sql.TableValuedAlias`.
+
+        This accessor is used to implement the
+        :meth:`_functions.FunctionElement.column_valued` method. See that
+        method for further details.
+
+        E.g.::
+
+            >>> print(select(func.some_func().table_valued("value").column))
+            SELECT anon_1 FROM some_func() AS anon_1
+
+        .. seealso::
+
+            :meth:`_functions.FunctionElement.column_valued`
+
+        """
+
+        return TableValuedColumn(self, self._tableval_type)
+
+    def alias(self, name=None):
+        """Return a new alias of this :class:`_sql.TableValuedAlias`.
+
+        This creates a distinct FROM object that will be distinguished
+        from the original one when used in a SQL statement.
+
+        """
+
+        tva = TableValuedAlias._construct(self, name=name)
+        if self._render_derived:
+            tva._render_derived = True
+            tva._render_derived_w_types = self._render_derived_w_types
+        return tva
+
+    def lateral(self, name=None):
+        """Return a new :class:`_sql.TableValuedAlias` with the lateral flag set,
+        so that it renders as LATERAL.
+
+        .. seealso::
+
+            :func:`_expression.lateral`
+
+        """
+        tva = self.alias(name=name)
+        tva._is_lateral = True
+        return tva
+
+    def render_derived(self, name=None, with_types=False):
+        """Apply "render derived" to this :class:`_sql.TableValuedAlias`.
+
+        This has the effect that the individual column names are listed out
+        after the alias name in the "AS" sequence, e.g.::
+
+            >>> print(
+            ...     select(
+            ...         func.unnest(array(["one", "two", "three"])).
+            ...         table_valued("x", with_ordinality="o").render_derived()
+            ...     )
+            ... )
+            SELECT anon_1.x, anon_1.o
+            FROM unnest(ARRAY[%(param_1)s, %(param_2)s, %(param_3)s]) WITH ORDINALITY AS anon_1(x, o)
+
+        The ``with_types`` keyword will render column types inline within
+        the alias expression (this syntax currently applies to the
+        PostgreSQL database)::
+
+            >>> print(
+            ...     select(
+            ...         func.json_to_recordset(
+            ...             '[{"a":1,"b":"foo"},{"a":"2","c":"bar"}]'
+            ...         )
+            ...         .table_valued(column("a", Integer), column("b", String))
+            ...         .render_derived(with_types=True)
+            ...     )
+            ... )
+            SELECT anon_1.a, anon_1.b FROM json_to_recordset(:json_to_recordset_1)
+            AS anon_1(a INTEGER, b VARCHAR)
+
+        :param name: optional string name that will be applied to the alias
+         generated.  If left as None, a unique anonymizing name will be used.
+
+        :param with_types: if True, the derived columns will include the
+         datatype specification with each column. This is a special syntax
+         currently known to be required by PostgreSQL for some SQL functions.
+
+        """  # noqa E501
+
+        # note: don't use the @_generative system here, keep a reference
+        # to the original object.  otherwise you can have re-use of the
+        # python id() of the original which can cause name conflicts if
+        # a new anon-name grabs the same identifier as the local anon-name
+        # (just saw it happen on CI)
+        new_alias = TableValuedAlias._construct(self, name=name)
+        new_alias._render_derived = True
+        new_alias._render_derived_w_types = with_types
+        return new_alias
+
+
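
To illustrate how :meth:`_sql.TableValuedAlias.render_derived`,
:meth:`_sql.TableValuedAlias.lateral` and the ``.c`` collection chain together,
a rough sketch based on the compile test added later in this patch (table and
column names come from that test)::

    from sqlalchemy import Float, Text, column, func, select, table, true

    table1 = table("table1", column("user_id"), column("jsonb"))

    jsonb_table = (
        func.jsonb_to_recordset(table1.c.jsonb)
        .table_valued(column("name", Text), column("time", Float))
        .render_derived(with_types=True)
        .lateral("jsonb_table")
    )

    stmt = select(table1.c.user_id, jsonb_table.c.name).join(jsonb_table, true())
    # roughly: ... JOIN LATERAL jsonb_to_recordset(table1.jsonb)
    #              AS jsonb_table(name TEXT, time FLOAT) ON true
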
 class Lateral(AliasedReturnsRows):
     """Represent a LATERAL subquery.
 
@@ -2473,7 +2649,7 @@ class Values(Generative, FromClause):
 
             value_expr = values(
                 column('id', Integer),
-                column('name', Integer),
+                column('name', String),
                 name="my_values"
             ).data(
                 [(1, 'name1'), (2, 'name2'), (3, 'name3')]
index d20c8168d569890784ae61542dbf3214d8c15cb0..3484b3ac8b8421ef12d5f4eef23752acc92de7fb 100644 (file)
@@ -26,6 +26,8 @@ from .elements import _defer_name
 from .elements import quoted_name
 from .elements import Slice
 from .elements import TypeCoerce as type_coerce  # noqa
+from .traversals import HasCacheKey
+from .traversals import InternalTraversal
 from .type_api import Emulated
 from .type_api import NativeForEmulated  # noqa
 from .type_api import to_instance
@@ -2049,17 +2051,18 @@ class JSON(Indexable, TypeEngine):
        JSON types.  Since it supports JSON SQL operations, it only
        works on backends that have an actual JSON type, currently:
 
-       * PostgreSQL - see :class:`_postgresql.JSON` and
-         :class:`_postgresql.JSONB` for backend-specific notes
+       * PostgreSQL - see :class:`sqlalchemy.dialects.postgresql.JSON` and
+         :class:`sqlalchemy.dialects.postgresql.JSONB` for backend-specific
+         notes
 
-       * MySQL as of version 5.7 (MariaDB as of the 10.2 series does not) - see
-         :class:`_mysql.JSON` for backend-specific notes
+       * MySQL - see
+         :class:`sqlalchemy.dialects.mysql.JSON` for backend-specific notes
 
-       * SQLite as of version 3.9 - see :class:`_sqlite.JSON` for
-         backend-specific notes
+       * SQLite as of version 3.9 - see
+         :class:`sqlalchemy.dialects.sqlite.JSON` for backend-specific notes
 
-       * Microsoft SQL Server 2016 and later - see :class:`_mssql.JSON` for
-         backend-specific notes
+       * Microsoft SQL Server 2016 and later - see
+         :class:`sqlalchemy.dialects.mssql.JSON` for backend-specific notes
 
     :class:`_types.JSON` is part of the Core in support of the growing
     popularity of native JSON datatypes.
@@ -2102,9 +2105,10 @@ class JSON(Indexable, TypeEngine):
       .. versionadded:: 1.3.11
 
     Additional operations may be available from the dialect-specific versions
-    of :class:`_types.JSON`, such as :class:`_postgresql.JSON` and
-    :class:`_postgresql.JSONB` which both offer additional PostgreSQL-specific
-    operations.
+    of :class:`_types.JSON`, such as
+    :class:`sqlalchemy.dialects.postgresql.JSON` and
+    :class:`sqlalchemy.dialects.postgresql.JSONB` which both offer additional
+    PostgreSQL-specific operations.
 
     **Casting JSON Elements to Other Types**
 
@@ -2219,13 +2223,13 @@ class JSON(Indexable, TypeEngine):
 
     .. seealso::
 
-        :class:`_postgresql.JSON`
+        :class:`sqlalchemy.dialects.postgresql.JSON`
 
-        :class:`_postgresql.JSONB`
+        :class:`sqlalchemy.dialects.postgresql.JSONB`
 
-        :class:`.mysql.JSON`
+        :class:`sqlalchemy.dialects.mysql.JSON`
 
-        :class:`_sqlite.JSON`
+        :class:`sqlalchemy.dialects.sqlite.JSON`
 
     .. versionadded:: 1.1
 
@@ -2579,11 +2583,11 @@ class ARRAY(SchemaEventTarget, Indexable, Concatenable, TypeEngine):
     """Represent a SQL Array type.
 
     .. note::  This type serves as the basis for all ARRAY operations.
-       However, currently **only the PostgreSQL backend has support
-       for SQL arrays in SQLAlchemy**.  It is recommended to use the
-       :class:`_postgresql.ARRAY` type directly when using ARRAY types
-       with PostgreSQL, as it provides additional operators specific
-       to that backend.
+       However, currently **only the PostgreSQL backend has support for SQL
+       arrays in SQLAlchemy**. It is recommended to use the PostgreSQL-specific
+       :class:`sqlalchemy.dialects.postgresql.ARRAY` type directly when using
+       ARRAY types with PostgreSQL, as it provides additional operators
+       specific to that backend.
 
     :class:`_types.ARRAY` is part of the Core in support of various SQL
     standard functions such as :class:`_functions.array_agg`
@@ -2665,7 +2669,7 @@ class ARRAY(SchemaEventTarget, Indexable, Concatenable, TypeEngine):
 
     .. seealso::
 
-        :class:`_postgresql.ARRAY`
+        :class:`sqlalchemy.dialects.postgresql.ARRAY`
 
     """
 
@@ -3110,6 +3114,22 @@ class NullType(TypeEngine):
     comparator_factory = Comparator
 
 
+class TableValueType(HasCacheKey, TypeEngine):
+    """Refers to a table value type."""
+
+    _is_table_value = True
+
+    _traverse_internals = [
+        ("_elements", InternalTraversal.dp_clauseelement_list),
+    ]
+
+    def __init__(self, *elements):
+        self._elements = [
+            coercions.expect(roles.StrAsPlainColumnRole, elem)
+            for elem in elements
+        ]
+
+
 class MatchType(Boolean):
     """Refers to the return type of the MATCH operator.
 
@@ -3131,6 +3151,7 @@ BOOLEANTYPE = Boolean()
 STRINGTYPE = String()
 INTEGERTYPE = Integer()
 MATCHTYPE = MatchType()
+TABLEVALUE = TableValueType()
 
 _type_map = {
     int: Integer(),
@@ -3183,5 +3204,6 @@ type_api.INTEGERTYPE = INTEGERTYPE
 type_api.NULLTYPE = NULLTYPE
 type_api.MATCHTYPE = MATCHTYPE
 type_api.INDEXABLE = Indexable
+type_api.TABLEVALUE = TABLEVALUE
 type_api._resolve_value_to_type = _resolve_value_to_type
 TypeEngine.Comparator.BOOLEANTYPE = BOOLEANTYPE
index 2887813adf3763168680e285b481cd796be6fa3c..51a5310009f881163a7dfd60ceb0b60d560dca5a 100644 (file)
@@ -857,7 +857,10 @@ class _GetChildren(InternalTraversal):
     with a _traverse_internals collection."""
 
     def visit_has_cache_key(self, element, **kw):
-        return (element,)
+        # the GetChildren traversal refers explicitly to ClauseElement
+        # structures.  Within these, a plain HasCacheKey is not a
+        # ClauseElement, so don't include these.
+        return ()
 
     def visit_clauseelement(self, element, **kw):
         return (element,)
index 462a8763b0709fb88bd06356b2a6050f649e65b3..46751cb22c8005b693193ebaa37f03e0b36f032e 100644 (file)
@@ -24,6 +24,7 @@ NULLTYPE = None
 STRINGTYPE = None
 MATCHTYPE = None
 INDEXABLE = None
+TABLEVALUE = None
 _resolve_value_to_type = None
 
 
@@ -45,6 +46,7 @@ class TypeEngine(Traversible):
     _sqla_type = True
     _isnull = False
     _is_tuple_type = False
+    _is_table_value = False
 
     class Comparator(operators.ColumnOperators):
         """Base class for custom comparison operations defined at the
index fe0fbf6691f9761940b09abc6fa339b5a6513a98..8e113849ee2b500796811f3c8de92ffe63749373 100644 (file)
@@ -75,10 +75,8 @@ def _generate_compiler_dispatch(cls):
         try:
             meth = getter(visitor)
         except AttributeError as err:
-            util.raise_(
-                exc.UnsupportedCompilationError(visitor, cls),
-                replace_context=err,
-            )
+            return visitor.visit_unsupported_compilation(self, err, **kw)
+
         else:
             return meth(self, **kw)
 
index f5286c85d4bf1bf2df87c0e5ae1ede11b31885e6..d8da9c8184657ae34f0246f508e45f95390c057c 100644 (file)
@@ -57,6 +57,18 @@ class SuiteRequirements(Requirements):
 
         return exclusions.open()
 
+    @property
+    def table_value_constructor(self):
+        """Database / dialect supports a query like::
+
+             SELECT * FROM (VALUES (c1, c2), (c1, c2), ...)
+             AS some_table(col1, col2)
+
+        SQLAlchemy generates this with the :func:`_sql.values` function.
+
+        """
+        return exclusions.closed()
+
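
For context, the construct gated by this requirement looks like the following,
mirroring the suite test added below::

    from sqlalchemy import Integer, String, column, select, values

    value_expr = values(
        column("id", Integer), column("name", String), name="my_values"
    ).data([(1, "name1"), (2, "name2"), (3, "name3")])

    stmt = select(value_expr)  # renders a VALUES expression in the FROM clause
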
     @property
     def standard_cursor_sql(self):
         """Target database passes SQL-92 style statements to cursor.execute()
index 0d9f08848fa404db025695434402ec00b724ea60..c6dacd42394c234914c0d6e5be684655844c84f8 100644 (file)
@@ -32,6 +32,7 @@ from ... import true
 from ... import tuple_
 from ... import union
 from ... import util
+from ... import values
 from ...exc import DatabaseError
 from ...exc import ProgrammingError
 
@@ -151,6 +152,22 @@ class OrderByLabelTest(fixtures.TablesTest):
         self._assert_result(stmt, [(1, 3), (1, 5), (1, 7)])
 
 
+class ValuesExpressionTest(fixtures.TestBase):
+    __requires__ = ("table_value_constructor",)
+
+    __backend__ = True
+
+    def test_tuples(self, connection):
+        value_expr = values(
+            column("id", Integer), column("name", String), name="my_values"
+        ).data([(1, "name1"), (2, "name2"), (3, "name3")])
+
+        eq_(
+            connection.execute(select(value_expr)).all(),
+            [(1, "name1"), (2, "name2"), (3, "name3")],
+        )
+
+
 class FetchLimitOffsetTest(fixtures.TablesTest):
     __backend__ = True
 
index d3b2e06441b1bb767803cb3abc758ebefd50e962..d22016e8f9fa941c734851072195df3d7f130016 100644 (file)
@@ -589,3 +589,83 @@ class MatchTest(fixtures.TablesTest, AssertsCompiledSQL):
             .order_by(matchtable.c.id)
         ).fetchall()
         eq_([1, 3, 5], [r.id for r in results])
+
+
+class TableValuedTest(fixtures.TestBase):
+    __backend__ = True
+    __only_on__ = "mssql"
+
+    @testing.fixture
+    def scalar_strings(self, connection):
+        connection.exec_driver_sql(
+            """
+
+CREATE FUNCTION scalar_strings (
+)
+RETURNS TABLE
+AS
+RETURN
+    SELECT
+        my_string
+    FROM (
+        VALUES ('some string'), ('some string'), ('some string')
+    ) AS my_tab(my_string)
+        """
+        )
+        yield
+        connection.exec_driver_sql("DROP FUNCTION scalar_strings")
+
+    @testing.fixture
+    def two_strings(self, connection):
+        connection.exec_driver_sql(
+            """
+CREATE FUNCTION three_pairs (
+)
+RETURNS TABLE
+AS
+RETURN
+    SELECT
+        s1 AS string1, s2 AS string2
+    FROM (
+        VALUES ('a', 'b'), ('c', 'd'), ('e', 'f')
+    ) AS my_tab(s1, s2)
+"""
+        )
+        yield
+        connection.exec_driver_sql("DROP FUNCTION three_pairs")
+
+    def test_scalar_strings_control(self, scalar_strings, connection):
+        result = (
+            connection.exec_driver_sql(
+                "SELECT my_string FROM scalar_strings()"
+            )
+            .scalars()
+            .all()
+        )
+        eq_(result, ["some string"] * 3)
+
+    def test_scalar_strings_named_control(self, scalar_strings, connection):
+        result = (
+            connection.exec_driver_sql(
+                "SELECT anon_1.my_string " "FROM scalar_strings() AS anon_1"
+            )
+            .scalars()
+            .all()
+        )
+        eq_(result, ["some string"] * 3)
+
+    def test_scalar_strings(self, scalar_strings, connection):
+        fn = func.scalar_strings().table_valued("my_string")
+        result = connection.execute(select(fn.c.my_string)).scalars().all()
+        eq_(result, ["some string"] * 3)
+
+    def test_two_strings_control(self, two_strings, connection):
+        result = connection.exec_driver_sql(
+            "SELECT string1, string2 FROM three_pairs ()"
+        ).all()
+        eq_(result, [("a", "b"), ("c", "d"), ("e", "f")])
+
+    def test_two_strings(self, two_strings, connection):
+        fn = func.three_pairs().table_valued("string1", "string2")
+        result = connection.execute(select(fn.c.string1, fn.c.string2)).all()
+        eq_(result, [("a", "b"), ("c", "d"), ("e", "f")])
index 21be331a5f682c33ad6437731c11f9e95968605b..c8e0ff7ef66af8153981d5e2cc2aaa8515bdfc9e 100644 (file)
@@ -1482,3 +1482,34 @@ class RegexpTest(fixtures.TestBase, testing.AssertsCompiledSQL):
             "REGEXP_REPLACE(mytable.myid, :myid_1, :myid_2, mytable.name)",
             checkparams={"myid_1": "pattern", "myid_2": "replacement"},
         )
+
+
+class TableValuedFunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
+    __dialect__ = "oracle"
+
+    def test_scalar_alias_column(self):
+        fn = func.scalar_strings(5)
+        stmt = select(fn.alias().column)
+        self.assert_compile(
+            stmt,
+            "SELECT COLUMN_VALUE anon_1 "
+            "FROM TABLE (scalar_strings(:scalar_strings_1)) anon_1",
+        )
+
+    def test_column_valued(self):
+        fn = func.scalar_strings(5)
+        stmt = select(fn.column_valued())
+        self.assert_compile(
+            stmt,
+            "SELECT COLUMN_VALUE anon_1 "
+            "FROM TABLE (scalar_strings(:scalar_strings_1)) anon_1",
+        )
+
+    def test_table_valued(self):
+        fn = func.three_pairs().table_valued("string1", "string2")
+        stmt = select(fn.c.string1, fn.c.string2)
+        self.assert_compile(
+            stmt,
+            "SELECT anon_1.string1, anon_1.string2 "
+            "FROM TABLE (three_pairs()) anon_1",
+        )
index 2ec30684254c596eb0836bbf3df3e86a19afdc77..cce33d4a26a26f8b3eff953081cfc1f464abf2d9 100644 (file)
@@ -7,6 +7,7 @@ from sqlalchemy import Computed
 from sqlalchemy import create_engine
 from sqlalchemy import exc
 from sqlalchemy import Float
+from sqlalchemy import func
 from sqlalchemy import Integer
 from sqlalchemy import literal_column
 from sqlalchemy import outparam
@@ -946,3 +947,110 @@ class CXOracleConnectArgsTest(fixtures.TestBase):
             "auto_convert_lobs",
             False,
         )
+
+
+class TableValuedTest(fixtures.TestBase):
+    __backend__ = True
+    __only_on__ = "oracle"
+
+    @testing.fixture
+    def scalar_strings(self, connection):
+        connection.exec_driver_sql(
+            "CREATE OR REPLACE TYPE strings_t IS TABLE OF VARCHAR2 (100)"
+        )
+        connection.exec_driver_sql(
+            r"""
+CREATE OR REPLACE FUNCTION scalar_strings (
+   count_in IN INTEGER)
+   RETURN strings_t
+   AUTHID DEFINER
+IS
+   l_strings   strings_t := strings_t ();
+BEGIN
+   l_strings.EXTEND (count_in);
+
+   FOR indx IN 1 .. count_in
+   LOOP
+      l_strings (indx) := 'some string';
+   END LOOP;
+
+   RETURN l_strings;
+END;
+        """
+        )
+        yield
+        connection.exec_driver_sql("DROP FUNCTION scalar_strings")
+        connection.exec_driver_sql("DROP TYPE strings_t")
+
+    @testing.fixture
+    def two_strings(self, connection):
+        connection.exec_driver_sql(
+            """
+CREATE OR REPLACE TYPE two_strings_ot
+   AUTHID DEFINER IS OBJECT
+(
+   string1 VARCHAR2 (10),
+   string2 VARCHAR2 (10)
+)"""
+        )
+        connection.exec_driver_sql(
+            """
+            CREATE OR REPLACE TYPE two_strings_nt IS TABLE OF two_strings_ot
+"""
+        )
+
+        connection.exec_driver_sql(
+            """
+        CREATE OR REPLACE FUNCTION three_pairs
+   RETURN two_strings_nt
+   AUTHID DEFINER
+IS
+   l_strings   two_strings_nt;
+BEGIN
+   RETURN two_strings_nt (two_strings_ot ('a', 'b'),
+                          two_strings_ot ('c', 'd'),
+                          two_strings_ot ('e', 'f'));
+END;
+"""
+        )
+        yield
+        connection.exec_driver_sql("DROP FUNCTION three_pairs")
+        connection.exec_driver_sql("DROP TYPE two_strings_nt")
+        connection.exec_driver_sql("DROP TYPE two_strings_ot")
+
+    def test_scalar_strings_control(self, scalar_strings, connection):
+        result = (
+            connection.exec_driver_sql(
+                "SELECT COLUMN_VALUE my_string FROM TABLE (scalar_strings (5))"
+            )
+            .scalars()
+            .all()
+        )
+        eq_(result, ["some string"] * 5)
+
+    def test_scalar_strings_named_control(self, scalar_strings, connection):
+        result = (
+            connection.exec_driver_sql(
+                "SELECT COLUMN_VALUE anon_1 "
+                "FROM TABLE (scalar_strings (5)) anon_1"
+            )
+            .scalars()
+            .all()
+        )
+        eq_(result, ["some string"] * 5)
+
+    def test_scalar_strings(self, scalar_strings, connection):
+        fn = func.scalar_strings(5)
+        result = connection.execute(select(fn.column_valued())).scalars().all()
+        eq_(result, ["some string"] * 5)
+
+    def test_two_strings_control(self, two_strings, connection):
+        result = connection.exec_driver_sql(
+            "SELECT string1, string2 FROM TABLE (three_pairs ())"
+        ).all()
+        eq_(result, [("a", "b"), ("c", "d"), ("e", "f")])
+
+    def test_two_strings(self, two_strings, connection):
+        fn = func.three_pairs().table_valued("string1", "string2")
+        result = connection.execute(select(fn.c.string1, fn.c.string2)).all()
+        eq_(result, [("a", "b"), ("c", "d"), ("e", "f")])
index 47f86e791f6507966a9b4cac3b8147c46a63425f..ce64ca16fc55294a36727f0aa8b9369c4e78e3e8 100644 (file)
@@ -3,11 +3,13 @@
 import datetime
 
 from sqlalchemy import and_
+from sqlalchemy import cast
 from sqlalchemy import Column
 from sqlalchemy import Date
 from sqlalchemy import DateTime
 from sqlalchemy import exc
 from sqlalchemy import extract
+from sqlalchemy import Float
 from sqlalchemy import ForeignKey
 from sqlalchemy import func
 from sqlalchemy import Integer
@@ -22,8 +24,10 @@ from sqlalchemy import Table
 from sqlalchemy import testing
 from sqlalchemy import text
 from sqlalchemy import Time
+from sqlalchemy import true
 from sqlalchemy import tuple_
 from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.testing import assert_raises
 from sqlalchemy.testing import AssertsCompiledSQL
 from sqlalchemy.testing import AssertsExecutionResults
@@ -1097,3 +1101,233 @@ class ExtractTest(fixtures.TablesTest):
             datetime.timedelta(days=5) + t.c.dtme,
             overrides={"day": 15, "epoch": 1337084125.0},
         )
+
+
+class TableValuedRoundTripTest(fixtures.TestBase):
+    __backend__ = True
+    __only_on__ = "postgresql"
+
+    def test_generate_series_scalar(self, connection):
+        x = func.generate_series(1, 2).alias("x")
+        y = func.generate_series(1, 2).alias("y")
+
+        stmt = select(x.column, y.column).join_from(x, y, true())
+
+        eq_(connection.execute(stmt).all(), [(1, 1), (1, 2), (2, 1), (2, 2)])
+
+    def test_aggregate_scalar_over_table_valued(self, metadata, connection):
+        test = Table(
+            "test", metadata, Column("id", Integer), Column("data", JSONB)
+        )
+        test.create(connection)
+
+        connection.execute(
+            test.insert(),
+            [
+                {"id": 1, "data": {"key": [23.7, 108.17, 55.98]}},
+                {"id": 2, "data": {"key": [2.320, 9.55]}},
+                {"id": 3, "data": {"key": [10.5, 6]}},
+            ],
+        )
+
+        elem = (
+            func.jsonb_array_elements_text(test.c.data["key"])
+            .table_valued("value")
+            .alias("elem")
+        )
+
+        maxdepth = select(func.max(cast(elem.c.value, Float))).label(
+            "maxdepth"
+        )
+
+        stmt = select(test.c.id.label("test_id"), maxdepth).order_by(
+            "maxdepth"
+        )
+
+        eq_(
+            connection.execute(stmt).all(), [(2, 9.55), (3, 10.5), (1, 108.17)]
+        )
+
+    @testing.fixture
+    def assets_transactions(self, metadata, connection):
+        assets_transactions = Table(
+            "assets_transactions",
+            metadata,
+            Column("id", Integer),
+            Column("contents", JSONB),
+        )
+        assets_transactions.create(connection)
+        connection.execute(
+            assets_transactions.insert(),
+            [
+                {"id": 1, "contents": {"k1": "v1"}},
+                {"id": 2, "contents": {"k2": "v2"}},
+                {"id": 3, "contents": {"k3": "v3"}},
+            ],
+        )
+        return assets_transactions
+
+    def test_scalar_table_valued(self, assets_transactions, connection):
+        stmt = select(
+            assets_transactions.c.id,
+            func.jsonb_each(
+                assets_transactions.c.contents, type_=JSONB
+            ).scalar_table_valued("key"),
+            func.jsonb_each(
+                assets_transactions.c.contents, type_=JSONB
+            ).scalar_table_valued("value"),
+        )
+
+        eq_(
+            connection.execute(stmt).all(),
+            [(1, "k1", "v1"), (2, "k2", "v2"), (3, "k3", "v3")],
+        )
+
+    def test_table_valued(self, assets_transactions, connection):
+
+        jb = func.jsonb_each(assets_transactions.c.contents).table_valued(
+            "key", "value"
+        )
+
+        stmt = select(assets_transactions.c.id, jb.c.key, jb.c.value).join(
+            jb, true()
+        )
+        eq_(
+            connection.execute(stmt).all(),
+            [(1, "k1", "v1"), (2, "k2", "v2"), (3, "k3", "v3")],
+        )
+
+    @testing.fixture
+    def axy_table(self, metadata, connection):
+        a = Table(
+            "a",
+            metadata,
+            Column("id", Integer),
+            Column("x", Integer),
+            Column("y", Integer),
+        )
+        a.create(connection)
+        connection.execute(
+            a.insert(),
+            [
+                {"id": 1, "x": 5, "y": 4},
+                {"id": 2, "x": 15, "y": 3},
+                {"id": 3, "x": 7, "y": 9},
+            ],
+        )
+
+        return a
+
+    def test_function_against_table_record(self, axy_table, connection):
+        """
+        SELECT row_to_json(anon_1) AS row_to_json_1
+        FROM (SELECT a.id AS id, a.x AS x, a.y AS y
+        FROM a) AS anon_1
+
+        """
+
+        stmt = select(func.row_to_json(axy_table.table_valued()))
+
+        eq_(
+            connection.execute(stmt).scalars().all(),
+            [
+                {"id": 1, "x": 5, "y": 4},
+                {"id": 2, "x": 15, "y": 3},
+                {"id": 3, "x": 7, "y": 9},
+            ],
+        )
+
+    def test_function_against_subq_record(self, axy_table, connection):
+        """
+        SELECT row_to_json(anon_1) AS row_to_json_1
+        FROM (SELECT a.id AS id, a.x AS x, a.y AS y
+        FROM a) AS anon_1
+
+        """
+
+        stmt = select(
+            func.row_to_json(axy_table.select().subquery().table_valued())
+        )
+
+        eq_(
+            connection.execute(stmt).scalars().all(),
+            [
+                {"id": 1, "x": 5, "y": 4},
+                {"id": 2, "x": 15, "y": 3},
+                {"id": 3, "x": 7, "y": 9},
+            ],
+        )
+
+    def test_function_against_row_constructor(self, connection):
+
+        stmt = select(func.row_to_json(func.row(1, "foo")))
+
+        eq_(connection.scalar(stmt), {"f1": 1, "f2": "foo"})
+
+    def test_with_ordinality_named(self, connection):
+
+        stmt = select(
+            func.generate_series(4, 1, -1)
+            .table_valued("gs", with_ordinality="ordinality")
+            .render_derived()
+        )
+
+        eq_(connection.execute(stmt).all(), [(4, 1), (3, 2), (2, 3), (1, 4)])
+
+    def test_with_ordinality_star(self, connection):
+
+        stmt = select("*").select_from(
+            func.generate_series(4, 1, -1).table_valued(
+                with_ordinality="ordinality"
+            )
+        )
+
+        eq_(connection.execute(stmt).all(), [(4, 1), (3, 2), (2, 3), (1, 4)])
+
+    def test_plain_old_unnest(self, connection):
+        fn = func.unnest(
+            postgresql.array(["one", "two", "three", "four"])
+        ).column_valued()
+
+        stmt = select(fn)
+
+        eq_(
+            connection.execute(stmt).all(),
+            [("one",), ("two",), ("three",), ("four",)],
+        )
+
+    def test_unnest_with_ordinality(self, connection):
+
+        array_val = postgresql.array(
+            [postgresql.array([14, 41, 7]), postgresql.array([54, 9, 49])]
+        )
+        stmt = select("*").select_from(
+            func.unnest(array_val)
+            .table_valued("elts", with_ordinality="num")
+            .render_derived()
+            .alias("t")
+        )
+        eq_(
+            connection.execute(stmt).all(),
+            [(14, 1), (41, 2), (7, 3), (54, 4), (9, 5), (49, 6)],
+        )
+
+    def test_unnest_with_ordinality_named(self, connection):
+
+        array_val = postgresql.array(
+            [postgresql.array([14, 41, 7]), postgresql.array([54, 9, 49])]
+        )
+
+        fn = (
+            func.unnest(array_val)
+            .table_valued("elts", with_ordinality="num")
+            .alias("t")
+            .render_derived()
+        )
+
+        stmt = select(fn.c.elts, fn.c.num)
+
+        eq_(
+            connection.execute(stmt).all(),
+            [(14, 1), (41, 2), (7, 3), (54, 4), (9, 5), (49, 6)],
+        )
index bfa8d8cf62451c9f60356d622a56fef42005d287..1e4d0916c48ae079f6a62d83d9db7ccc908d0393 100644 (file)
@@ -603,6 +603,10 @@ class DefaultRequirements(SuiteRequirements):
             self._sqlite_file_db
         )
 
+    @property
+    def table_value_constructor(self):
+        return only_on(["postgresql", "mssql"])
+
     @property
     def update_nowait(self):
         """Target database must support SELECT...FOR UPDATE NOWAIT"""
@@ -949,6 +953,13 @@ class DefaultRequirements(SuiteRequirements):
 
         return skip_if(["mssql", "sqlite"])
 
+    @property
+    def tuple_valued_builtin_functions(self):
+        return only_on(
+            lambda config: self._sqlite_json(config)
+            or against(config, "postgresql")
+        )
+
     @property
     def array_type(self):
         return only_on(
index fc9fd0926c89fdb1f90e6ef4c549e166300f45ca..30235995dbf7056ee8ce7e17f97d0f08ef818379 100644 (file)
@@ -273,6 +273,39 @@ class CoreFixtures(object):
             func.next_value(Sequence("q")),
             func.next_value(Sequence("p")),
         ),
+        lambda: (
+            func.json_to_recordset("{foo}"),
+            func.json_to_recordset("{foo}").table_valued("a", "b"),
+            func.jsonb_to_recordset("{foo}").table_valued("a", "b"),
+            func.json_to_recordset("{foo}")
+            .table_valued("a", "b")
+            .render_derived(),
+            func.json_to_recordset("{foo}")
+            .table_valued("a", with_ordinality="b")
+            .render_derived(),
+            func.json_to_recordset("{foo}")
+            .table_valued("a", with_ordinality="c")
+            .render_derived(),
+            func.json_to_recordset("{foo}")
+            .table_valued(column("a", Integer), column("b", String))
+            .render_derived(),
+            func.json_to_recordset("{foo}")
+            .table_valued(column("a", Integer), column("b", String))
+            .render_derived(with_types=True),
+            func.json_to_recordset("{foo}")
+            .table_valued("b", "c")
+            .render_derived(),
+            func.json_to_recordset("{foo}")
+            .table_valued("a", "b")
+            .alias("foo")
+            .render_derived(with_types=True),
+            func.json_to_recordset("{foo}")
+            .table_valued("a", "b")
+            .alias("foo"),
+            func.json_to_recordset("{foo}").column_valued(),
+            func.json_to_recordset("{foo}").scalar_table_valued("foo"),
+        ),
+        lambda: (table_a.table_valued(), table_b.table_valued()),
         lambda: (True_(), False_()),
         lambda: (Null(),),
         lambda: (ReturnTypeFromArgs("foo"), ReturnTypeFromArgs(5)),
index 19562dade0eb8d88f437f4eaefa15fa82c83d4a2..e460a90cbbe214a5aedf0da888e1b3d4433538b4 100644 (file)
@@ -5,12 +5,15 @@ import decimal
 from sqlalchemy import ARRAY
 from sqlalchemy import bindparam
 from sqlalchemy import Boolean
+from sqlalchemy import cast
 from sqlalchemy import Column
 from sqlalchemy import Date
 from sqlalchemy import DateTime
 from sqlalchemy import extract
+from sqlalchemy import Float
 from sqlalchemy import func
 from sqlalchemy import Integer
+from sqlalchemy import JSON
 from sqlalchemy import literal
 from sqlalchemy import literal_column
 from sqlalchemy import Numeric
@@ -20,6 +23,8 @@ from sqlalchemy import sql
 from sqlalchemy import String
 from sqlalchemy import Table
 from sqlalchemy import testing
+from sqlalchemy import Text
+from sqlalchemy import true
 from sqlalchemy import types as sqltypes
 from sqlalchemy import util
 from sqlalchemy.dialects import mysql
@@ -1150,3 +1155,494 @@ class RegisterTest(fixtures.TestBase, AssertsCompiledSQL):
 
         assert "not_registered_func" not in functions._registry["_default"]
         assert isinstance(func.not_registered_func_child().type, Integer)
+
+
+class TableValuedCompileTest(fixtures.TestBase, AssertsCompiledSQL):
+    """test the full set of functions as FROM developed in [ticket:3566]"""
+
+    __dialect__ = "default_enhanced"
+
+    def test_aggregate_scalar_over_table_valued(self):
+        test = table("test", column("id"), column("data", JSON))
+
+        elem = (
+            func.json_array_elements_text(test.c.data["key"])
+            .table_valued("value")
+            .alias("elem")
+        )
+
+        maxdepth = select(func.max(cast(elem.c.value, Float))).label(
+            "maxdepth"
+        )
+
+        stmt = select(test.c.id.label("test_id"), maxdepth).order_by(
+            "maxdepth"
+        )
+
+        self.assert_compile(
+            stmt,
+            "SELECT test.id AS test_id, "
+            "(SELECT max(CAST(elem.value AS FLOAT)) AS max_1 "
+            "FROM json_array_elements_text(test.data[:data_1]) AS elem) "
+            "AS maxdepth "
+            "FROM test ORDER BY maxdepth",
+        )
+
+    def test_scalar_table_valued(self):
+        assets_transactions = table(
+            "assets_transactions", column("id"), column("contents", JSON)
+        )
+
+        stmt = select(
+            assets_transactions.c.id,
+            func.jsonb_each(
+                assets_transactions.c.contents
+            ).scalar_table_valued("key"),
+            func.jsonb_each(
+                assets_transactions.c.contents
+            ).scalar_table_valued("value"),
+        )
+        self.assert_compile(
+            stmt,
+            "SELECT assets_transactions.id, "
+            "(jsonb_each(assets_transactions.contents)).key, "
+            "(jsonb_each(assets_transactions.contents)).value "
+            "FROM assets_transactions",
+        )
+
+    def test_table_valued_one(self):
+        assets_transactions = table(
+            "assets_transactions", column("id"), column("contents", JSON)
+        )
+
+        jb = func.jsonb_each(assets_transactions.c.contents).table_valued(
+            "key", "value"
+        )
+
+        stmt = select(assets_transactions.c.id, jb.c.key, jb.c.value).join(
+            jb, true()
+        )
+
+        self.assert_compile(
+            stmt,
+            "SELECT assets_transactions.id, anon_1.key, anon_1.value "
+            "FROM assets_transactions "
+            "JOIN jsonb_each(assets_transactions.contents) AS anon_1 ON true",
+        )
+
+    def test_table_valued_two(self):
+        """
+        SELECT vi.id, vv.value
+        FROM value_ids() AS vi JOIN values AS vv ON vv.id = vi.id
+
+        """
+
+        values = table(
+            "values",
+            column(
+                "id",
+                Integer,
+            ),
+            column("value", String),
+        )
+        vi = func.value_ids().table_valued(column("id", Integer)).alias("vi")
+        vv = values.alias("vv")
+
+        stmt = select(vi.c.id, vv.c.value).select_from(  # noqa
+            vi.join(vv, vv.c.id == vi.c.id)
+        )
+        self.assert_compile(
+            stmt,
+            "SELECT vi.id, vv.value FROM value_ids() AS vi "
+            "JOIN values AS vv ON vv.id = vi.id",
+        )
+
+    def test_table_as_table_valued(self):
+        a = table(
+            "a",
+            column("id"),
+            column("x"),
+            column("y"),
+        )
+
+        stmt = select(func.row_to_json(a.table_valued()))
+
+        self.assert_compile(
+            stmt, "SELECT row_to_json(a) AS row_to_json_1 FROM a"
+        )
+
+    def test_subquery_as_table_valued(self):
+        """
+        SELECT row_to_json(anon_1) AS row_to_json_1
+        FROM (SELECT a.id AS id, a.x AS x, a.y AS y
+        FROM a) AS anon_1
+
+        """
+
+        a = table(
+            "a",
+            column("id"),
+            column("x"),
+            column("y"),
+        )
+
+        stmt = select(func.row_to_json(a.select().subquery().table_valued()))
+
+        self.assert_compile(
+            stmt,
+            "SELECT row_to_json(anon_1) AS row_to_json_1 FROM "
+            "(SELECT a.id AS id, a.x AS x, a.y AS y FROM a) AS anon_1",
+        )
+
+    def test_scalar_subquery(self):
+
+        a = table(
+            "a",
+            column("id"),
+            column("x"),
+            column("y"),
+        )
+
+        stmt = select(func.row_to_json(a.select().scalar_subquery()))
+
+        self.assert_compile(
+            stmt,
+            "SELECT row_to_json((SELECT a.id, a.x, a.y FROM a)) "
+            "AS row_to_json_1",
+        )
+
+    def test_named_with_ordinality(self):
+        """
+        SELECT a.id AS a_id, a.refs AS a_refs,
+        unnested.unnested AS unnested_unnested,
+        unnested.ordinality AS unnested_ordinality,
+        b.id AS b_id, b.ref AS b_ref
+        FROM a LEFT OUTER JOIN unnest(a.refs)
+        WITH ORDINALITY AS unnested(unnested, ordinality) ON true
+        LEFT OUTER JOIN b ON unnested.unnested = b.ref
+
+        """  # noqa 501
+
+        a = table("a", column("id"), column("refs"))
+        b = table("b", column("id"), column("ref"))
+
+        unnested = (
+            func.unnest(a.c.refs)
+            .table_valued("unnested", with_ordinality="ordinality")
+            .render_derived()
+            .alias("unnested")
+        )
+
+        stmt = (
+            select(
+                a.c.id, a.c.refs, unnested.c.unnested, unnested.c.ordinality
+            )
+            .outerjoin(unnested, true())
+            .outerjoin(
+                b,
+                unnested.c.unnested == b.c.ref,
+            )
+        )
+        self.assert_compile(
+            stmt,
+            "SELECT a.id, a.refs, unnested.unnested, unnested.ordinality "
+            "FROM a "
+            "LEFT OUTER JOIN unnest(a.refs) "
+            "WITH ORDINALITY AS unnested(unnested, ordinality) ON true "
+            "LEFT OUTER JOIN b ON unnested.unnested = b.ref",
+        )
+
+    def test_star_with_ordinality(self):
+        """
+        SELECT * FROM generate_series(4,1,-1) WITH ORDINALITY;
+        """
+
+        stmt = select("*").select_from(  # noqa
+            func.generate_series(4, 1, -1).table_valued(
+                with_ordinality="ordinality"
+            )
+        )
+        self.assert_compile(
+            stmt,
+            "SELECT * FROM generate_series"
+            "(:generate_series_1, :generate_series_2, :generate_series_3) "
+            "WITH ORDINALITY AS anon_1",
+        )
+
+    def test_json_object_keys_with_ordinality(self):
+        """
+        SELECT * FROM json_object_keys('{"a1":"1","a2":"2","a3":"3"}')
+        WITH ORDINALITY AS t(keys, n);
+        """
+        stmt = select("*").select_from(
+            func.json_object_keys(
+                literal({"a1": "1", "a2": "2", "a3": "3"}, type_=JSON)
+            )
+            .table_valued("keys", with_ordinality="n")
+            .render_derived()
+            .alias("t")
+        )
+
+        self.assert_compile(
+            stmt,
+            "SELECT * FROM json_object_keys(:param_1) "
+            "WITH ORDINALITY AS t(keys, n)",
+        )
+
+    def test_alias_column(self):
+        """
+
+        ::
+
+            SELECT x, y
+            FROM
+                generate_series(:generate_series_1, :generate_series_2) AS x,
+                generate_series(:generate_series_3, :generate_series_4) AS y
+
+        """
+
+        x = func.generate_series(1, 2).alias("x")
+        y = func.generate_series(3, 4).alias("y")
+        stmt = select(x.column, y.column)
+
+        self.assert_compile(
+            stmt,
+            "SELECT x, y FROM "
+            "generate_series(:generate_series_1, :generate_series_2) AS x, "
+            "generate_series(:generate_series_3, :generate_series_4) AS y",
+        )
+
+    def test_column_valued_one(self):
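+        # with no name given, column_valued() generates an anonymous
+        # alias that is selected as a single scalar column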
+        fn = func.unnest(["one", "two", "three", "four"]).column_valued()
+
+        stmt = select(fn)
+
+        self.assert_compile(
+            stmt, "SELECT anon_1 FROM unnest(:unnest_1) AS anon_1"
+        )
+
+    def test_column_valued_two(self):
+        """
+
+        ::
+
+            SELECT x, y
+            FROM
+                generate_series(:generate_series_1, :generate_series_2) AS x,
+                generate_series(:generate_series_3, :generate_series_4) AS y
+
+        """
+
+        x = func.generate_series(1, 2).column_valued("x")
+        y = func.generate_series(3, 4).column_valued("y")
+        stmt = select(x, y)
+
+        self.assert_compile(
+            stmt,
+            "SELECT x, y FROM "
+            "generate_series(:generate_series_1, :generate_series_2) AS x, "
+            "generate_series(:generate_series_3, :generate_series_4) AS y",
+        )
+
+    def test_column_valued_subquery(self):
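+        # the column-valued FROM elements are wrapped inside the subquery;
+        # their names remain addressable via subq.c, e.g. subq.c.x below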
+        x = func.generate_series(1, 2).column_valued("x")
+        y = func.generate_series(3, 4).column_valued("y")
+        subq = select(x, y).subquery()
+        stmt = select(subq).where(subq.c.x > 2)
+
+        self.assert_compile(
+            stmt,
+            "SELECT anon_1.x, anon_1.y FROM "
+            "(SELECT x, y FROM "
+            "generate_series(:generate_series_1, :generate_series_2) AS x, "
+            "generate_series(:generate_series_3, :generate_series_4) AS y"
+            ") AS anon_1 "
+            "WHERE anon_1.x > :x_1",
+        )
+
+    @testing.combinations((True,), (False,))
+    def test_render_derived_with_lateral(self, apply_alias_after_lateral):
+        """
+        # this is the "record" type
+
+        SELECT
+            table1.user_id AS table1_user_id,
+            table2.name AS table2_name,
+            jsonb_table.name AS jsonb_table_name,
+            count(jsonb_table.time) AS count_1
+            FROM table1
+            JOIN table2 ON table1.user_id = table2.id
+            JOIN LATERAL jsonb_to_recordset(table1.jsonb)
+            AS jsonb_table(name TEXT, time FLOAT) ON true
+            WHERE table2.route_id = %(route_id_1)s
+            AND jsonb_table.name IN (%(name_1)s, %(name_2)s, %(name_3)s)
+            GROUP BY table1.user_id, table2.name, jsonb_table.name
+            ORDER BY table2.name
+
+        """  # noqa
+
+        table1 = table("table1", column("user_id"), column("jsonb"))
+        table2 = table(
+            "table2", column("id"), column("name"), column("route_id")
+        )
+        jsonb_table = func.jsonb_to_recordset(table1.c.jsonb).table_valued(
+            column("name", Text), column("time", Float)
+        )
+
+        # a naming concern: lateral() and alias() both generate a new
+        # name unconditionally.  lateral() already works this way, so
+        # make sure that applying .alias() after the fact works too
+        if apply_alias_after_lateral:
+            jsonb_table = (
+                jsonb_table.render_derived(with_types=True)
+                .lateral()
+                .alias("jsonb_table")
+            )
+        else:
+            jsonb_table = jsonb_table.render_derived(with_types=True).lateral(
+                "jsonb_table"
+            )
+
+        stmt = (
+            select(
+                table1.c.user_id,
+                table2.c.name,
+                jsonb_table.c.name.label("jsonb_table_name"),
+                func.count(jsonb_table.c.time),
+            )
+            .select_from(table1)
+            .join(table2, table1.c.user_id == table2.c.id)
+            .join(jsonb_table, true())
+            .where(table2.c.route_id == 5)
+            .where(jsonb_table.c.name.in_(["n1", "n2", "n3"]))
+            .group_by(table1.c.user_id, table2.c.name, jsonb_table.c.name)
+            .order_by(table2.c.name)
+        )
+
+        self.assert_compile(
+            stmt,
+            "SELECT table1.user_id, table2.name, "
+            "jsonb_table.name AS jsonb_table_name, "
+            "count(jsonb_table.time) AS count_1 "
+            "FROM table1 "
+            "JOIN table2 ON table1.user_id = table2.id "
+            "JOIN LATERAL jsonb_to_recordset(table1.jsonb) "
+            "AS jsonb_table(name TEXT, time FLOAT) ON true "
+            "WHERE table2.route_id = 5 "
+            "AND jsonb_table.name IN ('n1', 'n2', 'n3') "
+            "GROUP BY table1.user_id, table2.name, jsonb_table.name "
+            "ORDER BY table2.name",
+            literal_binds=True,
+            render_postcompile=True,
+        )
+
+    def test_function_alias(self):
+        """
+        ::
+
+            SELECT result_elem -> 'Field' as field
+            FROM "check" AS check_, json_array_elements(
+            (
+                SELECT check_inside.response -> 'Results'
+                FROM "check" as check_inside
+                WHERE check_inside.id = check_.id
+            )
+            ) AS result_elem
+            WHERE result_elem ->> 'Name' = 'FooBar'
+
+        """
+        check = table("check", column("id"), column("response", JSON))
+
+        check_inside = check.alias("check_inside")
+        check_outside = check.alias("_check")
+
+        subq = (
+            select(check_inside.c.response["Results"])
+            .where(check_inside.c.id == check_outside.c.id)
+            .scalar_subquery()
+        )
+
+        fn = func.json_array_elements(subq, type_=JSON).alias("result_elem")
+
+        stmt = (
+            select(fn.column["Field"].label("field"))
+            .where(fn.column["Name"] == "FooBar")
+            .select_from(check_outside)
+        )
+
+        self.assert_compile(
+            stmt,
+            "SELECT result_elem[:result_elem_1] AS field "
+            "FROM json_array_elements("
+            "(SELECT check_inside.response[:response_1] AS anon_1 "
+            'FROM "check" AS check_inside '
+            "WHERE check_inside.id = _check.id)"
+            ') AS result_elem, "check" AS _check '
+            "WHERE result_elem[:result_elem_2] = :param_1",
+        )
+
+    def test_named_table_valued(self):
+
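+        # render_derived(with_types=True) emits the derived column list
+        # with datatypes, e.g. AS anon_1(a INTEGER, b VARCHAR)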
+        fn = (
+            func.json_to_recordset(  # noqa
+                '[{"a":1,"b":"foo"},{"a":"2","c":"bar"}]'
+            )
+            .table_valued(column("a", Integer), column("b", String))
+            .render_derived(with_types=True)
+        )
+
+        stmt = select(fn.c.a, fn.c.b)
+
+        self.assert_compile(
+            stmt,
+            "SELECT anon_1.a, anon_1.b "
+            "FROM json_to_recordset(:json_to_recordset_1) "
+            "AS anon_1(a INTEGER, b VARCHAR)",
+        )
+
+    def test_named_table_valued_subquery(self):
+
+        fn = (
+            func.json_to_recordset(  # noqa
+                '[{"a":1,"b":"foo"},{"a":"2","c":"bar"}]'
+            )
+            .table_valued(column("a", Integer), column("b", String))
+            .render_derived(with_types=True)
+        )
+
+        stmt = select(fn.c.a, fn.c.b).subquery()
+
+        stmt = select(stmt)
+
+        self.assert_compile(
+            stmt,
+            "SELECT anon_1.a, anon_1.b FROM "
+            "(SELECT anon_2.a AS a, anon_2.b AS b "
+            "FROM json_to_recordset(:json_to_recordset_1) "
+            "AS anon_2(a INTEGER, b VARCHAR)"
+            ") AS anon_1",
+        )
+
+    def test_named_table_valued_alias(self):
+
+        """select * from json_to_recordset
+        ('[{"a":1,"b":"foo"},{"a":"2","c":"bar"}]') as x(a int, b text);"""
+
+        fn = (
+            func.json_to_recordset(  # noqa
+                '[{"a":1,"b":"foo"},{"a":"2","c":"bar"}]'
+            )
+            .table_valued(column("a", Integer), column("b", String))
+            .render_derived(with_types=True)
+            .alias("jbr")
+        )
+
+        stmt = select(fn.c.a, fn.c.b)
+
+        self.assert_compile(
+            stmt,
+            "SELECT jbr.a, jbr.b "
+            "FROM json_to_recordset(:json_to_recordset_1) "
+            "AS jbr(a INTEGER, b VARCHAR)",
+        )
index c5700fff270f002670e891e833a6df10f6c94f3f..a80ad7083e642d94b49ae13ebd98cf08f0ef8b6e 100644 (file)
@@ -98,6 +98,19 @@ class LateralTest(fixtures.TablesTest, AssertsCompiledSQL):
             "(SELECT people.people_id AS people_id FROM people) AS alias",
         )
 
+    def test_alias_of_lateral(self):
+        table1 = self.tables.people
+        subq = select(table1.c.people_id).subquery()
+
+        # this use case wasn't working until the "lateral" name was
+        # changed to "lateral_" in compiler.visit_lateral(); it was
+        # conflicting with the kwarg before
+        self.assert_compile(
+            select(subq.lateral().alias(name="alias")),
+            "SELECT alias.people_id FROM LATERAL "
+            "(SELECT people.people_id AS people_id FROM people) AS alias",
+        )
+
     def test_select_from_implicit_subquery(self):
         table1 = self.tables.people
         subq = select(table1.c.people_id)
index 0ef90e89e974b3ec74a3c011c5f544eed46a89de..9cbd969f34e64de9443550219b10b05c08c2a402 100644 (file)
@@ -162,6 +162,26 @@ class RoleTest(fixtures.TestBase):
                 select(column("q")).alias(),
             )
 
+    def test_table_valued_advice(self):
+        msg = (
+            r"SQL expression element expected, got %s. To create a "
+            r"column expression from a FROM clause row as a whole, "
+            r"use the .table_valued\(\) method."
+        )
+        assert_raises_message(
+            exc.ArgumentError,
+            msg % ("Table.*",),
+            expect,
+            roles.ExpressionElementRole,
+            t,
+        )
+
+        # no table_valued() message here right now, it goes to scalar subquery
+        with testing.expect_warnings(
+            "implicitly coercing SELECT object to scalar subquery"
+        ):
+            expect(roles.ExpressionElementRole, t.select().alias())
+
     def test_statement_no_text_coercion(self):
         assert_raises_message(
             exc.ArgumentError,