separator. Below we define a convention that will name :class:`.UniqueConstraint`
constraints with a name that joins together the names of all columns::
- metadata = MetaData(naming_convention={
+ metadata_obj = MetaData(naming_convention={
"uq": "uq_%(table_name)s_%(column_0_N_name)s"
})
table = Table(
- 'info', metadata,
+ 'info', metadata_obj,
Column('a', Integer),
Column('b', Integer),
Column('c', Integer),
constraint name would normally be generated from the table definition below::
long_names = Table(
- 'long_names', metadata,
+ 'long_names', metadata_obj,
Column('information_channel_code', Integer, key='a'),
Column('billing_convention_name', Integer, key='b'),
Column('product_identifier', Integer, key='c'),
Given a schema such as::
dv = Table(
- 'data_values', metadata,
+ 'data_values', metadata_obj,
Column('modulus', Integer, nullable=False),
Column('data', String(30)),
postgresql_partition_by='range(modulus)')
as several :class:`_schema.Column` -specific variants::
some_table = Table(
- 'some_table', metadata,
+ 'some_table', metadata_obj,
Column('id', Integer, primary_key=True, sqlite_on_conflict_primary_key='FAIL'),
Column('data', Integer),
UniqueConstraint('id', 'data', sqlite_on_conflict='IGNORE')
on :class:`_schema.Column`::
test = Table(
- 'test', metadata,
+ 'test', metadata_obj,
Column(
'id', Integer, primary_key=True, mssql_identity_start=100,
mssql_identity_increment=10
test = Table(
- 'test', metadata,
+ 'test', metadata_obj,
Column('id', Integer, primary_key=True, autoincrement=False),
Column('number', Integer, autoincrement=True)
)
from sqlalchemy import MetaData
- metadata = MetaData(bind=engine) # no longer supported
+ metadata_obj = MetaData(bind=engine) # no longer supported
- metadata.create_all() # requires Engine or Connection
+ metadata_obj.create_all() # requires Engine or Connection
- metadata.reflect() # requires Engine or Connection
+ metadata_obj.reflect() # requires Engine or Connection
- t = Table('t', metadata, autoload=True) # use autoload_with=engine
+ t = Table('t', metadata_obj, autoload=True) # use autoload_with=engine
result = engine.execute(t.select()) # no longer supported
from sqlalchemy import MetaData
- metadata = MetaData()
+ metadata_obj = MetaData()
# engine level:
# create tables
- metadata.create_all(engine)
+ metadata_obj.create_all(engine)
# reflect all tables
- metadata.reflect(engine)
+ metadata_obj.reflect(engine)
# reflect individual table
- t = Table('t', metadata, autoload_with=engine)
+ t = Table('t', metadata_obj, autoload_with=engine)
# connection level:
with engine.connect() as connection:
# create tables, requires explicit begin and/or commit:
with connection.begin():
- metadata.create_all(connection)
+ metadata_obj.create_all(connection)
# reflect all tables
- metadata.reflect(connection)
+ metadata_obj.reflect(connection)
# reflect individual table
- t = Table('t', metadata, autoload_with=connection)
+ t = Table('t', metadata_obj, autoload_with=connection)
# execute SQL statements
result = connection.execute(t.select())
# many choices
# bound metadata?
- metadata = MetaData(engine)
+ metadata_obj = MetaData(engine)
# or not?
- metadata = MetaData()
+ metadata_obj = MetaData()
# execute from engine?
result = engine.execute(stmt)
Examples of "structural" vs. "data" elements are as follows::
# table columns for CREATE TABLE - structural
- table = Table("table", metadata, Column('x', Integer), Column('y', Integer))
+ table = Table("table", metadata_obj, Column('x', Integer), Column('y', Integer))
# columns in a SELECT statement - structural
stmt = select(table.c.x, table.c.y)
from sqlalchemy import MetaData, Table, Column, Integer
- meta = MetaData()
- users_table = Table('users', meta,
+ metadata_obj = MetaData()
+ users_table = Table('users', metadata_obj,
Column('id', Integer, primary_key=True),
Column('name', String(50))
)
engine::
engine = create_engine('sqlite:///file.db')
- meta.bind = engine
+ metadata_obj.bind = engine
result = users_table.select().execute()
for row in result:
# ....
Given a table::
user_table = Table(
- 'user', metadata,
+ 'user', metadata_obj,
Column('id', Integer, primary_key=True),
Column('name', String(50))
)
connection = engine.raw_connection()
try:
- cursor = connection.cursor()
- cursor.callproc("my_procedure", ['x', 'y', 'z'])
- results = list(cursor.fetchall())
- cursor.close()
+ cursor_obj = connection.cursor()
+ cursor_obj.callproc("my_procedure", ['x', 'y', 'z'])
+ results = list(cursor_obj.fetchall())
+ cursor_obj.close()
connection.commit()
finally:
connection.close()
connection = engine.raw_connection()
try:
- cursor = connection.cursor()
- cursor.execute("select * from table1; select * from table2")
- results_one = cursor.fetchall()
- cursor.nextset()
- results_two = cursor.fetchall()
- cursor.close()
+ cursor_obj = connection.cursor()
+ cursor_obj.execute("select * from table1; select * from table2")
+ results_one = cursor_obj.fetchall()
+ cursor_obj.nextset()
+ results_two = cursor_obj.fetchall()
+ cursor_obj.close()
finally:
connection.close()
is specified by constructing a :class:`~sqlalchemy.schema.ForeignKey` object
as an argument to a :class:`~sqlalchemy.schema.Column` object::
- user_preference = Table('user_preference', metadata,
+ user_preference = Table('user_preference', metadata_obj,
Column('pref_id', Integer, primary_key=True),
Column('user_id', Integer, ForeignKey("user.user_id"), nullable=False),
Column('pref_name', String(40), nullable=False),
has a composite primary key. Below we define a table ``invoice`` which has a
composite primary key::
- invoice = Table('invoice', metadata,
+ invoice = Table('invoice', metadata_obj,
Column('invoice_id', Integer, primary_key=True),
Column('ref_num', Integer, primary_key=True),
Column('description', String(60), nullable=False)
And then a table ``invoice_item`` with a composite foreign key referencing
``invoice``::
- invoice_item = Table('invoice_item', metadata,
+ invoice_item = Table('invoice_item', metadata_obj,
Column('item_id', Integer, primary_key=True),
Column('item_name', String(60), nullable=False),
Column('invoice_id', Integer, nullable=False),
most forms of ALTER. Given a schema like::
node = Table(
- 'node', metadata,
+ 'node', metadata_obj,
Column('node_id', Integer, primary_key=True),
Column(
'primary_element', Integer,
)
element = Table(
- 'element', metadata,
+ 'element', metadata_obj,
Column('element_id', Integer, primary_key=True),
Column('parent_node_id', Integer),
ForeignKeyConstraint(
.. sourcecode:: pycon+sql
>>> with engine.connect() as conn:
- ... metadata.create_all(conn, checkfirst=False)
+ ... metadata_obj.create_all(conn, checkfirst=False)
{opensql}CREATE TABLE element (
element_id SERIAL NOT NULL,
parent_node_id INTEGER,
.. sourcecode:: pycon+sql
>>> with engine.connect() as conn:
- ... metadata.drop_all(conn, checkfirst=False)
+ ... metadata_obj.drop_all(conn, checkfirst=False)
{opensql}ALTER TABLE element DROP CONSTRAINT fk_element_parent_node_id
DROP TABLE node
DROP TABLE element
the ``'element'`` table as follows::
element = Table(
- 'element', metadata,
+ 'element', metadata_obj,
Column('element_id', Integer, primary_key=True),
Column('parent_node_id', Integer),
ForeignKeyConstraint(
.. sourcecode:: pycon+sql
>>> with engine.connect() as conn:
- ... metadata.create_all(conn, checkfirst=False)
+ ... metadata_obj.create_all(conn, checkfirst=False)
{opensql}CREATE TABLE element (
element_id SERIAL NOT NULL,
parent_node_id INTEGER,
arguments. The value is any string which will be output after the appropriate
"ON UPDATE" or "ON DELETE" phrase::
- child = Table('child', meta,
+ child = Table('child', metadata_obj,
Column('id', Integer,
ForeignKey('parent.id', onupdate="CASCADE", ondelete="CASCADE"),
primary_key=True
)
)
- composite = Table('composite', meta,
+ composite = Table('composite', metadata_obj,
Column('id', Integer, primary_key=True),
Column('rev_id', Integer),
Column('note_id', Integer),
from sqlalchemy import UniqueConstraint
- meta = MetaData()
- mytable = Table('mytable', meta,
+ metadata_obj = MetaData()
+ mytable = Table('mytable', metadata_obj,
# per-column anonymous unique constraint
Column('col1', Integer, unique=True),
from sqlalchemy import CheckConstraint
- meta = MetaData()
- mytable = Table('mytable', meta,
+ metadata_obj = MetaData()
+ mytable = Table('mytable', metadata_obj,
# per-column CHECK constraint
Column('col1', Integer, CheckConstraint('col1>5')),
from sqlalchemy import PrimaryKeyConstraint
- my_table = Table('mytable', metadata,
+ my_table = Table('mytable', metadata_obj,
Column('id', Integer),
Column('version_id', Integer),
Column('data', String(50)),
"pk": "pk_%(table_name)s"
}
- metadata = MetaData(naming_convention=convention)
+ metadata_obj = MetaData(naming_convention=convention)
The above convention will establish names for all constraints within
the target :class:`_schema.MetaData` collection.
For example, we can observe the name produced when we create an unnamed
:class:`.UniqueConstraint`::
- >>> user_table = Table('user', metadata,
+ >>> user_table = Table('user', metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('name', String(30), nullable=False),
... UniqueConstraint('name')
This same feature takes effect even if we just use the :paramref:`_schema.Column.unique`
flag::
- >>> user_table = Table('user', metadata,
+ >>> user_table = Table('user', metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('name', String(30), nullable=False, unique=True)
... )
hash of the long name. For example, the naming convention below will
generate very long names given the column names in use::
- metadata = MetaData(naming_convention={
+ metadata_obj = MetaData(naming_convention={
"uq": "uq_%(table_name)s_%(column_0_N_name)s"
})
long_names = Table(
- 'long_names', metadata,
+ 'long_names', metadata_obj,
Column('information_channel_code', Integer, key='a'),
Column('billing_convention_name', Integer, key='b'),
Column('product_identifier', Integer, key='c'),
Above, when we create a new :class:`_schema.ForeignKeyConstraint`, we will get a
name as follows::
- >>> metadata = MetaData(naming_convention=convention)
+ >>> metadata_obj = MetaData(naming_convention=convention)
- >>> user_table = Table('user', metadata,
+ >>> user_table = Table('user', metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('version', Integer, primary_key=True),
... Column('data', String(30))
... )
- >>> address_table = Table('address', metadata,
+ >>> address_table = Table('address', metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('user_id', Integer),
... Column('user_version_id', Integer)
to have a name already, and we then enhance it with other convention elements.
A typical convention is ``"ck_%(table_name)s_%(constraint_name)s"``::
- metadata = MetaData(
+ metadata_obj = MetaData(
naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
)
- Table('foo', metadata,
+ Table('foo', metadata_obj,
Column('value', Integer),
CheckConstraint('value > 5', name='value_gt_5')
)
:func:`_expression.column` element within the constraint's expression,
either by declaring the constraint separate from the table::
- metadata = MetaData(
+ metadata_obj = MetaData(
naming_convention={"ck": "ck_%(table_name)s_%(column_0_name)s"}
)
- foo = Table('foo', metadata,
+ foo = Table('foo', metadata_obj,
Column('value', Integer)
)
from sqlalchemy import column
- metadata = MetaData(
+ metadata_obj = MetaData(
naming_convention={"ck": "ck_%(table_name)s_%(column_0_name)s"}
)
- foo = Table('foo', metadata,
+ foo = Table('foo', metadata_obj,
Column('value', Integer),
CheckConstraint(column('value') > 5)
)
The name for the constraint here is most directly set up by sending
the "name" parameter, e.g. :paramref:`.Boolean.name`::
- Table('foo', metadata,
+ Table('foo', metadata_obj,
Column('flag', Boolean(name='ck_foo_flag'))
)
normally by using a convention which includes ``%(constraint_name)s``
and then applying a name to the type::
- metadata = MetaData(
+ metadata_obj = MetaData(
naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
)
- Table('foo', metadata,
+ Table('foo', metadata_obj,
Column('flag', Boolean(name='flag_bool'))
)
which works nicely with :class:`.SchemaType` since these constraints have
only one column::
- metadata = MetaData(
+ metadata_obj = MetaData(
naming_convention={"ck": "ck_%(table_name)s_%(column_0_name)s"}
)
- Table('foo', metadata,
+ Table('foo', metadata_obj,
Column('flag', Boolean())
)
.. sourcecode:: python+sql
- meta = MetaData()
- mytable = Table('mytable', meta,
+ metadata_obj = MetaData()
+ mytable = Table('mytable', metadata_obj,
# an indexed column, with index "ix_mytable_col1"
Column('col1', Integer, index=True),
"inline" definition inside the :class:`_schema.Table`, using string names to
identify columns::
- meta = MetaData()
- mytable = Table('mytable', meta,
+ metadata_obj = MetaData()
+ mytable = Table('mytable', metadata_obj,
Column('col1', Integer),
Column('col2', Integer),
def column_expression(self, col):
return func.pgp_sym_decrypt(col, self.passphrase)
- metadata = MetaData()
- message = Table('message', metadata,
+ metadata_obj = MetaData()
+ message = Table('message', metadata_obj,
Column('username', String(50)),
Column('message',
PGPString("this is my passphrase")),
engine = create_engine("postgresql://scott:tiger@localhost/test", echo=True)
with engine.begin() as conn:
- metadata.create_all(conn)
+ metadata_obj.create_all(conn)
conn.execute(message.insert(), username="some user",
message="this is my message")
The simplest kind of default is a scalar value used as the default value of a column::
- Table("mytable", meta,
+ Table("mytable", metadata_obj,
Column("somecolumn", Integer, default=12)
)
not very common (as UPDATE statements are usually looking for dynamic
defaults)::
- Table("mytable", meta,
+ Table("mytable", metadata_obj,
Column("somecolumn", Integer, onupdate=25)
)
i += 1
return i
- t = Table("mytable", meta,
+ t = Table("mytable", metadata_obj,
Column('id', Integer, primary_key=True, default=mydefault),
)
import datetime
- t = Table("mytable", meta,
+ t = Table("mytable", metadata_obj,
Column('id', Integer, primary_key=True),
# define 'last_updated' to be populated with datetime.now()
def mydefault(context):
return context.get_current_parameters()['counter'] + 12
- t = Table('mytable', meta,
+ t = Table('mytable', metadata_obj,
Column('counter', Integer),
Column('counter_plus_twelve', Integer, default=mydefault, onupdate=mydefault)
)
also be passed SQL expressions, which are in most cases rendered inline within the
INSERT or UPDATE statement::
- t = Table("mytable", meta,
+ t = Table("mytable", metadata_obj,
Column('id', Integer, primary_key=True),
# define 'create_date' to default to now()
.. sourcecode:: python+sql
- t = Table('test', meta,
+ t = Table('test', metadata_obj,
Column('abc', String(20), server_default='abc'),
Column('created_at', DateTime, server_default=func.sysdate()),
Column('index_value', Integer, server_default=text("0"))
from sqlalchemy.schema import FetchedValue
- t = Table('test', meta,
+ t = Table('test', metadata_obj,
Column('id', Integer, primary_key=True),
Column('abc', TIMESTAMP, server_default=FetchedValue()),
Column('def', String(20), server_onupdate=FetchedValue())
configured to fire off during UPDATE operations if desired. It is most
commonly used in conjunction with a single integer primary key column::
- table = Table("cartitems", meta,
+ table = Table("cartitems", metadata_obj,
Column(
"cart_id",
Integer,
- Sequence('cart_id_seq', metadata=meta), primary_key=True),
+ Sequence('cart_id_seq', metadata=metadata_obj), primary_key=True),
Column("description", String(40)),
Column("createdate", DateTime())
)
For many years, the SQLAlchemy documentation referred to the
example of associating a :class:`.Sequence` with a table as follows::
- table = Table("cartitems", meta,
+ table = Table("cartitems", metadata_obj,
Column("cart_id", Integer, Sequence('cart_id_seq'),
primary_key=True),
Column("description", String(40)),
the :class:`.Sequence` in most cases be explicitly associated with the
:class:`_schema.MetaData`, using the :paramref:`.Sequence.metadata` parameter::
- table = Table("cartitems", meta,
+ table = Table("cartitems", metadata_obj,
Column(
"cart_id",
Integer,
- Sequence('cart_id_seq', metadata=meta), primary_key=True),
+ Sequence('cart_id_seq', metadata=metadata_obj), primary_key=True),
Column("description", String(40)),
Column("createdate", DateTime())
)
:class:`_schema.Column` as the **Python side default generator**::
Column(
- "cart_id", Integer, Sequence('cart_id_seq', metadata=meta),
+ "cart_id", Integer, Sequence('cart_id_seq', metadata=metadata_obj),
primary_key=True)
In the above case, the :class:`.Sequence` will automatically be subject
:class:`_schema.Column` both as the Python-side default generator as well as
the server-side default generator::
- cart_id_seq = Sequence('cart_id_seq', metadata=meta)
- table = Table("cartitems", meta,
+ cart_id_seq = Sequence('cart_id_seq', metadata=metadata_obj)
+ table = Table("cartitems", metadata_obj,
Column(
"cart_id", Integer, cart_id_seq,
server_default=cart_id_seq.next_value(), primary_key=True),
from sqlalchemy import Table, Column, MetaData, Integer, Computed
- metadata = MetaData()
+ metadata_obj = MetaData()
square = Table(
"square",
- metadata,
+ metadata_obj,
Column("id", Integer, primary_key=True),
Column("side", Integer),
Column("area", Integer, Computed("side * side")),
from sqlalchemy import Table, Column, MetaData, Integer, Computed
- metadata = MetaData()
+ metadata_obj = MetaData()
data = Table(
"data",
- metadata,
+ metadata_obj,
Column('id', Integer, Identity(start=42, cycle=True), primary_key=True),
Column('data', String)
)
@event.listens_for(engine, "connect")
def connect(dbapi_connection, connection_record):
- cursor = dbapi_connection.cursor()
- cursor.execute("SET some session variables")
- cursor.close()
+ cursor_obj = dbapi_connection.cursor()
+ cursor_obj.execute("SET some session variables")
+ cursor_obj.close()
Fully Replacing the DBAPI ``connect()`` function
from sqlalchemy import *
- metadata = MetaData()
+ metadata_obj = MetaData()
:class:`~sqlalchemy.schema.MetaData` is a container object that keeps together
many different features of a database (or multiple databases) being described.
The remaining positional arguments are mostly
:class:`~sqlalchemy.schema.Column` objects describing each column::
- user = Table('user', metadata,
+ user = Table('user', metadata_obj,
Column('user_id', Integer, primary_key=True),
Column('user_name', String(16), nullable=False),
Column('email_address', String(60)),
dependency (that is, each table is preceded by all tables which it
references)::
- >>> for t in metadata.sorted_tables:
+ >>> for t in metadata_obj.sorted_tables:
... print(t.name)
user
user_preference
accessors which allow inspection of its properties. Given the following
:class:`~sqlalchemy.schema.Table` definition::
- employees = Table('employees', metadata,
+ employees = Table('employees', metadata_obj,
Column('employee_id', Integer, primary_key=True),
Column('employee_name', String(60), nullable=False),
Column('employee_dept', Integer, ForeignKey("departments.department_id"))
engine = create_engine('sqlite:///:memory:')
- metadata = MetaData()
+ metadata_obj = MetaData()
- user = Table('user', metadata,
+ user = Table('user', metadata_obj,
Column('user_id', Integer, primary_key=True),
Column('user_name', String(16), nullable=False),
Column('email_address', String(60), key='email'),
Column('nickname', String(50), nullable=False)
)
- user_prefs = Table('user_prefs', metadata,
+ user_prefs = Table('user_prefs', metadata_obj,
Column('pref_id', Integer, primary_key=True),
Column('user_id', Integer, ForeignKey("user.user_id"), nullable=False),
Column('pref_name', String(40), nullable=False),
Column('pref_value', String(100))
)
- {sql}metadata.create_all(engine)
+ {sql}metadata_obj.create_all(engine)
PRAGMA table_info(user){}
CREATE TABLE user(
user_id INTEGER NOT NULL PRIMARY KEY,
engine = create_engine('sqlite:///:memory:')
- meta = MetaData()
+ metadata_obj = MetaData()
- employees = Table('employees', meta,
+ employees = Table('employees', metadata_obj,
Column('employee_id', Integer, primary_key=True),
Column('employee_name', String(60), nullable=False, key='name'),
Column('employee_dept', Integer, ForeignKey("departments.department_id"))
The most basic example is that of the :paramref:`_schema.Table.schema` argument
using a Core :class:`_schema.Table` object as follows::
- metadata = MetaData()
+ metadata_obj = MetaData()
financial_info = Table(
'financial_info',
- metadata,
+ metadata_obj,
Column('id', Integer, primary_key=True),
Column('value', String(100), nullable=False),
schema='remote_banks'
in the :attr:`_schema.MetaData.tables` collection by searching for the
key ``'remote_banks.financial_info'``::
- >>> metadata.tables['remote_banks.financial_info']
+ >>> metadata_obj.tables['remote_banks.financial_info']
Table('financial_info', MetaData(),
Column('id', Integer(), table=<financial_info>, primary_key=True, nullable=False),
Column('value', String(length=100), table=<financial_info>, nullable=False),
customer = Table(
"customer",
- metadata,
+ metadata_obj,
Column('id', Integer, primary_key=True),
Column('financial_info_id', ForeignKey("remote_banks.financial_info.id")),
schema='remote_banks'
:paramref:`_schema.MetaData.schema` argument to the top level :class:`_schema.MetaData`
construct::
- metadata = MetaData(schema="remote_banks")
+ metadata_obj = MetaData(schema="remote_banks")
financial_info = Table(
'financial_info',
- metadata,
+ metadata_obj,
Column('id', Integer, primary_key=True),
Column('value', String(100), nullable=False),
)
includes that the :class:`_schema.Table` is cataloged in the :class:`_schema.MetaData`
using the schema-qualified name, that is::
- metadata.tables['remote_banks.financial_info']
+ metadata_obj.tables['remote_banks.financial_info']
When using the :class:`_schema.ForeignKey` or :class:`_schema.ForeignKeyConstraint`
objects to refer to this table, either the schema-qualified name or the
refers_to_financial_info = Table(
'refers_to_financial_info',
- metadata,
+ metadata_obj,
Column('id', Integer, primary_key=True),
Column('fiid', ForeignKey('financial_info.id')),
)
refers_to_financial_info = Table(
'refers_to_financial_info',
- metadata,
+ metadata_obj,
Column('id', Integer, primary_key=True),
Column('fiid', ForeignKey('remote_banks.financial_info.id')),
)
from sqlalchemy import BLANK_SCHEMA
- metadata = MetaData(schema="remote_banks")
+ metadata_obj = MetaData(schema="remote_banks")
financial_info = Table(
'financial_info',
- metadata,
+ metadata_obj,
Column('id', Integer, primary_key=True),
Column('value', String(100), nullable=False),
schema=BLANK_SCHEMA # will not use "remote_banks"
@event.listens_for(engine, "connect", insert=True)
def set_current_schema(dbapi_connection, connection_record):
- cursor = dbapi_connection.cursor()
- cursor.execute("ALTER SESSION SET CURRENT_SCHEMA=%s" % schema_name)
- cursor.close()
+ cursor_obj = dbapi_connection.cursor()
+ cursor_obj.execute("ALTER SESSION SET CURRENT_SCHEMA=%s" % schema_name)
+ cursor_obj.close()
Above, the ``set_current_schema()`` event handler will take place immediately
when the above :class:`_engine.Engine` first connects; as the event is
"InnoDB". This can be expressed with :class:`~sqlalchemy.schema.Table` using
``mysql_engine``::
- addresses = Table('engine_email_addresses', meta,
+ addresses = Table('engine_email_addresses', metadata_obj,
Column('address_id', Integer, primary_key=True),
Column('remote_user_id', Integer, ForeignKey(users.c.user_id)),
Column('email_address', String(20)),
>>> from sqlalchemy import create_engine
>>> engine = create_engine("sqlite+pysqlite:///:memory:", echo=True, future=True)
>>> from sqlalchemy import MetaData, Table, Column, Integer, String
- >>> metadata = MetaData()
+ >>> metadata_obj = MetaData()
>>> user_table = Table(
... "user_account",
- ... metadata,
+ ... metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('name', String(30)),
... Column('fullname', String)
>>> from sqlalchemy import ForeignKey
>>> address_table = Table(
... "address",
- ... metadata,
+ ... metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('user_id', None, ForeignKey('user_account.id')),
... Column('email_address', String, nullable=False)
... )
- >>> metadata.create_all(engine)
+ >>> metadata_obj.create_all(engine)
BEGIN (implicit)
...
>>> from sqlalchemy.orm import declarative_base
conn = mypool.connect()
# use it
- cursor = conn.cursor()
- cursor.execute("select foo")
+ cursor_obj = conn.cursor()
+ cursor_obj.execute("select foo")
The purpose of the transparent proxy is to intercept the ``close()`` call,
such that instead of the DBAPI connection being closed, it is returned to the
located tables are present within the :class:`~sqlalchemy.schema.MetaData`
object's dictionary of tables::
- meta = MetaData()
- meta.reflect(bind=someengine)
- users_table = meta.tables['users']
- addresses_table = meta.tables['addresses']
+ metadata_obj = MetaData()
+ metadata_obj.reflect(bind=someengine)
+ users_table = metadata_obj.tables['users']
+ addresses_table = metadata_obj.tables['addresses']
``metadata.reflect()`` also provides a handy way to clear or delete all the rows in a database::
- meta = MetaData()
- meta.reflect(bind=someengine)
- for table in reversed(meta.sorted_tables):
+ metadata_obj = MetaData()
+ metadata_obj.reflect(bind=someengine)
+ for table in reversed(metadata_obj.sorted_tables):
someengine.execute(table.delete())
.. _metadata_reflection_inspector:
>>> from sqlalchemy import MetaData, Table, create_engine
>>> mysql_engine = create_engine("mysql://scott:tiger@localhost/test")
- >>> metadata = MetaData()
- >>> my_mysql_table = Table("my_table", metadata, autoload_with=mysql_engine)
+ >>> metadata_obj = MetaData()
+ >>> my_mysql_table = Table("my_table", metadata_obj, autoload_with=mysql_engine)
The above example reflects the above table schema into a new :class:`_schema.Table`
object. We can then, for demonstration purposes, print out the MySQL-specific
.. sourcecode:: pycon+sql
>>> from sqlalchemy import event
- >>> metadata = MetaData()
+ >>> metadata_obj = MetaData()
- >>> @event.listens_for(metadata, "column_reflect")
+ >>> @event.listens_for(metadata_obj, "column_reflect")
... def genericize_datatypes(inspector, tablename, column_dict):
... column_dict["type"] = column_dict["type"].as_generic()
- >>> my_generic_table = Table("my_table", metadata, autoload_with=mysql_engine)
+ >>> my_generic_table = Table("my_table", metadata_obj, autoload_with=mysql_engine)
We now get a new :class:`_schema.Table` that is generic and uses
:class:`_types.Integer` for those datatypes. We can now emit a
.. sourcecode:: pycon+sql
>>> from sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey
- >>> metadata = MetaData()
- >>> users = Table('users', metadata,
+ >>> metadata_obj = MetaData()
+ >>> users = Table('users', metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('name', String),
... Column('fullname', String),
... )
- >>> addresses = Table('addresses', metadata,
+ >>> addresses = Table('addresses', metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('user_id', None, ForeignKey('users.id')),
... Column('email_address', String, nullable=False)
.. sourcecode:: pycon+sql
- {sql}>>> metadata.create_all(engine)
+ {sql}>>> metadata_obj.create_all(engine)
BEGIN...
CREATE TABLE users (
id INTEGER NOT NULL,
A full, foolproof :class:`~sqlalchemy.schema.Table` is therefore::
- users = Table('users', metadata,
+ users = Table('users', metadata_obj,
Column('id', Integer, Sequence('user_id_seq'), primary_key=True),
Column('name', String(50)),
Column('fullname', String(50)),
engine = create_engine("sqlite://")
Session = sessionmaker()
- metadata = MetaData(bind=engine)
- Base = declarative_base(metadata=metadata)
+ metadata_obj = MetaData(bind=engine)
+ Base = declarative_base(metadata=metadata_obj)
class MyClass(Base):
# ...
:meth:`.Executable.execute` method directly off of a Core expression object
that is not associated with any :class:`_engine.Engine`::
- metadata = MetaData()
- table = Table('t', metadata, Column('q', Integer))
+ metadata_obj = MetaData()
+ table = Table('t', metadata_obj, Column('q', Integer))
stmt = select(table)
result = stmt.execute() # <--- raises
been **bound** to a :class:`_engine.Engine`::
engine = create_engine("mysql+pymysql://user:pass@host/db")
- metadata = MetaData(bind=engine)
+ metadata_obj = MetaData(bind=engine)
Where above, any statement that derives from a :class:`_schema.Table` which
in turn derives from that :class:`_schema.MetaData` will implicitly make use of
trans.rollback()
time.sleep(retry_interval)
- context.cursor = cursor = connection.connection.cursor()
+ context.cursor = cursor_obj = connection.connection.cursor()
else:
raise
else:
e = engine.execution_options(isolation_level="AUTOCOMMIT")
@event.listens_for(e, "do_execute_no_params")
- def do_execute_no_params(cursor, statement, context):
+ def do_execute_no_params(cursor_obj, statement, context):
return _run_with_retries(
- context.dialect.do_execute_no_params, context, cursor, statement
+ context.dialect.do_execute_no_params, context, cursor_obj, statement
)
@event.listens_for(e, "do_execute")
- def do_execute(cursor, statement, parameters, context):
+ def do_execute(cursor_obj, statement, parameters, context):
return _run_with_retries(
- context.dialect.do_execute, context, cursor, statement, parameters
+ context.dialect.do_execute, context, cursor_obj, statement, parameters
)
return e
engine = create_engine(...)
conn = engine.connect()
conn.connection.<do DBAPI things>
- cursor = conn.connection.cursor(<DBAPI specific arguments..>)
+ cursor_obj = conn.connection.cursor(<DBAPI specific arguments..>)
You must ensure that you revert any isolation level settings or other
operation-specific settings on the connection back to normal before returning
This is available via the :attr:`_schema.MetaData.sorted_tables` function::
- metadata = MetaData()
+ metadata_obj = MetaData()
# ... add Table objects to metadata
- ti = metadata.sorted_tables:
+ ti = metadata_obj.sorted_tables
for t in ti:
print(t)
def dump(sql, *multiparams, **params):
print(sql.compile(dialect=engine.dialect))
engine = create_mock_engine('postgresql://', dump)
- metadata.create_all(engine, checkfirst=False)
+ metadata_obj.create_all(engine, checkfirst=False)
The `Alembic <https://alembic.sqlalchemy.org>`_ tool also supports
an "offline" SQL generation mode that renders database migrations as SQL scripts.
class MyMixin(object):
@classmethod
- def __table_cls__(cls, name, metadata, *arg, **kw):
+ def __table_cls__(cls, name, metadata_obj, *arg, **kw):
return Table(
"my_" + name,
- metadata, *arg, **kw
+ metadata_obj, *arg, **kw
)
The above mixin would cause all :class:`_schema.Table` objects generated to include
or :func:`_orm.declarative_base`::
from sqlalchemy import MetaData
- metadata = MetaData(schema="some_schema")
+ metadata_obj = MetaData(schema="some_schema")
- Base = declarative_base(metadata = metadata)
+ Base = declarative_base(metadata = metadata_obj)
class MyClass(Base):
A **semi-classical mapping** for example makes use of Declarative, but
establishes the :class:`_schema.Table` objects separately::
- metadata = Base.metadata
+ metadata_obj = Base.metadata
employees_table = Table(
- 'employee', metadata,
+ 'employee', metadata_obj,
Column('id', Integer, primary_key=True),
Column('name', String(50)),
)
managers_table = Table(
- 'manager', metadata,
+ 'manager', metadata_obj,
Column('id', Integer, primary_key=True),
Column('name', String(50)),
Column('manager_data', String(50)),
)
engineers_table = Table(
- 'engineer', metadata,
+ 'engineer', metadata_obj,
Column('id', Integer, primary_key=True),
Column('name', String(50)),
Column('engineer_info', String(50)),
from sqlalchemy import Integer, ForeignKey, String, Column, Table, MetaData
from sqlalchemy.orm import relationship, registry
- metadata = MetaData()
+ metadata_obj = MetaData()
mapper_registry = registry()
- node_to_node = Table("node_to_node", metadata,
+ node_to_node = Table("node_to_node", metadata_obj,
Column("left_node_id", Integer, ForeignKey("node.id"), primary_key=True),
Column("right_node_id", Integer, ForeignKey("node.id"), primary_key=True)
)
- node = Table("node", metadata,
+ node = Table("node", metadata_obj,
Column('id', Integer, primary_key=True),
Column('label', String)
)
via the ``properties`` dictionary. The example below illustrates a second :class:`_schema.Table`
object, mapped to a class called ``Address``, then linked to ``User`` via :func:`_orm.relationship`::
- address = Table('address', metadata,
+ address = Table('address', metadata_obj,
Column('id', Integer, primary_key=True),
Column('user_id', Integer, ForeignKey('user.id')),
Column('email_address', String(50))
user_id: int = field(init=False)
email_address: str = None
- metadata = MetaData()
+ metadata_obj = MetaData()
user = Table(
'user',
- metadata,
+ metadata_obj,
Column('id', Integer, primary_key=True),
Column('name', String(50)),
Column('fullname', String(50)),
address = Table(
'address',
- metadata,
+ metadata_obj,
Column('id', Integer, primary_key=True),
Column('user_id', Integer, ForeignKey('user.id')),
Column('email_address', String(50)),
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import column_property
- metadata = MetaData()
+ metadata_obj = MetaData()
# define two Table objects
- user_table = Table('user', metadata,
+ user_table = Table('user', metadata_obj,
Column('id', Integer, primary_key=True),
Column('name', String),
)
- address_table = Table('address', metadata,
+ address_table = Table('address', metadata_obj,
Column('id', Integer, primary_key=True),
Column('user_id', Integer, ForeignKey('user.id')),
Column('email_address', String)
>>> from sqlalchemy import create_engine
>>> engine = create_engine("sqlite+pysqlite:///:memory:", echo=True, future=True)
>>> from sqlalchemy import MetaData, Table, Column, Integer, String
- >>> metadata = MetaData()
+ >>> metadata_obj = MetaData()
>>> user_table = Table(
... "user_account",
- ... metadata,
+ ... metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('name', String(30)),
... Column('fullname', String)
>>> from sqlalchemy import ForeignKey
>>> address_table = Table(
... "address",
- ... metadata,
+ ... metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('user_id', None, ForeignKey('user_account.id')),
... Column('email_address', String, nullable=False)
... )
>>> orders_table = Table(
... "user_order",
- ... metadata,
+ ... metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('user_id', None, ForeignKey('user_account.id')),
... Column('email_address', String, nullable=False)
... )
>>> order_items_table = Table(
... "order_items",
- ... metadata,
+ ... metadata_obj,
... Column("order_id", ForeignKey("user_order.id"), primary_key=True),
... Column("item_id", ForeignKey("item.id"), primary_key=True)
... )
>>> items_table = Table(
... "item",
- ... metadata,
+ ... metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('name', String),
... Column('description', String)
... )
- >>> metadata.create_all(engine)
+ >>> metadata_obj.create_all(engine)
BEGIN (implicit)
...
>>> from sqlalchemy.orm import declarative_base
{sql}>>> stmt = session.query(Address).\
... filter(Address.email_address != 'j25@yahoo.com').\
... subquery()
- >>> adalias = aliased(Address, stmt)
- >>> for user, address in session.query(User, adalias).\
- ... join(adalias, User.addresses):
+ >>> addr_alias = aliased(Address, stmt)
+ >>> for user, address in session.query(User, addr_alias).\
+ ... join(addr_alias, User.addresses):
... print(user)
... print(address)
SELECT users.id AS users_id,
.. sourcecode:: pycon+sql
>>> from sqlalchemy import select, bindparam
- >>> scalar_subquery = (
+ >>> scalar_subq = (
... select(user_table.c.id).
... where(user_table.c.name==bindparam('username')).
... scalar_subquery()
>>> with engine.connect() as conn:
... result = conn.execute(
- ... insert(address_table).values(user_id=scalar_subquery),
+ ... insert(address_table).values(user_id=scalar_subq),
... [
... {"username": 'spongebob', "email_address": "spongebob@sqlalchemy.org"},
... {"username": 'sandy', "email_address": "sandy@sqlalchemy.org"},
.. sourcecode:: python+sql
- >>> cte = select(Address).where(~Address.email_address.like('%@aol.com')).cte()
- >>> address_cte = aliased(Address, cte)
+ >>> cte_obj = select(Address).where(~Address.email_address.like('%@aol.com')).cte()
+ >>> address_cte = aliased(Address, cte_obj)
>>> stmt = select(User, address_cte).join_from(User, address_cte).order_by(User.id, address_cte.id)
>>> with Session(engine) as session:
... for user, address in session.execute(stmt):
name. Constructing this object looks like::
>>> from sqlalchemy import MetaData
- >>> metadata = MetaData()
+ >>> metadata_obj = MetaData()
Having a single :class:`_schema.MetaData` object for an entire application is
the most common case, represented as a module-level variable in a single place
>>> from sqlalchemy import Table, Column, Integer, String
>>> user_table = Table(
... "user_account",
- ... metadata,
+ ... metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('name', String(30)),
... Column('fullname', String)
>>> from sqlalchemy import ForeignKey
>>> address_table = Table(
... "address",
- ... metadata,
+ ... metadata_obj,
... Column('id', Integer, primary_key=True),
... Column('user_id', ForeignKey('user_account.id'), nullable=False),
... Column('email_address', String, nullable=False)
.. sourcecode:: pycon+sql
- >>> metadata.create_all(engine)
+ >>> metadata_obj.create_all(engine)
{opensql}BEGIN (implicit)
PRAGMA main.table_...info("user_account")
...
.. sourcecode:: pycon+sql
- >>> some_table = Table("some_table", metadata, autoload_with=engine)
+ >>> some_table = Table("some_table", metadata_obj, autoload_with=engine)
{opensql}BEGIN (implicit)
PRAGMA main.table_...info("some_table")
[raw sql] ()