sleep 10
docker exec -ti postgres psql -U scott -c 'CREATE SCHEMA test_schema; CREATE SCHEMA test_schema_2;' test
# this last command is optional
- docker exec -ti postgres bash sed -i 's/#max_prepared_transactions = 0/max_prepared_transactions = 10/g' /var/lib/postgresql/data/postgresql.conf
+ docker exec -ti postgres sed -i 's/#max_prepared_transactions = 0/max_prepared_transactions = 10/g' /var/lib/postgresql/data/postgresql.conf
# To stop the container. It will also remove it.
docker stop postgres
SQLAlchemy 2.0 itself, and support for this new system in full is expected
to occur over the course of many major releases.
+.. _migration_20_autocommit:
+
Library-level (but not driver level) "Autocommit" removed from both Core and ORM
================================================================================
conn = engine.connect()
# won't autocommit in 2.0
- conn.execute("INSERT INTO table (foo) VALUES ('bar')")
+ conn.execute(text("INSERT INTO table (foo) VALUES ('bar')"))
The options to force "autocommit" for specific connections or statements
are also removed::
conn.execute(some_table.insert().values(foo='bar'))
conn.commit()
- conn.execute("some other SQL")
+ conn.execute(text("some other SQL"))
conn.rollback()
For the ORM, the above two patterns are already more or less how the
rollback will occur. This is the case already for Core and ORM::
with engine.connect() as conn:
- results = conn.execute("select * from some_table")
+ results = conn.execute(text("select * from some_table"))
return results
# connection is returned to the pool, transaction is implicitly
To use a connection in autocommit mode::
with engine.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
- result = conn.execute(...)
+ result = conn.execute(stmt)
The above code is already available in current SQLAlchemy releases. Driver
support is available for PostgreSQL, MySQL, SQL Server, and newer releases
will include support for Oracle and SQLite as well.
+.. _migration_20_implicit_execution:
"Implicit" and "Connectionless" execution, "bound metadata" removed
====================================================================
engine is initialized per process. See :ref:`pooling_multiprocessing` for
details.
-
The most basic function of the :class:`.Engine` is to provide access to a
:class:`.Connection`, which can then invoke SQL statements. To emit
a textual statement to the database looks like::
+ from sqlalchemy import text
+
with engine.connect() as connection:
- result = connection.execute("select username from users")
+ result = connection.execute(text("select username from users"))
for row in result:
print("username:", row['username'])
any transactional state or locks are removed, and the connection is ready for
its next use.
-Our example above illustrated the execution of a textual SQL string.
-The :meth:`~.Connection.execute` method can of course accommodate more than
-that, including the variety of SQL expression constructs described
-in :ref:`sqlexpression_toplevel`.
+.. deprecated:: 2.0 The :class:`.ResultProxy` object is replaced in SQLAlchemy
+ 2.0 with a newly refined object known as :class:`.Result`.
+
+Our example above illustrated the execution of a textual SQL string, which
+should be invoked by using the :func:`.text` construct to indicate that
+we'd like to use textual SQL. The :meth:`~.Connection.execute` method can of
+course accommodate more than that, including the variety of SQL expression
+constructs described in :ref:`sqlexpression_toplevel`.
+
Using Transactions
==================
def method_b(connection):
with connection.begin(): # open a transaction - this runs in the
# context of method_a's transaction
- connection.execute("insert into mytable values ('bat', 'lala')")
+ connection.execute(text("insert into mytable values ('bat', 'lala')"))
connection.execute(mytable.insert(), {"col1": "bat", "col2": "lala"})
# open a Connection and call method_a
Understanding Autocommit
========================
-.. note:: The "autocommit" feature of SQLAlchemy is a legacy feature that will
- be deprecated in an upcoming release. New usage paradigms will eliminate
- the need for it to be present.
+.. deprecated:: 2.0 The "autocommit" feature of SQLAlchemy Core is deprecated
+ and will not be present in version 2.0 of SQLAlchemy.
+ See :ref:`migration_20_autocommit` for background.
The previous transaction example illustrates how to use :class:`.Transaction`
so that several executions can take part in the same transaction. What happens
for a particular backend::
conn = engine.connect()
- conn.execute("INSERT INTO users VALUES (1, 'john')") # autocommits
+ conn.execute(text("INSERT INTO users VALUES (1, 'john')")) # autocommits
The "autocommit" feature is only in effect when no :class:`.Transaction` has
otherwise been declared. This means the feature is not generally used with
.. _dbengine_implicit:
+
Connectionless Execution, Implicit Execution
============================================
-.. note:: "Connectionless" and "implicit" execution are legacy SQLAlchemy
- features that will be deprecated in an upcoming release.
+.. deprecated:: 2.0 The features of "connectionless" and "implicit" execution
+ in SQLAlchemy are deprecated and will be removed in version 2.0. See
+ :ref:`migration_20_implicit_execution` for background.
Recall from the first section we mentioned executing with and without explicit
usage of :class:`.Connection`. "Connectionless" execution
-refers to the usage of the ``execute()`` method on an object which is not a
-:class:`.Connection`. This was illustrated using the :meth:`~.Engine.execute` method
-of :class:`.Engine`::
+refers to the usage of the ``execute()`` method on an object
+which is not a :class:`.Connection`. This was illustrated using the
+:meth:`~.Engine.execute` method of :class:`.Engine`::
- result = engine.execute("select username from users")
+ result = engine.execute(text("select username from users"))
for row in result:
print("username:", row['username'])
.. _dbapi_connections:
-Working with Raw DBAPI Connections
-==================================
+Working with Driver SQL and Raw DBAPI Connections
+=================================================
+
+The introduction on using :meth:`.Connection.execute` made use of the
+:func:`.sql.text` construct in order to illustrate how textual SQL statements
+may be invoked. When working with SQLAlchemy, textual SQL is actually more
+of the exception rather than the norm, as the Core expression language
+and the ORM both abstract away the textual representation of SQL. Hpwever, the
+:func:`.sql.text` construct itself also provides some abstraction of textual
+SQL in that it normalizes how bound parameters are passed, as well as that
+it supports datatyping behavior for parameters and result set rows.
+
+Invoking SQL strings directly to the driver
+--------------------------------------------
+
+For the use case where one wants to invoke textual SQL directly passed to the
+underlying driver (known as the :term:`DBAPI`) without any intervention
+from the :func:`.sql.text` construct, the :meth:`.Connection.exec_driver_sql`
+method may be used::
+
+ with engine.connect() as conn:
+ conn.exec_driver_sql("SET param='bar'")
+
+
+.. versionadded:: 1.4 Added the :meth:`.Connection.exec_driver_sql` method.
+
+Working with the DBAPI cursor directly
+--------------------------------------
There are some cases where SQLAlchemy does not provide a genericized way
at accessing some :term:`DBAPI` functions, such as calling stored procedures as well
Altering Schemas through Migrations
-----------------------------------
-While SQLAlchemy directly supports emitting CREATE and DROP statements for schema
-constructs, the ability to alter those constructs, usually via the ALTER statement
-as well as other database-specific constructs, is outside of the scope of SQLAlchemy
-itself. While it's easy enough to emit ALTER statements and similar by hand,
-such as by passing a string to :meth:`.Connection.execute` or by using the
-:class:`.DDL` construct, it's a common practice to automate the maintenance of
-database schemas in relation to application code using schema migration tools.
+While SQLAlchemy directly supports emitting CREATE and DROP statements for
+schema constructs, the ability to alter those constructs, usually via the ALTER
+statement as well as other database-specific constructs, is outside of the
+scope of SQLAlchemy itself. While it's easy enough to emit ALTER statements
+and similar by hand, such as by passing a :func:`.text` construct to
+:meth:`.Connection.execute` or by using the :class:`.DDL` construct, it's a
+common practice to automate the maintenance of database schemas in relation to
+application code using schema migration tools.
There are two major migration tools available for SQLAlchemy:
try:
# suppose the database has been restarted.
- c.execute("SELECT * FROM table")
+ c.execute(text("SELECT * FROM table"))
c.close()
except exc.DBAPIError, e:
# an exception is raised, Connection is invalidated.
# after the invalidate event, a new connection
# starts with a new Pool
c = e.connect()
- c.execute("SELECT * FROM table")
+ c.execute(text("SELECT * FROM table"))
The above example illustrates that no special intervention is needed to
refresh the pool, which continues normally after a disconnection event is
engine.dispose()
with engine.connect() as conn:
- conn.execute("...")
+ conn.execute(text("..."))
p = Process(target=run_in_process)
transaction2 = connection.begin()
transaction2.rollback()
- connection.execute("select 1") # we are rolled back; will now raise
+ connection.execute(text("select 1")) # we are rolled back; will now raise
transaction.rollback()
to release transactions as quickly as possible. The most common cause
of hanging transactions is a non-fully consumed result set, i.e.::
- result = engine.execute("select * from table")
+ result = engine.execute(text("select * from table"))
row = result.fetchone()
return
FROM rdb$relations
WHERE rdb$relation_name=?)
"""
- c = connection.execute(tblqry, [self.denormalize_name(table_name)])
+ c = connection.exec_driver_sql(
+ tblqry, [self.denormalize_name(table_name)]
+ )
return c.first() is not None
def has_sequence(self, connection, sequence_name, schema=None):
FROM rdb$generators
WHERE rdb$generator_name=?)
"""
- c = connection.execute(genqry, [self.denormalize_name(sequence_name)])
+ c = connection.exec_driver_sql(
+ genqry, [self.denormalize_name(sequence_name)]
+ )
return c.first() is not None
@reflection.cache
# FROM rdb$relation_fields
# WHERE rdb$system_flag=0 AND rdb$view_context IS NULL
- return [self.normalize_name(row[0]) for row in connection.execute(s)]
+ return [
+ self.normalize_name(row[0])
+ for row in connection.exec_driver_sql(s)
+ ]
@reflection.cache
def get_view_names(self, connection, schema=None, **kw):
where rdb$view_blr is not null
and (rdb$system_flag is null or rdb$system_flag = 0);
"""
- return [self.normalize_name(row[0]) for row in connection.execute(s)]
+ return [
+ self.normalize_name(row[0])
+ for row in connection.exec_driver_sql(s)
+ ]
@reflection.cache
def get_view_definition(self, connection, view_name, schema=None, **kw):
FROM rdb$relations
WHERE rdb$relation_name=?
"""
- rp = connection.execute(qry, [self.denormalize_name(view_name)])
+ rp = connection.exec_driver_sql(
+ qry, [self.denormalize_name(view_name)]
+ )
row = rp.first()
if row:
return row["view_source"]
"""
tablename = self.denormalize_name(table_name)
# get primary key fields
- c = connection.execute(keyqry, ["PRIMARY KEY", tablename])
+ c = connection.exec_driver_sql(keyqry, ["PRIMARY KEY", tablename])
pkfields = [self.normalize_name(r["fname"]) for r in c.fetchall()]
return {"constrained_columns": pkfields, "name": None}
FROM rdb$dependencies trigdep2
WHERE trigdep2.rdb$dependent_name = trigdep.rdb$dependent_name) = 2
"""
- genr = connection.execute(genqry, [tablename, colname]).first()
+ genr = connection.exec_driver_sql(genqry, [tablename, colname]).first()
if genr is not None:
return dict(name=self.normalize_name(genr["fgenerator"]))
tablename = self.denormalize_name(table_name)
# get all of the fields for this table
- c = connection.execute(tblqry, [tablename])
+ c = connection.exec_driver_sql(tblqry, [tablename])
cols = []
while True:
row = c.fetchone()
"""
tablename = self.denormalize_name(table_name)
- c = connection.execute(fkqry, ["FOREIGN KEY", tablename])
+ c = connection.exec_driver_sql(fkqry, ["FOREIGN KEY", tablename])
fks = util.defaultdict(
lambda: {
"name": None,
AND rdb$relation_constraints.rdb$constraint_type IS NULL
ORDER BY index_name, ic.rdb$field_position
"""
- c = connection.execute(qry, [self.denormalize_name(table_name)])
+ c = connection.exec_driver_sql(
+ qry, [self.denormalize_name(table_name)]
+ )
indexes = util.defaultdict(dict)
for row in c:
def _switch_db(dbname, connection, fn, *arg, **kw):
if dbname:
- current_db = connection.scalar("select db_name()")
+ current_db = connection.exec_driver_sql("select db_name()").scalar()
if current_db != dbname:
- connection.execute(
+ connection.exec_driver_sql(
"use %s"
% connection.dialect.identifier_preparer.quote_schema(dbname)
)
return fn(*arg, **kw)
finally:
if dbname and current_db != dbname:
- connection.execute(
+ connection.exec_driver_sql(
"use %s"
% connection.dialect.identifier_preparer.quote_schema(
current_db
def do_savepoint(self, connection, name):
# give the DBAPI a push
- connection.execute("IF @@TRANCOUNT = 0 BEGIN TRANSACTION")
+ connection.exec_driver_sql("IF @@TRANCOUNT = 0 BEGIN TRANSACTION")
super(MSDialect, self).do_savepoint(connection, name)
def do_release_savepoint(self, connection, name):
for col in cols:
colmap[col["name"]] = col
# We also run an sp_columns to check for identity columns:
- cursor = connection.execute(
+ cursor = connection.exec_driver_sql(
"sp_columns @table_name = '%s', "
"@table_owner = '%s'" % (tablename, owner)
)
if ic is not None and self.server_version_info >= MS_2005_VERSION:
table_fullname = "%s.%s" % (owner, tablename)
- cursor = connection.execute(
+ cursor = connection.exec_driver_sql(
"select ident_seed('%s'), ident_incr('%s')"
% (table_fullname, table_fullname)
)
@create_db.for_db("mssql")
def _mssql_create_db(cfg, eng, ident):
with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
- conn.execute("create database %s" % ident)
- conn.execute(
+ conn.exec_driver_sql("create database %s" % ident)
+ conn.exec_driver_sql(
"ALTER DATABASE %s SET ALLOW_SNAPSHOT_ISOLATION ON" % ident
)
- conn.execute(
+ conn.exec_driver_sql(
"ALTER DATABASE %s SET READ_COMMITTED_SNAPSHOT ON" % ident
)
- conn.execute("use %s" % ident)
- conn.execute("create schema test_schema")
- conn.execute("create schema test_schema_2")
+ conn.exec_driver_sql("use %s" % ident)
+ conn.exec_driver_sql("create schema test_schema")
+ conn.exec_driver_sql("create schema test_schema_2")
@drop_db.for_db("mssql")
try:
# typically when this happens, we can't KILL the session anyway,
# so let the cleanup process drop the DBs
- # for row in conn.execute(
+ # for row in conn.exec_driver_sql(
# "select session_id from sys.dm_exec_sessions "
# "where database_id=db_id('%s')" % ident):
# log.info("killing SQL server sesssion %s", row['session_id'])
- # conn.execute("kill %s" % row['session_id'])
+ # conn.exec_driver_sql("kill %s" % row['session_id'])
- conn.execute("drop database %s" % ident)
+ conn.exec_driver_sql("drop database %s" % ident)
log.info("Reaped db: %s", ident)
return True
except exc.DatabaseError as err:
log.info("identifiers in file: %s", ", ".join(idents))
- to_reap = conn.execute(
+ to_reap = conn.exec_driver_sql(
"select d.name from sys.databases as d where name "
"like 'TEST_%' and not exists (select session_id "
"from sys.dm_exec_sessions "
return module
def _get_server_version_info(self, connection):
- vers = connection.scalar("select @@version")
+ vers = connection.exec_driver_sql("select @@version").scalar()
m = re.match(r"Microsoft .*? - (\d+).(\d+).(\d+).(\d+)", vers)
if m:
return tuple(int(x) for x in m.group(1, 2, 3, 4))
try:
# "Version of the instance of SQL Server, in the form
# of 'major.minor.build.revision'"
- raw = connection.scalar(
+ raw = connection.exec_driver_sql(
"SELECT CAST(SERVERPROPERTY('ProductVersion') AS VARCHAR)"
- )
+ ).scalar()
except exc.DBAPIError:
# SQL Server docs indicate this function isn't present prior to
# 2008. Before we had the VARCHAR cast above, pyodbc would also
connection.execute(sql.text("XA COMMIT :xid"), xid=xid)
def do_recover_twophase(self, connection):
- resultset = connection.execute("XA RECOVER")
+ resultset = connection.exec_driver_sql("XA RECOVER")
return [row["data"][0 : row["gtrid_length"]] for row in resultset]
def is_disconnect(self, e, connection, cursor):
raise NotImplementedError()
def _get_default_schema_name(self, connection):
- return connection.execute("SELECT DATABASE()").scalar()
+ return connection.exec_driver_sql("SELECT DATABASE()").scalar()
def has_table(self, connection, table_name, schema=None):
# SHOW TABLE STATUS LIKE and SHOW TABLES LIKE do not function properly
try:
rs = connection.execution_options(
skip_user_error_events=True
- ).execute(st)
+ ).exec_driver_sql(st)
have = rs.fetchone() is not None
rs.close()
return have
@reflection.cache
def get_schema_names(self, connection, **kw):
- rp = connection.execute("SHOW schemas")
+ rp = connection.exec_driver_sql("SHOW schemas")
return [r[0] for r in rp]
@reflection.cache
charset = self._connection_charset
if self.server_version_info < (5, 0, 2):
- rp = connection.execute(
+ rp = connection.exec_driver_sql(
"SHOW TABLES FROM %s"
% self.identifier_preparer.quote_identifier(current_schema)
)
row[0] for row in self._compat_fetchall(rp, charset=charset)
]
else:
- rp = connection.execute(
+ rp = connection.exec_driver_sql(
"SHOW FULL TABLES FROM %s"
% self.identifier_preparer.quote_identifier(current_schema)
)
if self.server_version_info < (5, 0, 2):
return self.get_table_names(connection, schema)
charset = self._connection_charset
- rp = connection.execute(
+ rp = connection.exec_driver_sql(
"SHOW FULL TABLES FROM %s"
% self.identifier_preparer.quote_identifier(schema)
)
charset = self._connection_charset
row = self._compat_first(
- connection.execute("SHOW VARIABLES LIKE 'lower_case_table_names'"),
+ connection.execute(
+ sql.text("SHOW VARIABLES LIKE 'lower_case_table_names'")
+ ),
charset=charset,
)
if not row:
pass
else:
charset = self._connection_charset
- rs = connection.execute("SHOW COLLATION")
+ rs = connection.exec_driver_sql("SHOW COLLATION")
for row in self._compat_fetchall(rs, charset):
collations[row[0]] = row[1]
return collations
def _detect_sql_mode(self, connection):
row = self._compat_first(
- connection.execute("SHOW VARIABLES LIKE 'sql_mode'"),
+ connection.exec_driver_sql("SHOW VARIABLES LIKE 'sql_mode'"),
charset=self._connection_charset,
)
try:
rp = connection.execution_options(
skip_user_error_events=True
- ).execute(st)
+ ).exec_driver_sql(st)
except exc.DBAPIError as e:
if self._extract_error_code(e.orig) == 1146:
util.raise_(exc.NoSuchTableError(full_name), replace_context=e)
try:
rp = connection.execution_options(
skip_user_error_events=True
- ).execute(st)
+ ).exec_driver_sql(st)
except exc.DBAPIError as e:
code = self._extract_error_code(e.orig)
if code == 1146:
# https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8
# specific issue w/ the utf8mb4_bin collation and unicode returns
- has_utf8mb4_bin = self.server_version_info > (
- 5,
- ) and connection.scalar(
+ collation = connection.exec_driver_sql(
"show collation where %s = 'utf8mb4' and %s = 'utf8mb4_bin'"
% (
self.identifier_preparer.quote("Charset"),
self.identifier_preparer.quote("Collation"),
)
- )
+ ).scalar()
+ has_utf8mb4_bin = self.server_version_info > (5,) and collation
if has_utf8mb4_bin:
additional_tests = [
sql.collate(
xid.encode(charset)
).decode(charset)
arg = "'%s'" % arg
- connection.execution_options(_oursql_plain_query=True).execute(
+ connection.execution_options(_oursql_plain_query=True).exec_driver_sql(
query % arg
)
except Exception:
pass
- conn.execute("CREATE DATABASE %s CHARACTER SET utf8mb4" % ident)
- conn.execute(
+ conn.exec_driver_sql(
+ "CREATE DATABASE %s CHARACTER SET utf8mb4" % ident
+ )
+ conn.exec_driver_sql(
"CREATE DATABASE %s_test_schema CHARACTER SET utf8mb4" % ident
)
- conn.execute(
+ conn.exec_driver_sql(
"CREATE DATABASE %s_test_schema_2 CHARACTER SET utf8mb4" % ident
)
@drop_db.for_db("mysql")
def _mysql_drop_db(cfg, eng, ident):
with eng.connect() as conn:
- conn.execute("DROP DATABASE %s_test_schema" % ident)
- conn.execute("DROP DATABASE %s_test_schema_2" % ident)
- conn.execute("DROP DATABASE %s" % ident)
+ conn.exec_driver_sql("DROP DATABASE %s_test_schema" % ident)
+ conn.exec_driver_sql("DROP DATABASE %s_test_schema_2" % ident)
+ conn.exec_driver_sql("DROP DATABASE %s" % ident)
@temp_table_keyword_args.for_db("mysql")
#
# If it's decided that issuing that sort of SQL leaves you SOL, then
# this can prefer the driver value.
- rs = connection.execute("SHOW VARIABLES LIKE 'character_set%%'")
+ rs = connection.exec_driver_sql(
+ "SHOW VARIABLES LIKE 'character_set%%'"
+ )
opts = {row[0]: row[1] for row in self._compat_fetchall(rs)}
for key in ("character_set_connection", "character_set"):
if opts.get(key, None):
if self.server_version_info < (12, 2):
return self.server_version_info
try:
- compat = connection.execute(
+ compat = connection.exec_driver_sql(
"SELECT value FROM v$parameter WHERE name = 'compatible'"
).scalar()
except exc.DBAPIError:
def _get_default_schema_name(self, connection):
return self.normalize_name(
- connection.execute("SELECT USER FROM DUAL").scalar()
+ connection.exec_driver_sql("SELECT USER FROM DUAL").scalar()
)
def _resolve_synonym(
@reflection.cache
def get_schema_names(self, connection, **kw):
s = "SELECT username FROM all_users ORDER BY username"
- cursor = connection.execute(s)
+ cursor = connection.exec_driver_sql(s)
return [self.normalize_name(row[0]) for row in cursor]
@reflection.cache
# NLS_TERRITORY or formatting behavior of the DB, we opt
# to just look at it
- self._decimal_char = connection.scalar(
+ self._decimal_char = connection.exec_driver_sql(
"select value from nls_session_parameters "
"where parameter = 'NLS_NUMERIC_CHARACTERS'"
- )[0]
+ ).scalar()[0]
if self._decimal_char != ".":
_detect_decimal = self._detect_decimal
_to_decimal = self._to_decimal
# similar, so that the default tablespace is not "system"; reflection will
# fail otherwise
with eng.connect() as conn:
- conn.execute("create user %s identified by xe" % ident)
- conn.execute("create user %s_ts1 identified by xe" % ident)
- conn.execute("create user %s_ts2 identified by xe" % ident)
- conn.execute("grant dba to %s" % (ident,))
- conn.execute("grant unlimited tablespace to %s" % ident)
- conn.execute("grant unlimited tablespace to %s_ts1" % ident)
- conn.execute("grant unlimited tablespace to %s_ts2" % ident)
+ conn.exec_driver_sql("create user %s identified by xe" % ident)
+ conn.exec_driver_sql("create user %s_ts1 identified by xe" % ident)
+ conn.exec_driver_sql("create user %s_ts2 identified by xe" % ident)
+ conn.exec_driver_sql("grant dba to %s" % (ident,))
+ conn.exec_driver_sql("grant unlimited tablespace to %s" % ident)
+ conn.exec_driver_sql("grant unlimited tablespace to %s_ts1" % ident)
+ conn.exec_driver_sql("grant unlimited tablespace to %s_ts2" % ident)
@configure_follower.for_db("oracle")
def _ora_drop_ignore(conn, dbname):
try:
- conn.execute("drop user %s cascade" % dbname)
+ conn.exec_driver_sql("drop user %s cascade" % dbname)
log.info("Reaped db: %s", dbname)
return True
except exc.DatabaseError as err:
log.info("identifiers in file: %s", ", ".join(idents))
- to_reap = conn.execute(
+ to_reap = conn.exec_driver_sql(
"select u.username from all_users u where username "
"like 'TEST_%' and not exists (select username "
"from v$session where username=u.username)"
were set to include ``test_schema``, and we invoked a table
reflection process as follows::
- >>> from sqlalchemy import Table, MetaData, create_engine
+ >>> from sqlalchemy import Table, MetaData, create_engine, text
>>> engine = create_engine("postgresql://scott:tiger@localhost/test")
>>> with engine.connect() as conn:
- ... conn.execute("SET search_path TO test_schema, public")
+ ... conn.execute(text("SET search_path TO test_schema, public"))
... meta = MetaData()
... referring = Table('referring', meta,
... autoload=True, autoload_with=conn)
:meth:`.MetaData.reflect`::
>>> with engine.connect() as conn:
- ... conn.execute("SET search_path TO test_schema, public")
+ ... conn.execute(text("SET search_path TO test_schema, public"))
... meta = MetaData()
... referring = Table('referring', meta, autoload=True,
... autoload_with=conn,
# http://www.postgresql.org/docs/9.3/static/release-9-2.html#AEN116689
self.supports_smallserial = self.server_version_info >= (9, 2)
+ std_string = connection.exec_driver_sql(
+ "show standard_conforming_strings"
+ ).scalar()
self._backslash_escapes = (
- self.server_version_info < (8, 2)
- or connection.scalar("show standard_conforming_strings") == "off"
+ self.server_version_info < (8, 2) or std_string == "off"
)
self._supports_create_index_concurrently = (
self.do_begin(connection.connection)
def do_prepare_twophase(self, connection, xid):
- connection.execute("PREPARE TRANSACTION '%s'" % xid)
+ connection.exec_driver_sql("PREPARE TRANSACTION '%s'" % xid)
def do_rollback_twophase(
self, connection, xid, is_prepared=True, recover=False
# context when committing recoverable transactions
# Must find out a way how to make the dbapi not
# open a transaction.
- connection.execute("ROLLBACK")
- connection.execute("ROLLBACK PREPARED '%s'" % xid)
- connection.execute("BEGIN")
+ connection.exec_driver_sql("ROLLBACK")
+ connection.exec_driver_sql("ROLLBACK PREPARED '%s'" % xid)
+ connection.exec_driver_sql("BEGIN")
self.do_rollback(connection.connection)
else:
self.do_rollback(connection.connection)
):
if is_prepared:
if recover:
- connection.execute("ROLLBACK")
- connection.execute("COMMIT PREPARED '%s'" % xid)
- connection.execute("BEGIN")
+ connection.exec_driver_sql("ROLLBACK")
+ connection.exec_driver_sql("COMMIT PREPARED '%s'" % xid)
+ connection.exec_driver_sql("BEGIN")
self.do_rollback(connection.connection)
else:
self.do_commit(connection.connection)
return [row[0] for row in resultset]
def _get_default_schema_name(self, connection):
- return connection.scalar("select current_schema()")
+ return connection.exec_driver_sql("select current_schema()").scalar()
def has_schema(self, connection, schema):
query = (
return bool(cursor.scalar())
def _get_server_version_info(self, connection):
- v = connection.execute("select version()").scalar()
+ v = connection.exec_driver_sql("select version()").scalar()
m = re.match(
r".*(?:PostgreSQL|EnterpriseDB) "
r"(\d+)\.?(\d+)?(?:\.(\d+))?(?:\.\d+)?(?:devel|beta)?",
except Exception:
pass
if not template_db:
- template_db = conn.scalar("select current_database()")
+ template_db = conn.exec_driver_sql(
+ "select current_database()"
+ ).scalar()
attempt = 0
while True:
try:
- conn.execute(
+ conn.exec_driver_sql(
"CREATE DATABASE %s TEMPLATE %s" % (ident, template_db)
)
except exc.OperationalError as err:
),
dname=ident,
)
- conn.execute("DROP DATABASE %s" % ident)
+ conn.exec_driver_sql("DROP DATABASE %s" % ident)
@temp_table_keyword_args.for_db("postgresql")
eng = create_engine("sqlite://")
conn = eng.connect()
- conn.execute("create table x (a integer, b integer)")
- conn.execute("insert into x (a, b) values (1, 1)")
- conn.execute("insert into x (a, b) values (2, 2)")
+ conn.exec_driver_sql("create table x (a integer, b integer)")
+ conn.exec_driver_sql("insert into x (a, b) values (1, 1)")
+ conn.exec_driver_sql("insert into x (a, b) values (2, 2)")
- result = conn.execute("select x.a, x.b from x")
+ result = conn.exec_driver_sql("select x.a, x.b from x")
assert result.keys() == ["a", "b"]
- result = conn.execute('''
+ result = conn.exec_driver_sql('''
select x.a, x.b from x where a=1
union
select x.a, x.b from x where a=2
the ``sqlite_raw_colnames`` execution option may be provided, either on a
per-:class:`.Connection` basis::
- result = conn.execution_options(sqlite_raw_colnames=True).execute('''
+ result = conn.execution_options(sqlite_raw_colnames=True).exec_driver_sql('''
select x.a, x.b from x where a=1
union
select x.a, x.b from x where a=2
@reflection.cache
def get_schema_names(self, connection, **kw):
s = "PRAGMA database_list"
- dl = connection.execute(s)
+ dl = connection.exec_driver_sql(s)
return [db[1] for db in dl if db[1] != "temp"]
s = ("SELECT name FROM %s " "WHERE type='table' ORDER BY name") % (
master,
)
- rs = connection.execute(s)
+ rs = connection.exec_driver_sql(s)
return [row[0] for row in rs]
@reflection.cache
"SELECT name FROM sqlite_temp_master "
"WHERE type='table' ORDER BY name "
)
- rs = connection.execute(s)
+ rs = connection.exec_driver_sql(s)
return [row[0] for row in rs]
"SELECT name FROM sqlite_temp_master "
"WHERE type='view' ORDER BY name "
)
- rs = connection.execute(s)
+ rs = connection.exec_driver_sql(s)
return [row[0] for row in rs]
s = ("SELECT name FROM %s " "WHERE type='view' ORDER BY name") % (
master,
)
- rs = connection.execute(s)
+ rs = connection.exec_driver_sql(s)
return [row[0] for row in rs]
master,
view_name,
)
- rs = connection.execute(s)
+ rs = connection.exec_driver_sql(s)
else:
try:
s = (
"WHERE name = '%s' "
"AND type='view'"
) % view_name
- rs = connection.execute(s)
+ rs = connection.exec_driver_sql(s)
except exc.DBAPIError:
s = (
"SELECT sql FROM sqlite_master WHERE name = '%s' "
"AND type='view'"
) % view_name
- rs = connection.execute(s)
+ rs = connection.exec_driver_sql(s)
result = rs.fetchall()
if result:
"AND type = 'table'"
% {"schema": schema_expr, "table": table_name}
)
- rs = connection.execute(s)
+ rs = connection.exec_driver_sql(s)
except exc.DBAPIError:
s = (
"SELECT sql FROM %(schema)ssqlite_master "
"AND type = 'table'"
% {"schema": schema_expr, "table": table_name}
)
- rs = connection.execute(s)
+ rs = connection.exec_driver_sql(s)
return rs.scalar()
def _get_table_pragma(self, connection, pragma, table_name, schema=None):
qtable = quote(table_name)
for statement in statements:
statement = "%s%s(%s)" % (statement, pragma, qtable)
- cursor = connection.execute(statement)
+ cursor = connection.exec_driver_sql(statement)
if not cursor._soft_closed:
# work around SQLite issue whereby cursor.description
# is blank when PRAGMA returns no rows:
conn = super(SQLiteDialect_pysqlcipher, self).connect(
*cargs, **cparams
)
- conn.execute('pragma key="%s"' % passphrase)
+ conn.exec_driver_sql('pragma key="%s"' % passphrase)
for prag, value in pragmas.items():
if value is not None:
- conn.execute('pragma %s="%s"' % (prag, value))
+ conn.exec_driver_sql('pragma %s="%s"' % (prag, value))
return conn
@event.listens_for(engine, "begin")
def do_begin(conn):
# emit our own BEGIN
- conn.execute("BEGIN")
+ conn.exec_driver_sql("BEGIN")
Above, we intercept a new pysqlite connection and disable any transactional
integration. Then, at the point at which SQLAlchemy knows that transaction
@event.listens_for(engine, "begin")
def do_begin(conn):
- conn.execute("BEGIN EXCLUSIVE")
+ conn.exec_driver_sql("BEGIN EXCLUSIVE")
.. seealso::
cursor.execute(statement, param)
def _get_server_version_info(self, connection):
- vers = connection.scalar("select @@version_number")
+ vers = connection.exec_driver_sql("select @@version_number").scalar()
# i.e. 15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0),
# (12, 5, 0, 0)
return (vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10)
with engine.connect() as conn:
conn.detach()
- conn.execute("SET search_path TO schema1, schema2")
+ conn.execute(text("SET search_path TO schema1, schema2"))
# work with connection
:param object: The statement to be executed. May be
one of:
- * a plain string
+ * a plain string (deprecated)
* any :class:`.ClauseElement` construct that is also
a subclass of :class:`.Executable`, such as a
:func:`~.expression.select` construct
* a :class:`.DefaultGenerator` object
* a :class:`.Compiled` object
+ .. deprecated:: 2.0 passing a string to :meth:`.Connection.execute` is
+ deprecated and will be removed in version 2.0. Use the
+ :func:`~.expression.text` construct with
+ :meth:`.Connection.execute`, or the
+ :meth:`.Connection.exec_driver_sql` method to invoke a driver-level
+ SQL string.
+
:param \*multiparams/\**params: represent bound parameter
values to be used in the execution. Typically,
the format is either a collection of one or more
To execute a textual SQL statement which uses bound parameters in a
DBAPI-agnostic way, use the :func:`~.expression.text` construct.
+ .. deprecated:: 2.0 use of tuple or scalar positional parameters
+ is deprecated. All params should be dicts or sequences of dicts.
+ Use :meth:`.exec_driver_sql` to execute a plain string with
+ tuple or scalar positional parameters.
+
"""
if isinstance(object_, util.string_types[0]):
- return self._execute_text(object_, multiparams, params)
+ util.warn_deprecated_20(
+ "Passing a string to Connection.execute() is "
+ "deprecated and will be removed in version 2.0. Use the "
+ "text() construct, "
+ "or the Connection.exec_driver_sql() method to invoke a "
+ "driver-level SQL string."
+ )
+ distilled_params = _distill_params(multiparams, params)
+ return self._exec_driver_sql_distilled(object_, distilled_params)
try:
meth = object_._execute_on_connection
except AttributeError as err:
)
return ret
- def _execute_text(self, statement, multiparams, params):
- """Execute a string SQL statement."""
+ def _exec_driver_sql_distilled(self, statement, parameters):
if self._has_events or self.engine._has_events:
for fn in self.dispatch.before_execute:
statement, multiparams, params = fn(
- self, statement, multiparams, params
+ self, statement, parameters, {}
)
dialect = self.dialect
- parameters = _distill_params(multiparams, params)
ret = self._execute_context(
dialect,
dialect.execution_ctx_cls._init_statement,
parameters,
)
if self._has_events or self.engine._has_events:
- self.dispatch.after_execute(
- self, statement, multiparams, params, ret
- )
+ self.dispatch.after_execute(self, statement, parameters, {})
return ret
+ def exec_driver_sql(self, statement, parameters=None):
+ r"""Executes a driver-level SQL string and returns a
+ :class:`.ResultProxy`.
+
+ :param statement: The string SQL statement to be executed. Bound
+ parameters must use the underlying DBAPI's paramstyle, such as
+ "qmark", "pyformat", "format", etc.
+
+ :param parameters: represent bound parameter values to be used in the
+ execution. The format is one of: a dictionary of named parameters,
+ a tuple of positional parameters, or a list containing either
+ dictionaries or tuples for multiple-execute support.
+
+ E.g. multiple dictionaries::
+
+
+ conn.exec_driver_sql(
+ "INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)",
+ [{"id":1, "value":"v1"}, {"id":2, "value":"v2"}]
+ )
+
+ Single dictionary::
+
+ conn.exec_driver_sql(
+ "INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)",
+ dict(id=1, value="v1")
+ )
+
+ Single tuple::
+
+ conn.exec_driver_sql(
+ "INSERT INTO table (id, value) VALUES (?, ?)",
+ (1, 'v1')
+ )
+
+ .. seealso::
+
+ :pep:`249`
+
+ """
+
+ if isinstance(parameters, list) and parameters:
+ if not isinstance(parameters[0], (dict, tuple)):
+ raise exc.ArgumentError(
+ "List argument must consist only of tuples or dictionaries"
+ )
+ elif isinstance(parameters, (dict, tuple)):
+ parameters = [parameters]
+
+ return self._exec_driver_sql_distilled(statement, parameters or ())
+
def _execute_context(
self, dialect, constructor, statement, parameters, *args
):
e.g.::
def do_something(conn, x, y):
- conn.execute("some statement", {'x':x, 'y':y})
+ conn.execute(text("some statement"), {'x':x, 'y':y})
conn.transaction(do_something, 5, 10)
be used with :meth:`.Connection.begin`::
with conn.begin():
- conn.execute("some statement", {'x':5, 'y':10})
+ conn.execute(text("some statement"), {'x':5, 'y':10})
As well as with :meth:`.Engine.begin`::
with engine.begin() as conn:
- conn.execute("some statement", {'x':5, 'y':10})
+ conn.execute(text("some statement"), {'x':5, 'y':10})
.. seealso::
engine = create_engine("postgresql://scott:tiger@localhost/test")
connection = engine.connect()
trans = connection.begin()
- connection.execute("insert into x (a, b) values (1, 2)")
+ connection.execute(text("insert into x (a, b) values (1, 2)"))
trans.commit()
The object provides :meth:`.rollback` and :meth:`.commit`
:meth:`.Connection.begin` method::
with connection.begin():
- connection.execute("insert into x (a, b) values (1, 2)")
+ connection.execute(text("insert into x (a, b) values (1, 2)"))
The Transaction object is **not** threadsafe.
E.g.::
with engine.begin() as conn:
- conn.execute("insert into table (x, y, z) values (1, 2, 3)")
- conn.execute("my_special_procedure(5)")
+ conn.execute(
+ text("insert into table (x, y, z) values (1, 2, 3)")
+ )
+ conn.execute(text("my_special_procedure(5)"))
Upon successful operation, the :class:`.Transaction`
is committed. If an error is raised, the :class:`.Transaction`
e.g.::
def do_something(conn, x, y):
- conn.execute("some statement", {'x':x, 'y':y})
+ conn.execute(text("some statement"), {'x':x, 'y':y})
engine.transaction(do_something, 5, 10)
be used with :meth:`.Engine.begin`::
with engine.begin() as conn:
- conn.execute("some statement", {'x':5, 'y':10})
+ conn.execute(text("some statement"), {'x':5, 'y':10})
.. seealso::
"used to return a scalar result.",
)
def scalar(self, statement, *multiparams, **params):
+ """Executes the given statement and returns the first column of the
+ first row.
+
+ The underlying result/cursor is closed after execution.
+ """
return self.execute(statement, *multiparams, **params).scalar()
def _execute_clauseelement(self, elem, multiparams=None, params=None):
return self
def _init_legacy(self, bind):
- if hasattr(bind, "engine"):
+ if hasattr(bind, "exec_driver_sql"):
self._init_connection(bind)
else:
self._init_engine(bind)
result = conn.execution_options(
stream_results=True, max_row_buffer=50
- ).execute("select * from table")
+ ).execute(text("select * from table"))
.. versionadded:: 1.4 ``max_row_buffer`` may now exceed 1000 rows.
def my_before_insert_listener(mapper, connection, target):
# execute a stored procedure upon INSERT,
# apply the value to the row to be inserted
- target.calculated_value = connection.scalar(
- "select my_special_function(%d)"
- % target.special_number)
+ target.calculated_value = connection.execute(
+ text("select my_special_function(%d)" % target.special_number)
+ ).scalar()
# associate the listener function with SomeClass,
# to execute during the "before_insert" hook
E.g.::
- >>> row = engine.execute("select * from table where a=1 and b=2").\
-first()
+ >>> row = engine.execute(\
+ text("select * from table where a=1 and b=2")\
+ ).first()
>>> identity_key(MyClass, row=row)
(<class '__main__.MyClass'>, (1, 2), None)
s.__dict__ = self.__dict__.copy()
return s
+
+class HasCompileState(Generative):
+ """A class that has a :class:`.CompileState` associated with it."""
+
+ _compile_state_factory = CompileState._create
+
+ _compile_state_plugin = None
+
+
+class Executable(Generative):
+ """Mark a ClauseElement as supporting execution.
+
+ :class:`.Executable` is a superclass for all "statement" types
+ of objects, including :func:`select`, :func:`delete`, :func:`update`,
+ :func:`insert`, :func:`text`.
+
+ """
+
+ supports_execution = True
+ _execution_options = util.immutabledict()
+ _bind = None
+
def options(self, *options):
"""Apply options to this statement.
"""
self._options += options
-
-class HasCompileState(Generative):
- """A class that has a :class:`.CompileState` associated with it."""
-
- _compile_state_factory = CompileState._create
-
- _compile_state_plugin = None
-
-
-class Executable(Generative):
- """Mark a ClauseElement as supporting execution.
-
- :class:`.Executable` is a superclass for all "statement" types
- of objects, including :func:`select`, :func:`delete`, :func:`update`,
- :func:`insert`, :func:`text`.
-
- """
-
- supports_execution = True
- _execution_options = util.immutabledict()
- _bind = None
-
@_generative
def execution_options(self, **kw):
""" Set non-SQL options for the statement which take effect during
some_table = Table('some_table', m, Column('data', Integer))
def after_create(target, connection, **kw):
- connection.execute("ALTER TABLE %s SET name=foo_%s" %
- (target.name, target.name))
+ connection.execute(text(
+ "ALTER TABLE %s SET name=foo_%s" % (target.name, target.name)
+ ))
event.listen(some_table, "after_create", after_create)
from ... import String
from ... import testing
from ... import text
+from ... import util
class RowFetchTest(fixtures.TablesTest):
):
engine = self._fixture(engine_ss_arg)
with engine.begin() as conn:
- result = conn.execute(statement)
+ if isinstance(statement, util.string_types):
+ result = conn.exec_driver_sql(statement)
+ else:
+ result = conn.execute(statement)
eq_(self._is_server_side(result.cursor), cursor_ss_status)
result.close()
result = (
engine.connect()
.execution_options(stream_results=True)
- .execute("select 1")
+ .exec_driver_sql("select 1")
)
assert self._is_server_side(result.cursor)
def test_raw_sql_rowcount(self, connection):
# test issue #3622, make sure eager rowcount is called for text
- result = connection.execute(
+ result = connection.exec_driver_sql(
"update employees set department='Z' where department='C'"
)
eq_(result.rowcount, 3)
def _assert_result(self, select, result, params=()):
eq_(config.db.execute(select, params).fetchall(), result)
+ def _assert_result_str(self, select, result, params=()):
+ conn = config.db.connect(close_with_result=True)
+ eq_(conn.exec_driver_sql(select, params).fetchall(), result)
+
def test_simple_limit(self):
table = self.tables.some_table
self._assert_result(
)
sql = str(sql)
- self._assert_result(sql, [(2, 2, 3), (3, 3, 4)])
+ self._assert_result_str(sql, [(2, 2, 3), (3, 3, 4)])
@testing.requires.bound_limit_offset
def test_bound_limit(self):
)
)
- eq_(conn.scalar(literal_sql), expected)
+ eq_(conn.exec_driver_sql(literal_sql).scalar(), expected)
def test_string_cast_crit_spaces_in_key(self):
name = self.tables.data_table.c.name
)
# warm up type caches
- t.select().execute().fetchall()
- t2.select().execute().fetchall()
- testing.db.execute(
- "SELECT %s FROM table1"
- % (", ".join("field%d" % fnum for fnum in range(NUM_FIELDS)))
- ).fetchall()
- testing.db.execute(
- "SELECT %s FROM table2"
- % (", ".join("field%d" % fnum for fnum in range(NUM_FIELDS)))
- ).fetchall()
+ with testing.db.connect() as conn:
+ conn.execute(t.select()).fetchall()
+ conn.execute(t2.select()).fetchall()
+ conn.exec_driver_sql(
+ "SELECT %s FROM table1"
+ % (", ".join("field%d" % fnum for fnum in range(NUM_FIELDS)))
+ ).fetchall()
+ conn.exec_driver_sql(
+ "SELECT %s FROM table2"
+ % (", ".join("field%d" % fnum for fnum in range(NUM_FIELDS)))
+ ).fetchall()
def teardown(self):
metadata.drop_all()
stmt = "SELECT %s FROM table1" % (
", ".join("field%d" % fnum for fnum in range(NUM_FIELDS))
)
- [tuple(row) for row in testing.db.execute(stmt).fetchall()]
+ with testing.db.connect() as conn:
+ [tuple(row) for row in conn.exec_driver_sql(stmt).fetchall()]
@profiling.function_call_count(variance=0.10)
def test_raw_unicode(self):
stmt = "SELECT %s FROM table2" % (
", ".join("field%d" % fnum for fnum in range(NUM_FIELDS))
)
- [tuple(row) for row in testing.db.execute(stmt).fetchall()]
+ with testing.db.connect() as conn:
+ [tuple(row) for row in conn.exec_driver_sql(stmt).fetchall()]
def test_contains_doesnt_compile(self):
row = t.select().execute().first()
e = create_engine("sqlite://")
c = e.connect()
# ensure initial connect activities complete
- c.execute("select 1")
+ c.exec_driver_sql("select 1")
@profiling.function_call_count()
def go():
- c.execute("select 1")
+ c.exec_driver_sql("select 1")
try:
go()
# create an engine without any instrumentation.
e = create_engine("sqlite://")
# ensure initial connect activities complete
- e.execute("select 1")
+
+ with e.connect() as conn:
+ conn.exec_driver_sql("select 1")
@profiling.function_call_count()
def go():
- e.execute("select 1")
+ with e.connect() as conn:
+ conn.exec_driver_sql("select 1")
go()
class VersionDetectionTest(fixtures.TestBase):
- def test_pymssql_version(self):
+ @testing.fixture
+ def mock_conn_scalar(self):
+ return lambda text: Mock(
+ exec_driver_sql=Mock(
+ return_value=Mock(scalar=Mock(return_value=text))
+ )
+ )
+
+ def test_pymssql_version(self, mock_conn_scalar):
dialect = pymssql.MSDialect_pymssql()
for vers in [
"Microsoft SQL Azure (RTM) - 11.0.9216.62 \n"
"Jul 18 2014 22:00:21 \nCopyright (c) Microsoft Corporation",
]:
- conn = Mock(scalar=Mock(return_value=vers))
+ conn = mock_conn_scalar(vers)
eq_(dialect._get_server_version_info(conn), (11, 0, 9216, 62))
- def test_pyodbc_version_productversion(self):
+ def test_pyodbc_version_productversion(self, mock_conn_scalar):
dialect = pyodbc.MSDialect_pyodbc()
- conn = Mock(scalar=Mock(return_value="11.0.9216.62"))
+ conn = mock_conn_scalar("11.0.9216.62")
eq_(dialect._get_server_version_info(conn), (11, 0, 9216, 62))
def test_pyodbc_version_fallback(self):
("Not SQL Server Version 10.5", (5,)),
]:
conn = Mock(
- scalar=Mock(
- side_effect=exc.DBAPIError("stmt", "params", None)
+ exec_driver_sql=Mock(
+ return_value=Mock(
+ scalar=Mock(
+ side_effect=exc.DBAPIError("stmt", "params", None)
+ )
+ )
),
connection=Mock(getinfo=Mock(return_value=vers)),
)
with testing.db.connect() as c:
c.execution_options(isolation_level=value)
- c.execute("SELECT TOP 10 * FROM test")
+ c.exec_driver_sql("SELECT TOP 10 * FROM test")
eq_(
testing.db.dialect.get_isolation_level(c.connection), value
)
meta.create_all()
con = testing.db.connect()
- con.execute(
+ con.exec_driver_sql(
"""create trigger paj on t1 for insert as
insert into t2 (descr) select descr from inserted"""
)
finally:
tr.commit()
- con.execute("""drop trigger paj""")
+ con.exec_driver_sql("""drop trigger paj""")
meta.drop_all()
@testing.provide_metadata
)
meta.bind = eng
con = eng.connect()
- con.execute("create schema paj")
+ con.exec_driver_sql("create schema paj")
@event.listens_for(meta, "after_drop")
def cleanup(target, connection, **kw):
- connection.execute("drop schema paj")
+ connection.exec_driver_sql("drop schema paj")
tbl = Table(
"test", meta, Column("id", Integer, primary_key=True), schema="paj"
eng = engines.testing_engine(options=dict(legacy_schema_aliasing=True))
meta.bind = eng
con = eng.connect()
- con.execute("create schema paj")
+ con.exec_driver_sql("create schema paj")
@event.listens_for(meta, "after_drop")
def cleanup(target, connection, **kw):
- connection.execute("drop schema paj")
+ connection.exec_driver_sql("drop schema paj")
tbl = Table(
"test", meta, Column("id", Integer, primary_key=True), schema="paj"
)
meta.bind = eng
con = eng.connect()
- con.execute("create schema paj")
+ con.exec_driver_sql("create schema paj")
@event.listens_for(meta, "after_drop")
def cleanup(target, connection, **kw):
- connection.execute("drop schema paj")
+ connection.exec_driver_sql("drop schema paj")
tbl = Table(
"test", meta, Column("id", Integer, primary_key=True), schema="paj"
eng = engines.testing_engine(options=dict(legacy_schema_aliasing=True))
meta.bind = eng
con = eng.connect()
- con.execute("create schema paj")
+ con.exec_driver_sql("create schema paj")
@event.listens_for(meta, "after_drop")
def cleanup(target, connection, **kw):
- connection.execute("drop schema paj")
+ connection.exec_driver_sql("drop schema paj")
tbl = Table(
"test", meta, Column("id", Integer, primary_key=True), schema="paj"
try:
connection = testing.db.connect()
try:
- connection.execute("CREATE FULLTEXT CATALOG Catalog AS " "DEFAULT")
+ connection.exec_driver_sql(
+ "CREATE FULLTEXT CATALOG Catalog AS " "DEFAULT"
+ )
return False
except Exception:
return True
def teardown_class(cls):
metadata.drop_all()
connection = testing.db.connect()
- connection.execute("DROP FULLTEXT CATALOG Catalog")
+ connection.exec_driver_sql("DROP FULLTEXT CATALOG Catalog")
connection.close()
def test_expression(self):
@testing.provide_metadata
def test_skip_types(self):
metadata = self.metadata
- testing.db.execute(
- """
- create table foo (id integer primary key, data xml)
- """
- )
+ with testing.db.connect() as c:
+ c.exec_driver_sql(
+ "create table foo (id integer primary key, data xml)"
+ )
with mock.patch.object(
testing.db.dialect, "ischema_names", {"int": mssql.INTEGER}
):
)
metadata.create_all()
- dbname = testing.db.scalar("select db_name()")
- owner = testing.db.scalar("SELECT user_name()")
+ with testing.db.connect() as c:
+ dbname = c.exec_driver_sql("select db_name()").scalar()
+ owner = c.exec_driver_sql("SELECT user_name()").scalar()
referred_schema = "%(dbname)s.%(owner)s" % {
"dbname": dbname,
"owner": owner,
schema, owner = base._owner_plus_db(dialect, identifier)
mock_connection = mock.Mock(
- dialect=dialect, scalar=mock.Mock(return_value="my_db")
+ dialect=dialect,
+ exec_driver_sql=mock.Mock(
+ return_value=mock.Mock(scalar=mock.Mock(return_value="my_db"))
+ ),
)
mock_lambda = mock.Mock()
base._switch_db(schema, mock_connection, mock_lambda, "x", y="bar")
- eq_(mock_connection.mock_calls, [mock.call.scalar("select db_name()")])
+ eq_(
+ mock_connection.mock_calls,
+ [mock.call.exec_driver_sql("select db_name()")],
+ )
+ eq_(
+ mock_connection.exec_driver_sql.return_value.mock_calls,
+ [mock.call.scalar()],
+ ),
eq_(mock_lambda.mock_calls, [mock.call("x", y="bar")])
def test_owner_database_pairs_switch_for_different_db(self):
schema, owner = base._owner_plus_db(dialect, identifier)
mock_connection = mock.Mock(
- dialect=dialect, scalar=mock.Mock(return_value="my_db")
+ dialect=dialect,
+ exec_driver_sql=mock.Mock(
+ return_value=mock.Mock(scalar=mock.Mock(return_value="my_db"))
+ ),
)
mock_lambda = mock.Mock()
base._switch_db(schema, mock_connection, mock_lambda, "x", y="bar")
eq_(
mock_connection.mock_calls,
[
- mock.call.scalar("select db_name()"),
- mock.call.execute("use my_other_db"),
- mock.call.execute("use my_db"),
+ mock.call.exec_driver_sql("select db_name()"),
+ mock.call.exec_driver_sql("use my_other_db"),
+ mock.call.exec_driver_sql("use my_db"),
],
+ eq_(
+ mock_connection.exec_driver_sql.return_value.mock_calls,
+ [mock.call.scalar()],
+ ),
)
eq_(mock_lambda.mock_calls, [mock.call("x", y="bar")])
mock_connection = mock.Mock(
dialect=dialect,
- scalar=mock.Mock(return_value="Some ] Database"),
+ exec_driver_sql=mock.Mock(
+ return_value=mock.Mock(
+ scalar=mock.Mock(return_value="Some ] Database")
+ )
+ ),
)
mock_lambda = mock.Mock()
base._switch_db(schema, mock_connection, mock_lambda, "x", y="bar")
eq_(
mock_connection.mock_calls,
[
- mock.call.scalar("select db_name()"),
- mock.call.execute(use_stmt),
- mock.call.execute("use [Some Database]"),
+ mock.call.exec_driver_sql("select db_name()"),
+ mock.call.exec_driver_sql(use_stmt),
+ mock.call.exec_driver_sql("use [Some Database]"),
],
)
+ eq_(
+ mock_connection.exec_driver_sql.return_value.mock_calls,
+ [mock.call.scalar()],
+ )
eq_(mock_lambda.mock_calls, [mock.call("x", y="bar")])
with testing.db.connect() as conn:
conn.execute(t.insert().values(data="foo"))
- last_ts_1 = conn.scalar("SELECT @@DBTS")
+ last_ts_1 = conn.exec_driver_sql("SELECT @@DBTS").scalar()
if convert_int:
last_ts_1 = int(codecs.encode(last_ts_1, "hex"), 16)
conn.execute(
t.update().values(data="bar").where(t.c.data == "foo")
)
- last_ts_2 = conn.scalar("SELECT @@DBTS")
+ last_ts_2 = conn.exec_driver_sql("SELECT @@DBTS").scalar()
if convert_int:
last_ts_2 = int(codecs.encode(last_ts_2, "hex"), 16)
statement = "SELECT 1 FROM DUAL WHERE 1=0"
return real_exec(self, statement, *args, **kw)
- real_exec = engine._connection_cls._execute_text
+ real_exec = engine._connection_cls.exec_driver_sql
with mock.patch.object(
- engine._connection_cls, "_execute_text", my_execute
+ engine._connection_cls, "exec_driver_sql", my_execute
):
with expect_warnings(
"Could not retrieve SQL_MODE; please ensure the "
c = testing.db.connect().execution_options(
isolation_level="AUTOCOMMIT"
)
- assert c.execute("SELECT @@autocommit;").scalar()
+ assert c.exec_driver_sql("SELECT @@autocommit;").scalar()
c = c.execution_options(isolation_level="READ COMMITTED")
- assert not c.execute("SELECT @@autocommit;").scalar()
+ assert not c.exec_driver_sql("SELECT @@autocommit;").scalar()
def test_isolation_level(self):
values = [
@contextlib.contextmanager
def run_test(self):
connection = testing.db.connect()
- connection.execute("set innodb_lock_wait_timeout=1")
+ connection.exec_driver_sql("set innodb_lock_wait_timeout=1")
main_trans = connection.begin()
try:
yield Session(bind=connection)
def _assert_a_is_locked(self, should_be_locked):
A = self.classes.A
with testing.db.begin() as alt_trans:
- alt_trans.execute("set innodb_lock_wait_timeout=1")
+ alt_trans.exec_driver_sql("set innodb_lock_wait_timeout=1")
# set x/y > 10
try:
alt_trans.execute(update(A).values(x=15, y=19))
def _assert_b_is_locked(self, should_be_locked):
B = self.classes.B
with testing.db.begin() as alt_trans:
- alt_trans.execute("set innodb_lock_wait_timeout=1")
+ alt_trans.exec_driver_sql("set innodb_lock_wait_timeout=1")
# set x/y > 10
try:
alt_trans.execute(update(B).values(x=15, y=19))
self.metadata.create_all()
with testing.db.connect() as conn:
- conn.execute("CREATE VIEW v1 AS SELECT * FROM x")
- conn.execute("CREATE ALGORITHM=MERGE VIEW v2 AS SELECT * FROM x")
- conn.execute(
+ conn.exec_driver_sql("CREATE VIEW v1 AS SELECT * FROM x")
+ conn.exec_driver_sql(
+ "CREATE ALGORITHM=MERGE VIEW v2 AS SELECT * FROM x"
+ )
+ conn.exec_driver_sql(
"CREATE ALGORITHM=UNDEFINED VIEW v3 AS SELECT * FROM x"
)
- conn.execute(
+ conn.exec_driver_sql(
"CREATE DEFINER=CURRENT_USER VIEW v4 AS SELECT * FROM x"
)
def cleanup(*arg, **kw):
with testing.db.connect() as conn:
for v in ["v1", "v2", "v3", "v4"]:
- conn.execute("DROP VIEW %s" % v)
+ conn.exec_driver_sql("DROP VIEW %s" % v)
insp = inspect(testing.db)
for v in ["v1", "v2", "v3", "v4"]:
@event.listens_for(self.metadata, "before_drop")
def cleanup(*arg, **kw):
with testing.db.connect() as conn:
- conn.execute("DROP TABLE IF EXISTS test_t1")
- conn.execute("DROP TABLE IF EXISTS test_t2")
- conn.execute("DROP VIEW IF EXISTS test_v")
+ conn.exec_driver_sql("DROP TABLE IF EXISTS test_t1")
+ conn.exec_driver_sql("DROP TABLE IF EXISTS test_t2")
+ conn.exec_driver_sql("DROP VIEW IF EXISTS test_v")
with testing.db.connect() as conn:
- conn.execute("CREATE TABLE test_t1 (id INTEGER)")
- conn.execute("CREATE TABLE test_t2 (id INTEGER)")
- conn.execute("CREATE VIEW test_v AS SELECT id FROM test_t1")
- conn.execute("DROP TABLE test_t1")
+ conn.exec_driver_sql("CREATE TABLE test_t1 (id INTEGER)")
+ conn.exec_driver_sql("CREATE TABLE test_t2 (id INTEGER)")
+ conn.exec_driver_sql(
+ "CREATE VIEW test_v AS SELECT id FROM test_t1"
+ )
+ conn.exec_driver_sql("DROP TABLE test_t1")
m = MetaData()
with expect_warnings(
# this is ideally one table, but older MySQL versions choke
# on the multiple TIMESTAMP columns
-
- row = testing.db.execute(
- "show variables like '%%explicit_defaults_for_timestamp%%'"
- ).first()
+ with testing.db.connect() as c:
+ row = c.exec_driver_sql(
+ "show variables like '%%explicit_defaults_for_timestamp%%'"
+ ).first()
explicit_defaults_for_timestamp = row[1].lower() in ("on", "1", "true")
reflected = []
):
Table("nn_t%d" % idx, meta) # to allow DROP
- testing.db.execute(
- """
- CREATE TABLE nn_t%d (
- %s
+ with testing.db.connect() as c:
+ c.exec_driver_sql(
+ """
+ CREATE TABLE nn_t%d (
+ %s
+ )
+ """
+ % (idx, ", \n".join(cols))
)
- """
- % (idx, ", \n".join(cols))
- )
reflected.extend(
{
return dt
with testing.db.begin() as conn:
- now = conn.scalar("select now()")
+ now = conn.exec_driver_sql("select now()").scalar()
conn.execute(ts_table.insert(), {"t1": now, "t2": None})
conn.execute(ts_table.insert(), {"t1": None, "t2": None})
conn.execute(ts_table.insert(), {"t2": None})
- new_now = conn.scalar("select now()")
+ new_now = conn.exec_driver_sql("select now()").scalar()
eq_(
[
@classmethod
def setup_class(cls):
- testing.db.execute(
- """
- create or replace procedure foo(x_in IN number, x_out OUT number,
- y_out OUT number, z_out OUT varchar) IS
- retval number;
- begin
- retval := 6;
- x_out := 10;
- y_out := x_in * 15;
- z_out := NULL;
- end;
- """
- )
+ with testing.db.connect() as c:
+ c.exec_driver_sql(
+ """
+create or replace procedure foo(x_in IN number, x_out OUT number,
+y_out OUT number, z_out OUT varchar) IS
+retval number;
+begin
+ retval := 6;
+ x_out := 10;
+ y_out := x_in * 15;
+ z_out := NULL;
+end;
+ """
+ )
def test_out_params(self):
result = testing.db.execute(
@classmethod
def teardown_class(cls):
- testing.db.execute("DROP PROCEDURE foo")
+ testing.db.execute(text("DROP PROCEDURE foo"))
class QuotedBindRoundTripTest(fixtures.TestBase):
dialect = self._dialect((12, 2, 0))
conn = mock.Mock(
- execute=mock.Mock(return_value=mock.Mock(scalar=lambda: "12.2.0"))
+ exec_driver_sql=mock.Mock(
+ return_value=mock.Mock(scalar=lambda: "12.2.0")
+ )
)
dialect.initialize(conn)
eq_(dialect.server_version_info, (12, 2, 0))
dialect = self._dialect((12, 2, 0))
conn = mock.Mock(
- execute=mock.Mock(return_value=mock.Mock(scalar=lambda: "12.2.0"))
+ exec_driver_sql=mock.Mock(
+ return_value=mock.Mock(scalar=lambda: "12.2.0")
+ )
)
dialect.initialize(conn)
eq_(dialect.server_version_info, (12, 2, 0))
dialect = self._dialect((11, 2, 0))
conn = mock.Mock(
- execute=mock.Mock(return_value=mock.Mock(scalar="11.0.0"))
+ exec_driver_sql=mock.Mock(return_value=mock.Mock(scalar="11.0.0"))
)
dialect.initialize(conn)
eq_(dialect.server_version_info, (11, 2, 0))
dialect = self._dialect((12, 2, 0))
conn = mock.Mock(
- execute=mock.Mock(return_value=mock.Mock(scalar=lambda: "11.0.0"))
+ exec_driver_sql=mock.Mock(
+ return_value=mock.Mock(scalar=lambda: "11.0.0")
+ )
)
dialect.initialize(conn)
eq_(dialect.server_version_info, (12, 2, 0))
)
conn = mock.Mock(
- execute=mock.Mock(return_value=mock.Mock(scalar=c122))
+ exec_driver_sql=mock.Mock(return_value=mock.Mock(scalar=c122))
)
dialect.initialize(conn)
eq_(dialect.server_version_info, (12, 2, 0))
return "12.thisiscrap.0"
conn = mock.Mock(
- execute=mock.Mock(return_value=mock.Mock(scalar=c122))
+ exec_driver_sql=mock.Mock(return_value=mock.Mock(scalar=c122))
)
dialect.initialize(conn)
eq_(dialect.server_version_info, (12, 2, 0))
__backend__ = True
def test_basic(self):
- eq_(
- testing.db.execute(
- "/*+ this is a comment */ SELECT 1 FROM " "DUAL"
- ).fetchall(),
- [(1,)],
- )
+ with testing.db.connect() as conn:
+ eq_(
+ conn.exec_driver_sql(
+ "/*+ this is a comment */ SELECT 1 FROM " "DUAL"
+ ).fetchall(),
+ [(1,)],
+ )
def test_sequences_are_integers(self):
seq = Sequence("foo_seq")
from sqlalchemy.testing.schema import Table
+def exec_sql(engine, sql, *args, **kwargs):
+ with engine.connect() as conn:
+ return conn.exec_driver_sql(sql, *args, **kwargs)
+
+
class MultiSchemaTest(fixtures.TestBase, AssertsCompiledSQL):
__only_on__ = "oracle"
__backend__ = True
% {"test_schema": testing.config.test_schema}
).split(";"):
if stmt.strip():
- testing.db.execute(stmt)
+ exec_sql(testing.db, stmt)
@classmethod
def teardown_class(cls):
% {"test_schema": testing.config.test_schema}
).split(";"):
if stmt.strip():
- testing.db.execute(stmt)
+ exec_sql(testing.db, stmt)
@testing.provide_metadata
def test_create_same_names_explicit_schema(self):
)
def test_reflect_local_to_remote(self):
- testing.db.execute(
+ exec_sql(
+ testing.db,
"CREATE TABLE localtable (id INTEGER "
"PRIMARY KEY, parent_id INTEGER REFERENCES "
"%(test_schema)s.parent(id))"
- % {"test_schema": testing.config.test_schema}
+ % {"test_schema": testing.config.test_schema},
)
try:
meta = MetaData(testing.db)
parent.join(lcl)
).execute().fetchall()
finally:
- testing.db.execute("DROP TABLE localtable")
+ exec_sql(testing.db, "DROP TABLE localtable")
def test_reflect_alt_owner_implicit(self):
meta = MetaData(testing.db)
).execute().fetchall()
def test_reflect_alt_owner_synonyms(self):
- testing.db.execute(
+ exec_sql(
+ testing.db,
"CREATE TABLE localtable (id INTEGER "
"PRIMARY KEY, parent_id INTEGER REFERENCES "
- "%s.ptable(id))" % testing.config.test_schema
+ "%s.ptable(id))" % testing.config.test_schema,
)
try:
meta = MetaData(testing.db)
parent.join(lcl)
).execute().fetchall()
finally:
- testing.db.execute("DROP TABLE localtable")
+ exec_sql(testing.db, "DROP TABLE localtable")
def test_reflect_remote_synonyms(self):
meta = MetaData(testing.db)
__backend__ = True
def setup(self):
- testing.db.execute("create table my_table (id integer)")
- testing.db.execute(
- "create global temporary table my_temp_table (id integer)"
+ exec_sql(testing.db, "create table my_table (id integer)")
+ exec_sql(
+ testing.db,
+ "create global temporary table my_temp_table (id integer)",
)
- testing.db.execute(
- "create table foo_table (id integer) tablespace SYSTEM"
+ exec_sql(
+ testing.db, "create table foo_table (id integer) tablespace SYSTEM"
)
def teardown(self):
- testing.db.execute("drop table my_temp_table")
- testing.db.execute("drop table my_table")
- testing.db.execute("drop table foo_table")
+ exec_sql(testing.db, "drop table my_temp_table")
+ exec_sql(testing.db, "drop table my_table")
+ exec_sql(testing.db, "drop table foo_table")
def test_table_names_no_system(self):
insp = inspect(testing.db)
__backend__ = True
def setup(self):
- testing.db.execute(
+ exec_sql(
+ testing.db,
"""
CREATE TABLE admin_docindex(
token char(20),
TABLESPACE users
PCTTHRESHOLD 20
OVERFLOW TABLESPACE users
- """
+ """,
)
def teardown(self):
- testing.db.execute("drop table admin_docindex")
+ exec_sql(testing.db, "drop table admin_docindex")
def test_reflect_all(self):
m = MetaData(testing.db)
)
metadata.create_all()
- testing.db.execute(
- "CREATE INDEX DATA_IDX ON " "TEST_INDEX_REFLECT (UPPER(DATA))"
+ exec_sql(
+ testing.db,
+ "CREATE INDEX DATA_IDX ON " "TEST_INDEX_REFLECT (UPPER(DATA))",
)
m2 = MetaData(testing.db)
Table("test_index_reflect", m2, autoload=True)
def all_tables_compression_missing():
try:
- testing.db.execute("SELECT compression FROM all_tables")
- if "Enterprise Edition" not in testing.db.scalar(
- "select * from v$version"
+ exec_sql(testing.db, "SELECT compression FROM all_tables")
+ if (
+ "Enterprise Edition"
+ not in exec_sql(testing.db, "select * from v$version").scalar()
):
return True
return False
def all_tables_compress_for_missing():
try:
- testing.db.execute("SELECT compress_for FROM all_tables")
- if "Enterprise Edition" not in testing.db.scalar(
- "select * from v$version"
+ exec_sql(testing.db, "SELECT compress_for FROM all_tables")
+ if (
+ "Enterprise Edition"
+ not in exec_sql(testing.db, "select * from v$version").scalar()
):
return True
return False
# when accessing via a different username as we do with the
# multiprocess test suite, so testing here is minimal
with testing.db.connect() as conn:
- conn.execute(
+ conn.exec_driver_sql(
"create table test_table "
"(id integer primary key, data varchar2(50))"
)
- conn.execute(
+ conn.exec_driver_sql(
"create synonym test_table_syn "
"for test_table@%s" % cls.dblink
)
@classmethod
def teardown_class(cls):
with testing.db.connect() as conn:
- conn.execute("drop synonym test_table_syn")
- conn.execute("drop table test_table")
+ conn.exec_driver_sql("drop synonym test_table_syn")
+ conn.exec_driver_sql("drop table test_table")
def test_reflection(self):
"""test the resolution of the synonym/dblink. """
from sqlalchemy.util import u
+def exec_sql(engine, sql, *args, **kwargs):
+    """Run a driver-level SQL string against *engine* on a throwaway connection.
+
+    Replacement for the removed-in-2.0 ``engine.execute(<string>)`` pattern;
+    ``Connection.exec_driver_sql()`` sends the raw string to the DBAPI
+    without SQL compilation.
+
+    NOTE(review): the connection context exits before callers fetch from the
+    returned result -- confirm the result remains usable after the connection
+    is released for the drivers exercised by these tests.
+    """
+    with engine.connect() as conn:
+        return conn.exec_driver_sql(sql, *args, **kwargs)
+
+
class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = oracle.OracleDialect()
)
eq_(
- testing.db.execute(
- "select numericcol from t1 order by intcol"
+ exec_sql(
+ testing.db, "select numericcol from t1 order by intcol"
).fetchall(),
[(float("inf"),), (float("-inf"),)],
)
)
eq_(
- testing.db.execute(
- "select numericcol from t1 order by intcol"
+ exec_sql(
+ testing.db, "select numericcol from t1 order by intcol"
).fetchall(),
[(decimal.Decimal("Infinity"),), (decimal.Decimal("-Infinity"),)],
)
eq_(
[
tuple(str(col) for col in row)
- for row in testing.db.execute(
- "select numericcol from t1 order by intcol"
+ for row in exec_sql(
+ testing.db, "select numericcol from t1 order by intcol"
)
],
[("nan",), ("nan",)],
)
eq_(
- testing.db.execute(
- "select numericcol from t1 order by intcol"
+ exec_sql(
+ testing.db, "select numericcol from t1 order by intcol"
).fetchall(),
[(decimal.Decimal("NaN"),), (decimal.Decimal("NaN"),)],
)
stmt = "SELECT idata, ndata, ndata2, nidata, fdata FROM foo"
- row = testing.db.execute(stmt).fetchall()[0]
+ row = exec_sql(testing.db, stmt).fetchall()[0]
eq_(
[type(x) for x in row],
[int, decimal.Decimal, decimal.Decimal, int, float],
(SELECT CAST((SELECT fdata FROM foo) AS FLOAT) FROM DUAL) AS fdata
FROM dual
"""
- row = testing.db.execute(stmt).fetchall()[0]
+ row = exec_sql(testing.db, stmt).fetchall()[0]
eq_(
[type(x) for x in row],
[int, decimal.Decimal, int, int, decimal.Decimal],
)
WHERE ROWNUM >= 0) anon_1
"""
- row = testing.db.execute(stmt).fetchall()[0]
+ row = exec_sql(testing.db, stmt).fetchall()[0]
eq_(
[type(x) for x in row],
[int, decimal.Decimal, int, int, decimal.Decimal],
engine = testing_engine(options=dict(coerce_to_decimal=False))
# raw SQL no longer coerces to decimal
- value = engine.scalar("SELECT 5.66 FROM DUAL")
+ value = exec_sql(engine, "SELECT 5.66 FROM DUAL").scalar()
assert isinstance(value, float)
# explicit typing still *does* coerce to decimal
assert isinstance(value, decimal.Decimal)
# default behavior is raw SQL coerces to decimal
- value = testing.db.scalar("SELECT 5.66 FROM DUAL")
+ value = exec_sql(testing.db, "SELECT 5.66 FROM DUAL").scalar()
assert isinstance(value, decimal.Decimal)
@testing.combinations(
cx_oracle_result = cursor.fetchone()[0]
cursor.close()
- sqla_result = conn.scalar(stmt)
+ sqla_result = conn.exec_driver_sql(stmt).scalar()
eq_(sqla_result, cx_oracle_result)
)
def test_coerce_to_unicode(self):
engine = testing_engine(options=dict(coerce_to_unicode=False))
- value = engine.scalar("SELECT 'hello' FROM DUAL")
+ value = exec_sql(engine, "SELECT 'hello' FROM DUAL").scalar()
assert isinstance(value, util.binary_type)
- value = testing.db.scalar("SELECT 'hello' FROM DUAL")
+ value = exec_sql(testing.db, "SELECT 'hello' FROM DUAL").scalar()
assert isinstance(value, util.text_type)
@testing.provide_metadata
def test_longstring(self):
metadata = MetaData(testing.db)
- testing.db.execute(
+ exec_sql(
+ testing.db,
"""
CREATE TABLE Z_TEST
(
ID NUMERIC(22) PRIMARY KEY,
ADD_USER VARCHAR2(20) NOT NULL
)
- """
+ """,
)
try:
t = Table("z_test", metadata, autoload=True)
t.insert().execute(id=1.0, add_user="foobar")
assert t.select().execute().fetchall() == [(1, "foobar")]
finally:
- testing.db.execute("DROP TABLE Z_TEST")
+ exec_sql(testing.db, "DROP TABLE Z_TEST")
class LOBFetchTest(fixtures.TablesTest):
eq_(row["bindata"], b("this is binary 1"))
def test_lobs_with_convert_raw(self):
- row = testing.db.execute("select data, bindata from z_test").first()
+ row = exec_sql(testing.db, "select data, bindata from z_test").first()
eq_(row["data"], "this is text 1")
eq_(row["bindata"], b("this is binary 1"))
engine = testing_engine(
options=dict(auto_convert_lobs=False, arraysize=1)
)
- result = engine.execute(
- "select id, data, bindata from z_test order by id"
+ result = exec_sql(
+ engine, "select id, data, bindata from z_test order by id"
)
results = result.fetchall()
engine = testing_engine(
options=dict(auto_convert_lobs=True, arraysize=1)
)
- result = engine.execute(
- "select id, data, bindata from z_test order by id"
+ result = exec_sql(
+ engine, "select id, data, bindata from z_test order by id"
)
results = result.fetchall()
{},
),
]:
- test_exp = conn.scalar(stmt, **kw)
+ if isinstance(stmt, util.string_types):
+ test_exp = conn.exec_driver_sql(stmt, kw).scalar()
+ else:
+ test_exp = conn.scalar(stmt, **kw)
eq_(test_exp, exp)
assert type(test_exp) is type(exp)
def test_version_parsing(self):
def mock_conn(res):
return mock.Mock(
- execute=mock.Mock(
+ exec_driver_sql=mock.Mock(
return_value=mock.Mock(scalar=mock.Mock(return_value=res))
)
)
conn = testing.db.connect()
trans = conn.begin()
try:
- conn.execute(
+ conn.exec_driver_sql(
"""
CREATE OR REPLACE FUNCTION note(message varchar) RETURNS integer AS $$
BEGIN
$$ LANGUAGE plpgsql;
"""
)
- conn.execute("SELECT note('hi there')")
- conn.execute("SELECT note('another note')")
+ conn.exec_driver_sql("SELECT note('hi there')")
+ conn.exec_driver_sql("SELECT note('another note')")
finally:
trans.rollback()
finally:
@engines.close_open_connections
def test_client_encoding(self):
c = testing.db.connect()
- current_encoding = c.execute("show client_encoding").fetchone()[0]
+ current_encoding = c.exec_driver_sql(
+ "show client_encoding"
+ ).fetchone()[0]
c.close()
# attempt to use an encoding that's not
e = engines.testing_engine(options={"client_encoding": test_encoding})
c = e.connect()
- new_encoding = c.execute("show client_encoding").fetchone()[0]
+ new_encoding = c.exec_driver_sql("show client_encoding").fetchone()[0]
eq_(new_encoding, test_encoding)
@testing.requires.psycopg2_or_pg8000_compatibility
assert_raises_message(
exc.ProgrammingError,
'prepared transaction with identifier "gilberte" does not exist',
- c.execute,
+ c.exec_driver_sql,
"commit prepared 'gilberte'",
)
seq = Sequence("fooseq")
t = Table("mytable", meta1, Column("col1", Integer, seq))
seq.drop()
- testing.db.execute("CREATE SEQUENCE fooseq")
+ testing.db.execute(text("CREATE SEQUENCE fooseq"))
t.create(checkfirst=True)
@testing.provide_metadata
try:
meta = MetaData(testing.db)
testing.db.execute(
- """
+ text(
+ """
CREATE TABLE speedy_users
(
speedy_user_id SERIAL PRIMARY KEY,
user_password VARCHAR NOT NULL
);
"""
+ )
)
t = Table("speedy_users", meta, autoload=True)
r = t.insert().execute(user_name="user", user_password="lala")
result = t.select().execute().fetchall()
assert result == [(1, "user", "lala")]
finally:
- testing.db.execute("drop table speedy_users")
+ testing.db.execute(text("drop table speedy_users"))
@testing.requires.psycopg2_or_pg8000_compatibility
def test_numeric_raise(self):
'CREATE DOMAIN "SomeSchema"."Quoted.Domain" INTEGER DEFAULT 0',
]:
try:
- con.execute(ddl)
+ con.exec_driver_sql(ddl)
except exc.DBAPIError as e:
if "already exists" not in str(e):
raise e
- con.execute(
+ con.exec_driver_sql(
"CREATE TABLE testtable (question integer, answer " "testdomain)"
)
- con.execute(
+ con.exec_driver_sql(
"CREATE TABLE test_schema.testtable(question "
"integer, answer test_schema.testdomain, anything "
"integer)"
)
- con.execute(
+ con.exec_driver_sql(
"CREATE TABLE crosschema (question integer, answer "
"test_schema.testdomain)"
)
- con.execute("CREATE TABLE enum_test (id integer, data enumdomain)")
+ con.exec_driver_sql(
+ "CREATE TABLE enum_test (id integer, data enumdomain)"
+ )
- con.execute("CREATE TABLE array_test (id integer, data arraydomain)")
+ con.exec_driver_sql(
+ "CREATE TABLE array_test (id integer, data arraydomain)"
+ )
- con.execute(
+ con.exec_driver_sql(
"CREATE TABLE quote_test "
'(id integer, data "SomeSchema"."Quoted.Domain")'
)
@classmethod
def teardown_class(cls):
con = testing.db.connect()
- con.execute("DROP TABLE testtable")
- con.execute("DROP TABLE test_schema.testtable")
- con.execute("DROP TABLE crosschema")
- con.execute("DROP TABLE quote_test")
- con.execute("DROP DOMAIN testdomain")
- con.execute("DROP DOMAIN test_schema.testdomain")
- con.execute("DROP TABLE enum_test")
- con.execute("DROP DOMAIN enumdomain")
- con.execute("DROP TYPE testtype")
- con.execute("DROP TABLE array_test")
- con.execute("DROP DOMAIN arraydomain")
- con.execute('DROP DOMAIN "SomeSchema"."Quoted.Domain"')
- con.execute('DROP SCHEMA "SomeSchema"')
+ con.exec_driver_sql("DROP TABLE testtable")
+ con.exec_driver_sql("DROP TABLE test_schema.testtable")
+ con.exec_driver_sql("DROP TABLE crosschema")
+ con.exec_driver_sql("DROP TABLE quote_test")
+ con.exec_driver_sql("DROP DOMAIN testdomain")
+ con.exec_driver_sql("DROP DOMAIN test_schema.testdomain")
+ con.exec_driver_sql("DROP TABLE enum_test")
+ con.exec_driver_sql("DROP DOMAIN enumdomain")
+ con.exec_driver_sql("DROP TYPE testtype")
+ con.exec_driver_sql("DROP TABLE array_test")
+ con.exec_driver_sql("DROP DOMAIN arraydomain")
+ con.exec_driver_sql('DROP DOMAIN "SomeSchema"."Quoted.Domain"')
+ con.exec_driver_sql('DROP SCHEMA "SomeSchema"')
def test_table_is_reflected(self):
metadata = MetaData(testing.db)
eq_(t2.c.id.server_default.arg.text, "nextval('t_id_seq'::regclass)")
r = t2.insert().execute()
eq_(r.inserted_primary_key, [1])
- testing.db.connect().execution_options(autocommit=True).execute(
- "alter table t_id_seq rename to foobar_id_seq"
- )
+ testing.db.connect().execution_options(
+ autocommit=True
+ ).exec_driver_sql("alter table t_id_seq rename to foobar_id_seq")
m3 = MetaData(testing.db)
t3 = Table("t", m3, autoload=True, implicit_returning=False)
eq_(
Column("x", Integer),
)
metadata.create_all()
- testing.db.connect().execution_options(autocommit=True).execute(
- "alter table t alter column id type varchar(50)"
- )
+ testing.db.connect().execution_options(
+ autocommit=True
+ ).exec_driver_sql("alter table t alter column id type varchar(50)")
m2 = MetaData(testing.db)
t2 = Table("t", m2, autoload=True)
eq_(t2.c.id.autoincrement, False)
metadata = self.metadata
Table("t", metadata, Column("id", Integer, primary_key=True))
metadata.create_all()
- testing.db.connect().execution_options(autocommit=True).execute(
- "alter table t rename id to t_id"
- )
+ testing.db.connect().execution_options(
+ autocommit=True
+ ).exec_driver_sql("alter table t rename id to t_id")
m2 = MetaData(testing.db)
t2 = Table("t", m2, autoload=True)
eq_([c.name for c in t2.primary_key], ["t_id"])
conn = testing.db.connect()
conn.detach()
- conn.execute("SET search_path TO test_schema, test_schema_2")
+ conn.exec_driver_sql("SET search_path TO test_schema, test_schema_2")
meta2 = MetaData(bind=conn)
subject = Table(
"subject",
with testing.db.connect() as conn:
conn.detach()
- conn.execute(
+ conn.exec_driver_sql(
"set search_path to test_schema_2, test_schema, public"
)
with testing.db.connect() as conn:
conn.detach()
- conn.execute(
+ conn.exec_driver_sql(
"set search_path to test_schema_2, test_schema, public"
)
meta2 = MetaData(conn)
Column("aname", String(20)),
)
metadata.create_all()
- testing.db.execute(
- """
- create index idx1 on party ((id || name))
- """
- )
- testing.db.execute(
- """
- create unique index idx2 on party (id) where name = 'test'
- """
- )
- testing.db.execute(
- """
- create index idx3 on party using btree
- (lower(name::text), lower(aname::text))
- """
- )
+ with testing.db.connect() as c:
+ c.exec_driver_sql("create index idx1 on party ((id || name))")
+ c.exec_driver_sql(
+ "create unique index idx2 on party (id) where name = 'test'"
+ )
+ c.exec_driver_sql(
+ """
+ create index idx3 on party using btree
+ (lower(name::text), lower(aname::text))
+ """
+ )
def go():
m2 = MetaData(testing.db)
t1.create(conn)
# check ASC, DESC options alone
- conn.execute(
+ conn.exec_driver_sql(
"""
create index idx1 on party
(id, name ASC, aname DESC)
)
# check DESC w/ NULLS options
- conn.execute(
+ conn.exec_driver_sql(
"""
create index idx2 on party
(name DESC NULLS FIRST, aname DESC NULLS LAST)
)
# check ASC w/ NULLS options
- conn.execute(
+ conn.exec_driver_sql(
"""
create index idx3 on party
(name ASC NULLS FIRST, aname ASC NULLS LAST)
)
metadata.create_all()
conn = testing.db.connect().execution_options(autocommit=True)
- conn.execute("CREATE INDEX idx1 ON t (x)")
- conn.execute("ALTER TABLE t RENAME COLUMN x to y")
+ conn.exec_driver_sql("CREATE INDEX idx1 ON t (x)")
+ conn.exec_driver_sql("ALTER TABLE t RENAME COLUMN x to y")
ind = testing.db.dialect.get_indexes(conn, "t", None)
eq_(ind, [{"unique": False, "column_names": ["y"], "name": "idx1"}])
metadata.create_all()
with testing.db.connect().execution_options(autocommit=True) as conn:
- conn.execute("CREATE INDEX idx1 ON t (x) WITH (fillfactor = 50)")
+ conn.exec_driver_sql(
+ "CREATE INDEX idx1 ON t (x) WITH (fillfactor = 50)"
+ )
ind = testing.db.dialect.get_indexes(conn, "t", None)
eq_(
)
metadata.create_all()
with testing.db.connect().execution_options(autocommit=True) as conn:
- conn.execute("CREATE INDEX idx1 ON t USING gin (x)")
+ conn.exec_driver_sql("CREATE INDEX idx1 ON t USING gin (x)")
ind = testing.db.dialect.get_indexes(conn, "t", None)
eq_(
def setup_class(cls):
con = testing.db.connect()
try:
- con.execute(
+ con.exec_driver_sql(
"CREATE DOMAIN int_domain AS INTEGER DEFAULT " "42 NOT NULL"
)
- con.execute("CREATE DOMAIN str_domain AS VARCHAR(255)")
- con.execute("CREATE DOMAIN rem_domain AS BLOB SUB_TYPE TEXT")
- con.execute("CREATE DOMAIN img_domain AS BLOB SUB_TYPE " "BINARY")
+ con.exec_driver_sql("CREATE DOMAIN str_domain AS VARCHAR(255)")
+ con.exec_driver_sql(
+ "CREATE DOMAIN rem_domain AS BLOB SUB_TYPE TEXT"
+ )
+ con.exec_driver_sql(
+ "CREATE DOMAIN img_domain AS BLOB SUB_TYPE " "BINARY"
+ )
except ProgrammingError as e:
if "attempt to store duplicate value" not in str(e):
raise e
- con.execute("""CREATE GENERATOR gen_testtable_id""")
- con.execute(
+ con.exec_driver_sql("""CREATE GENERATOR gen_testtable_id""")
+ con.exec_driver_sql(
"""CREATE TABLE testtable (question int_domain,
answer str_domain DEFAULT 'no answer',
remark rem_domain DEFAULT '',
dt timestamp,
redundant str_domain DEFAULT NULL)"""
)
- con.execute(
+ con.exec_driver_sql(
"ALTER TABLE testtable "
"ADD CONSTRAINT testtable_pk PRIMARY KEY "
"(question)"
)
- con.execute(
+ con.exec_driver_sql(
"CREATE TRIGGER testtable_autoid FOR testtable "
" ACTIVE BEFORE INSERT AS"
" BEGIN"
@classmethod
def teardown_class(cls):
con = testing.db.connect()
- con.execute("DROP TABLE testtable")
- con.execute("DROP DOMAIN int_domain")
- con.execute("DROP DOMAIN str_domain")
- con.execute("DROP DOMAIN rem_domain")
- con.execute("DROP DOMAIN img_domain")
- con.execute("DROP GENERATOR gen_testtable_id")
+ con.exec_driver_sql("DROP TABLE testtable")
+ con.exec_driver_sql("DROP DOMAIN int_domain")
+ con.exec_driver_sql("DROP DOMAIN str_domain")
+ con.exec_driver_sql("DROP DOMAIN rem_domain")
+ con.exec_driver_sql("DROP DOMAIN img_domain")
+ con.exec_driver_sql("DROP GENERATOR gen_testtable_id")
def test_table_is_reflected(self):
from sqlalchemy.types import (
@classmethod
def setup_class(cls):
con = testing.db.connect()
- con.execute(cls.AUTOINC_DM)
- con.execute(cls.MONEY_DM)
- con.execute(cls.NOSI_DM)
- con.execute(cls.RIT_TESORERIA_CAPITOLO_DM)
- con.execute(cls.DEF_ERROR_TB)
- con.execute(cls.DEF_ERROR_NODOM_TB)
+ con.exec_driver_sql(cls.AUTOINC_DM)
+ con.exec_driver_sql(cls.MONEY_DM)
+ con.exec_driver_sql(cls.NOSI_DM)
+ con.exec_driver_sql(cls.RIT_TESORERIA_CAPITOLO_DM)
+ con.exec_driver_sql(cls.DEF_ERROR_TB)
+ con.exec_driver_sql(cls.DEF_ERROR_NODOM_TB)
- con.execute(cls.DOM_ID)
- con.execute(cls.TABLE_A)
- con.execute(cls.TABLE_B)
+ con.exec_driver_sql(cls.DOM_ID)
+ con.exec_driver_sql(cls.TABLE_A)
+ con.exec_driver_sql(cls.TABLE_B)
@classmethod
def teardown_class(cls):
con = testing.db.connect()
- con.execute("DROP TABLE a")
- con.execute("DROP TABLE b")
- con.execute("DROP DOMAIN dom_id")
- con.execute("DROP TABLE def_error_nodom")
- con.execute("DROP TABLE def_error")
- con.execute("DROP DOMAIN rit_tesoreria_capitolo_dm")
- con.execute("DROP DOMAIN nosi_dm")
- con.execute("DROP DOMAIN money_dm")
- con.execute("DROP DOMAIN autoinc_dm")
+ con.exec_driver_sql("DROP TABLE a")
+ con.exec_driver_sql("DROP TABLE b")
+ con.exec_driver_sql("DROP DOMAIN dom_id")
+ con.exec_driver_sql("DROP TABLE def_error_nodom")
+ con.exec_driver_sql("DROP TABLE def_error")
+ con.exec_driver_sql("DROP DOMAIN rit_tesoreria_capitolo_dm")
+ con.exec_driver_sql("DROP DOMAIN nosi_dm")
+ con.exec_driver_sql("DROP DOMAIN money_dm")
+ con.exec_driver_sql("DROP DOMAIN autoinc_dm")
def test_tables_are_reflected_same_way(self):
metadata = MetaData(testing.db)
from sqlalchemy.util import ue
+def exec_sql(engine, sql, *args, **kwargs):
+    """Run a driver-level SQL string against *engine*.
+
+    Uses ``close_with_result=True`` so the connection is released back to the
+    pool once the returned result is consumed, which lets callers fetch rows
+    after this function returns (unlike a ``with engine.connect()`` block).
+    """
+    conn = engine.connect(close_with_result=True)
+    return conn.exec_driver_sql(sql, *args, **kwargs)
+
+
class TestTypes(fixtures.TestBase, AssertsExecutionResults):
__only_on__ = "sqlite"
)
try:
meta.create_all()
- testing.db.execute(
- "INSERT INTO bool_table (id, boo) " "VALUES (1, 'false');"
+ exec_sql(
+ testing.db,
+ "INSERT INTO bool_table (id, boo) " "VALUES (1, 'false');",
)
- testing.db.execute(
- "INSERT INTO bool_table (id, boo) " "VALUES (2, 'true');"
+ exec_sql(
+ testing.db,
+ "INSERT INTO bool_table (id, boo) " "VALUES (2, 'true');",
)
- testing.db.execute(
- "INSERT INTO bool_table (id, boo) " "VALUES (3, '1');"
+ exec_sql(
+ testing.db,
+ "INSERT INTO bool_table (id, boo) " "VALUES (3, '1');",
)
- testing.db.execute(
- "INSERT INTO bool_table (id, boo) " "VALUES (4, '0');"
+ exec_sql(
+ testing.db,
+ "INSERT INTO bool_table (id, boo) " "VALUES (4, '0');",
)
- testing.db.execute(
- "INSERT INTO bool_table (id, boo) " "VALUES (5, 1);"
+ exec_sql(
+ testing.db,
+ "INSERT INTO bool_table (id, boo) " "VALUES (5, 1);",
)
- testing.db.execute(
- "INSERT INTO bool_table (id, boo) " "VALUES (6, 0);"
+ exec_sql(
+ testing.db,
+ "INSERT INTO bool_table (id, boo) " "VALUES (6, 0);",
)
eq_(
t.select(t.c.boo).order_by(t.c.id).execute().fetchall(),
testing.db.execute(
t.insert().values(d=datetime.datetime(2010, 10, 15, 12, 37, 0))
)
- testing.db.execute("insert into t (d) values ('2004-05-21T00:00:00')")
+ exec_sql(
+ testing.db, "insert into t (d) values ('2004-05-21T00:00:00')"
+ )
eq_(
- testing.db.execute("select * from t order by d").fetchall(),
+ exec_sql(testing.db, "select * from t order by d").fetchall(),
[("2004-05-21T00:00:00",), ("2010-10-15T12:37:00",)],
)
eq_(
testing.db.execute(
t.insert().values(d=datetime.datetime(2010, 10, 15, 12, 37, 0))
)
- testing.db.execute("insert into t (d) values ('20040521000000')")
+ exec_sql(testing.db, "insert into t (d) values ('20040521000000')")
eq_(
- testing.db.execute("select * from t order by d").fetchall(),
+ exec_sql(testing.db, "select * from t order by d").fetchall(),
[("20040521000000",), ("20101015123700",)],
)
eq_(
t = Table("t", self.metadata, Column("d", sqlite_date))
self.metadata.create_all(testing.db)
testing.db.execute(t.insert().values(d=datetime.date(2010, 10, 15)))
- testing.db.execute("insert into t (d) values ('20040521')")
+ exec_sql(testing.db, "insert into t (d) values ('20040521')")
eq_(
- testing.db.execute("select * from t order by d").fetchall(),
+ exec_sql(testing.db, "select * from t order by d").fetchall(),
[("20040521",), ("20101015",)],
)
eq_(
t = Table("t", self.metadata, Column("d", sqlite_date))
self.metadata.create_all(testing.db)
testing.db.execute(t.insert().values(d=datetime.date(2010, 10, 15)))
- testing.db.execute("insert into t (d) values ('2004|05|21')")
+ exec_sql(testing.db, "insert into t (d) values ('2004|05|21')")
eq_(
- testing.db.execute("select * from t order by d").fetchall(),
+ exec_sql(testing.db, "select * from t order by d").fetchall(),
[("2004|05|21",), ("2010|10|15",)],
)
eq_(
val INTEGER NOT NULL DEFAULT 0
)"""
try:
- db.execute(table)
+ exec_sql(db, table)
rt = Table("r_defaults", m, autoload=True)
for i, reflected in enumerate(rt.c):
eq_(str(reflected.server_default.arg), expected[i])
finally:
- db.execute("DROP TABLE r_defaults")
+ exec_sql(db, "DROP TABLE r_defaults")
def test_default_reflection_3(self):
db = testing.db
val INTEGER NOT NULL DEFAULT 0
)"""
try:
- db.execute(table)
+ exec_sql(db, table)
m1 = MetaData(db)
t1 = Table("r_defaults", m1, autoload=True)
- db.execute("DROP TABLE r_defaults")
+ exec_sql(db, "DROP TABLE r_defaults")
t1.create()
m2 = MetaData(db)
t2 = Table("r_defaults", m2, autoload=True)
"NOT NULL)",
)
finally:
- db.execute("DROP TABLE r_defaults")
+ exec_sql(db, "DROP TABLE r_defaults")
@testing.provide_metadata
def test_boolean_default(self):
"""Tests autoload of tables created with quoted column names."""
metadata = self.metadata
- testing.db.execute(
+ exec_sql(
+ testing.db,
"""CREATE TABLE "django_content_type" (
"id" integer NOT NULL PRIMARY KEY,
"django_stuff" text NULL
)
- """
+ """,
)
- testing.db.execute(
+ exec_sql(
+ testing.db,
"""
CREATE TABLE "django_admin_log" (
"id" integer NOT NULL PRIMARY KEY,
"object_id" text NULL,
"change_message" text NOT NULL
)
- """
+ """,
)
table1 = Table("django_admin_log", metadata, autoload=True)
table2 = Table("django_content_type", metadata, autoload=True)
"""
metadata = self.metadata
- testing.db.execute(
+ exec_sql(
+ testing.db,
r'''CREATE TABLE """a""" (
"""id""" integer NOT NULL PRIMARY KEY
)
- '''
+ ''',
)
# unfortunately, still can't do this; sqlite quadruples
# up the quotes on the table name here for pragma foreign_key_list
- # testing.db.execute(r'''
+        # exec_sql(testing.db, r'''
# CREATE TABLE """b""" (
# """id""" integer NOT NULL PRIMARY KEY,
# """aid""" integer NULL
eq_(insp.get_schema_names(), ["main", "test_schema"])
# implicitly creates a "temp" schema
- self.conn.execute("select * from sqlite_temp_master")
+ self.conn.exec_driver_sql("select * from sqlite_temp_master")
# we're not including it
insp = inspect(self.conn)
it is and True otherwise."""
try:
- testing.db.execute("CREATE VIRTUAL TABLE t using FTS3;")
- testing.db.execute("DROP TABLE t;")
+ exec_sql(testing.db, "CREATE VIRTUAL TABLE t using FTS3;")
+ exec_sql(testing.db, "DROP TABLE t;")
return False
except Exception:
return True
def setup_class(cls):
global metadata, cattable, matchtable
metadata = MetaData(testing.db)
- testing.db.execute(
+ exec_sql(
+ testing.db,
"""
CREATE VIRTUAL TABLE cattable using FTS3 (
id INTEGER NOT NULL,
description VARCHAR(50),
PRIMARY KEY (id)
)
- """
+ """,
)
cattable = Table("cattable", metadata, autoload=True)
- testing.db.execute(
+ exec_sql(
+ testing.db,
"""
CREATE VIRTUAL TABLE matchtable using FTS3 (
id INTEGER NOT NULL,
category_id INTEGER NOT NULL,
PRIMARY KEY (id)
)
- """
+ """,
)
matchtable = Table("matchtable", metadata, autoload=True)
metadata.create_all()
__only_on__ = "sqlite"
def setup(self):
- testing.db.execute("CREATE TABLE a (id INTEGER PRIMARY KEY)")
+ exec_sql(testing.db, "CREATE TABLE a (id INTEGER PRIMARY KEY)")
# this syntax actually works on other DBs perhaps we'd want to add
# tests to test_reflection
- testing.db.execute(
- "CREATE TABLE b (id INTEGER PRIMARY KEY REFERENCES a)"
+ exec_sql(
+ testing.db, "CREATE TABLE b (id INTEGER PRIMARY KEY REFERENCES a)"
)
def teardown(self):
- testing.db.execute("drop table b")
- testing.db.execute("drop table a")
+ exec_sql(testing.db, "drop table b")
+ exec_sql(testing.db, "drop table a")
def test_reflect_tables_fk_no_colref(self):
meta = MetaData()
@classmethod
def setup_class(cls):
with testing.db.begin() as conn:
- conn.execute('ATTACH %r AS "default"' % conn.engine.url.database)
- conn.execute('CREATE TABLE "default".a (id INTEGER PRIMARY KEY)')
+ conn.exec_driver_sql(
+ 'ATTACH %r AS "default"' % conn.engine.url.database
+ )
+ conn.exec_driver_sql(
+ 'CREATE TABLE "default".a (id INTEGER PRIMARY KEY)'
+ )
@classmethod
def teardown_class(cls):
with testing.db.begin() as conn:
try:
- conn.execute('drop table "default".a')
+ conn.exec_driver_sql('drop table "default".a')
except Exception:
pass
- conn.execute('DETACH DATABASE "default"')
+ conn.exec_driver_sql('DETACH DATABASE "default"')
def test_reflect(self):
with testing.db.begin() as conn:
def setup_class(cls):
with testing.db.begin() as conn:
- conn.execute("CREATE TABLE a1 (id INTEGER PRIMARY KEY)")
- conn.execute("CREATE TABLE a2 (id INTEGER PRIMARY KEY)")
- conn.execute(
+ conn.exec_driver_sql("CREATE TABLE a1 (id INTEGER PRIMARY KEY)")
+ conn.exec_driver_sql("CREATE TABLE a2 (id INTEGER PRIMARY KEY)")
+ conn.exec_driver_sql(
"CREATE TABLE b (id INTEGER PRIMARY KEY, "
"FOREIGN KEY(id) REFERENCES a1(id),"
"FOREIGN KEY(id) REFERENCES a2(id)"
")"
)
- conn.execute(
+ conn.exec_driver_sql(
"CREATE TABLE c (id INTEGER, "
"CONSTRAINT bar PRIMARY KEY(id),"
"CONSTRAINT foo1 FOREIGN KEY(id) REFERENCES a1(id),"
"CONSTRAINT foo2 FOREIGN KEY(id) REFERENCES a2(id)"
")"
)
- conn.execute(
+ conn.exec_driver_sql(
# the lower casing + inline is intentional here
"CREATE TABLE d (id INTEGER, x INTEGER unique)"
)
- conn.execute(
+ conn.exec_driver_sql(
# the lower casing + inline is intentional here
"CREATE TABLE d1 "
'(id INTEGER, "some ( STUPID n,ame" INTEGER unique)'
)
- conn.execute(
+ conn.exec_driver_sql(
# the lower casing + inline is intentional here
'CREATE TABLE d2 ( "some STUPID n,ame" INTEGER unique)'
)
- conn.execute(
+ conn.exec_driver_sql(
# the lower casing + inline is intentional here
'CREATE TABLE d3 ( "some STUPID n,ame" INTEGER NULL unique)'
)
- conn.execute(
+ conn.exec_driver_sql(
# lower casing + inline is intentional
"CREATE TABLE e (id INTEGER, x INTEGER references a2(id))"
)
- conn.execute(
+ conn.exec_driver_sql(
'CREATE TABLE e1 (id INTEGER, "some ( STUPID n,ame" INTEGER '
'references a2 ("some ( STUPID n,ame"))'
)
- conn.execute(
+ conn.exec_driver_sql(
"CREATE TABLE e2 (id INTEGER, "
'"some ( STUPID n,ame" INTEGER NOT NULL '
'references a2 ("some ( STUPID n,ame"))'
)
- conn.execute(
+ conn.exec_driver_sql(
"CREATE TABLE f (x INTEGER, CONSTRAINT foo_fx UNIQUE(x))"
)
- conn.execute(
+ conn.exec_driver_sql(
"CREATE TEMPORARY TABLE g "
"(x INTEGER, CONSTRAINT foo_gx UNIQUE(x))"
)
- conn.execute(
+ conn.exec_driver_sql(
# intentional broken casing
"CREATE TABLE h (x INTEGER, COnstraINT foo_hx unIQUE(x))"
)
- conn.execute(
+ conn.exec_driver_sql(
"CREATE TABLE i (x INTEGER, y INTEGER, PRIMARY KEY(x, y))"
)
- conn.execute(
+ conn.exec_driver_sql(
"CREATE TABLE j (id INTEGER, q INTEGER, p INTEGER, "
"PRIMARY KEY(id), FOreiGN KEY(q,p) REFERENCes i(x,y))"
)
- conn.execute(
+ conn.exec_driver_sql(
"CREATE TABLE k (id INTEGER, q INTEGER, p INTEGER, "
"PRIMARY KEY(id), "
"conSTRAINT my_fk FOreiGN KEY ( q , p ) "
meta.create_all(conn)
# will contain an "autoindex"
- conn.execute("create table o (foo varchar(20) primary key)")
- conn.execute(
+ conn.exec_driver_sql(
+ "create table o (foo varchar(20) primary key)"
+ )
+ conn.exec_driver_sql(
"CREATE TABLE onud_test (id INTEGER PRIMARY KEY, "
"c1 INTEGER, c2 INTEGER, c3 INTEGER, c4 INTEGER, "
"CONSTRAINT fk1 FOREIGN KEY (c1) REFERENCES a1(id) "
"ON UPDATE NO ACTION)"
)
- conn.execute(
+ conn.exec_driver_sql(
"CREATE TABLE cp ("
"q INTEGER check (q > 1 AND q < 6),\n"
"CONSTRAINT cq CHECK (q == 1 OR (q > 2 AND q < 5))\n"
")"
)
- conn.execute(
+ conn.exec_driver_sql(
"CREATE TABLE implicit_referred (pk integer primary key)"
)
# single col foreign key with no referred column given,
# must assume primary key of referred table
- conn.execute(
+ conn.exec_driver_sql(
"CREATE TABLE implicit_referrer "
"(id integer REFERENCES implicit_referred)"
)
- conn.execute(
+ conn.exec_driver_sql(
"CREATE TABLE implicit_referred_comp "
"(pk1 integer, pk2 integer, primary key (pk1, pk2))"
)
# composite foreign key with no referred columns given,
# must assume primary key of referred table
- conn.execute(
+ conn.exec_driver_sql(
"CREATE TABLE implicit_referrer_comp "
"(id1 integer, id2 integer, foreign key(id1, id2) "
"REFERENCES implicit_referred_comp)"
# worst case - FK that refers to nonexistent table so we cant
# get pks. requires FK pragma is turned off
- conn.execute(
+ conn.exec_driver_sql(
"CREATE TABLE implicit_referrer_comp_fake "
"(id1 integer, id2 integer, foreign key(id1, id2) "
"REFERENCES fake_table)"
"a2",
]:
try:
- conn.execute("drop table %s" % name)
+ conn.exec_driver_sql("drop table %s" % name)
except Exception:
pass
def test_foreign_key_options_unnamed_inline(self):
with testing.db.connect() as conn:
- conn.execute(
+ conn.exec_driver_sql(
"create table foo (id integer, "
"foreign key (id) references bar (id) on update cascade)"
)
@event.listens_for(engine, "begin")
def do_begin(conn):
# emit our own BEGIN
- conn.execute("BEGIN")
+ conn.exec_driver_sql("BEGIN")
return engine
conn = testing.db.connect()
for from_, to_ in self._fixture_as_string(fixture):
inspector = inspect(conn)
- conn.execute("CREATE TABLE foo (data %s)" % from_)
+ conn.exec_driver_sql("CREATE TABLE foo (data %s)" % from_)
try:
if warnings:
getattr(to_, attr, None),
)
finally:
- conn.execute("DROP TABLE foo")
+ conn.exec_driver_sql("DROP TABLE foo")
def test_lookup_direct_lookup(self):
self._test_lookup_direct(self._fixed_lookup_fixture())
trans.rollback()
metadata.bind = None
assert (
- conn.execute("select count(*) from test_table").scalar() == 0
+ conn.exec_driver_sql(
+ "select count(*) from test_table"
+ ).scalar()
+ == 0
)
finally:
metadata.drop_all(bind=conn)
from sqlalchemy import event
from sqlalchemy import ForeignKey
from sqlalchemy import func
+from sqlalchemy import INT
from sqlalchemy import Integer
from sqlalchemy import literal
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import TypeDecorator
+from sqlalchemy import VARCHAR
from sqlalchemy.engine import reflection
+from sqlalchemy.engine.base import Connection
from sqlalchemy.engine.base import Engine
from sqlalchemy.engine.mock import MockConnection
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing import is_false
+from sqlalchemy.testing import is_instance_of
from sqlalchemy.testing import is_true
from sqlalchemy.testing.mock import Mock
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
+def _string_deprecation_expect():
+    """Return a context manager asserting the 2.0 deprecation warning emitted
+    when a plain string is passed to ``Connection.execute()``."""
+    return testing.expect_deprecated_20(
+        r"Passing a string to Connection.execute\(\) is deprecated "
+        r"and will be removed in version 2.0"
+    )
+
+
class SomeException(Exception):
pass
class ConnectionlessDeprecationTest(fixtures.TestBase):
"""test various things associated with "connectionless" executions."""
+    def check_usage(self, inspector):
+        # The deprecated Inspector construction paths must still yield a
+        # real Connection from the internal operation context.
+        with inspector._operation_context() as conn:
+            is_instance_of(conn, Connection)
+
def test_inspector_constructor_engine(self):
with testing.expect_deprecated(
r"The __init__\(\) method on Inspector is deprecated and will "
i1 = reflection.Inspector(testing.db)
is_(i1.bind, testing.db)
+ self.check_usage(i1)
def test_inspector_constructor_connection(self):
with testing.db.connect() as conn:
is_(i1.bind, conn)
is_(i1.engine, testing.db)
+ self.check_usage(i1)
def test_inspector_from_engine(self):
with testing.expect_deprecated(
i1 = reflection.Inspector.from_engine(testing.db)
is_(i1.bind, testing.db)
+ self.check_usage(i1)
def test_bind_close_conn(self):
e = testing.db
with engine.connect() as conn:
try:
- conn.execute("SELECT FOO FROM I_DONT_EXIST")
+ conn.exec_driver_sql("SELECT FOO FROM I_DONT_EXIST")
assert False
except tsa.exc.DBAPIError as e:
eq_(canary.mock_calls[0][1][5], e.orig)
)
assert_raises_message(
- TypeError, "I'm not a DBAPI error", c.execute, "select "
+ TypeError,
+ "I'm not a DBAPI error",
+ c.exec_driver_sql,
+ "select ",
)
# no legacy event
eq_(listener.mock_calls, [])
)
+def select1(db):
+    """Return the string SQL for ``SELECT 1`` compiled for *db*'s dialect."""
+    return str(select([1]).compile(dialect=db.dialect))
+
+
class DeprecatedEngineFeatureTest(fixtures.TablesTest):
__backend__ = True
assert_raises(Exception, conn.transaction, fn, 5, value=8)
self._assert_no_data()
+ def test_execute_plain_string(self):
+ with _string_deprecation_expect():
+ testing.db.execute(select1(testing.db)).scalar()
+
+ def test_scalar_plain_string(self):
+ with _string_deprecation_expect():
+ testing.db.scalar(select1(testing.db))
+
+ # Tests for the warning when non-dict params are used
+ # @testing.combinations(42, (42,))
+ # def test_execute_positional_non_dicts(self, args):
+ # with testing.expect_deprecated(
+ # r"Usage of tuple or scalars as positional arguments of "
+ # ):
+ # testing.db.execute(text(select1(testing.db)), args).scalar()
+
+ # @testing.combinations(42, (42,))
+ # def test_scalar_positional_non_dicts(self, args):
+ # with testing.expect_deprecated(
+ # r"Usage of tuple or scalars as positional arguments of "
+ # ):
+ # testing.db.scalar(text(select1(testing.db)), args)
+
+
+class DeprecatedConnectionFeatureTest(fixtures.TablesTest):
+ __backend__ = True
+
+ def test_execute_plain_string(self):
+ with _string_deprecation_expect():
+ with testing.db.connect() as conn:
+ conn.execute(select1(testing.db)).scalar()
+
+ def test_scalar_plain_string(self):
+ with _string_deprecation_expect():
+ with testing.db.connect() as conn:
+ conn.scalar(select1(testing.db))
+
+ # Tests for the warning when non-dict params are used
+ # @testing.combinations(42, (42,))
+ # def test_execute_positional_non_dicts(self, args):
+ # with testing.expect_deprecated(
+ # r"Usage of tuple or scalars as positional arguments of "
+ # ):
+ # with testing.db.connect() as conn:
+ # conn.execute(text(select1(testing.db)), args).scalar()
+
+ # @testing.combinations(42, (42,))
+ # def test_scalar_positional_non_dicts(self, args):
+ # with testing.expect_deprecated(
+ # r"Usage of tuple or scalars as positional arguments of "
+ # ):
+ # with testing.db.connect() as conn:
+ # conn.scalar(text(select1(testing.db)), args)
+
class DeprecatedReflectionTest(fixtures.TablesTest):
@classmethod
):
c2_branch = c2.connect()
eq_(c2_branch._execution_options, {"foo": "bar"})
+
+
+class RawExecuteTest(fixtures.TablesTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ "users",
+ metadata,
+ Column("user_id", INT, primary_key=True, autoincrement=False),
+ Column("user_name", VARCHAR(20)),
+ )
+ Table(
+ "users_autoinc",
+ metadata,
+ Column(
+ "user_id", INT, primary_key=True, test_needs_autoincrement=True
+ ),
+ Column("user_name", VARCHAR(20)),
+ )
+
+ @testing.fails_on(
+ "postgresql+pg8000",
+ "pg8000 still doesn't allow single paren without params",
+ )
+ def test_no_params_option(self, connection):
+ stmt = (
+ "SELECT '%'"
+ + testing.db.dialect.statement_compiler(
+ testing.db.dialect, None
+ ).default_from()
+ )
+
+ result = (
+ connection.execution_options(no_parameters=True)
+ .execute(stmt)
+ .scalar()
+ )
+ eq_(result, "%")
+
+ @testing.requires.qmark_paramstyle
+ def test_raw_qmark(self, connection):
+ conn = connection
+
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) " "values (?, ?)",
+ (1, "jack"),
+ )
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) " "values (?, ?)",
+ [2, "fred"],
+ )
+
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) " "values (?, ?)",
+ [3, "ed"],
+ [4, "horse"],
+ )
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) " "values (?, ?)",
+ (5, "barney"),
+ (6, "donkey"),
+ )
+
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) " "values (?, ?)",
+ 7,
+ "sally",
+ )
+
+ with _string_deprecation_expect():
+ res = conn.execute("select * from users order by user_id")
+ assert res.fetchall() == [
+ (1, "jack"),
+ (2, "fred"),
+ (3, "ed"),
+ (4, "horse"),
+ (5, "barney"),
+ (6, "donkey"),
+ (7, "sally"),
+ ]
+ for multiparam, param in [
+ (("jack", "fred"), {}),
+ ((["jack", "fred"],), {}),
+ ]:
+ with _string_deprecation_expect():
+ res = conn.execute(
+ "select * from users where user_name=? or "
+ "user_name=? order by user_id",
+ *multiparam,
+ **param
+ )
+ assert res.fetchall() == [(1, "jack"), (2, "fred")]
+
+ with _string_deprecation_expect():
+ res = conn.execute("select * from users where user_name=?", "jack")
+ assert res.fetchall() == [(1, "jack")]
+
+ @testing.requires.format_paramstyle
+ def test_raw_sprintf(self, connection):
+ conn = connection
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) " "values (%s, %s)",
+ [1, "jack"],
+ )
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) " "values (%s, %s)",
+ [2, "ed"],
+ [3, "horse"],
+ )
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) " "values (%s, %s)",
+ 4,
+ "sally",
+ )
+ with _string_deprecation_expect():
+ conn.execute("insert into users (user_id) values (%s)", 5)
+ with _string_deprecation_expect():
+ res = conn.execute("select * from users order by user_id")
+ assert res.fetchall() == [
+ (1, "jack"),
+ (2, "ed"),
+ (3, "horse"),
+ (4, "sally"),
+ (5, None),
+ ]
+ for multiparam, param in [
+ (("jack", "ed"), {}),
+ ((["jack", "ed"],), {}),
+ ]:
+ with _string_deprecation_expect():
+ res = conn.execute(
+ "select * from users where user_name=%s or "
+ "user_name=%s order by user_id",
+ *multiparam,
+ **param
+ )
+ assert res.fetchall() == [(1, "jack"), (2, "ed")]
+ with _string_deprecation_expect():
+ res = conn.execute(
+ "select * from users where user_name=%s", "jack"
+ )
+ assert res.fetchall() == [(1, "jack")]
+
+ @testing.requires.pyformat_paramstyle
+ def test_raw_python(self, connection):
+ conn = connection
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) "
+ "values (%(id)s, %(name)s)",
+ {"id": 1, "name": "jack"},
+ )
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) "
+ "values (%(id)s, %(name)s)",
+ {"id": 2, "name": "ed"},
+ {"id": 3, "name": "horse"},
+ )
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) "
+ "values (%(id)s, %(name)s)",
+ id=4,
+ name="sally",
+ )
+ with _string_deprecation_expect():
+ res = conn.execute("select * from users order by user_id")
+ assert res.fetchall() == [
+ (1, "jack"),
+ (2, "ed"),
+ (3, "horse"),
+ (4, "sally"),
+ ]
+
+ @testing.requires.named_paramstyle
+ def test_raw_named(self, connection):
+ conn = connection
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) "
+ "values (:id, :name)",
+ {"id": 1, "name": "jack"},
+ )
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) "
+ "values (:id, :name)",
+ {"id": 2, "name": "ed"},
+ {"id": 3, "name": "horse"},
+ )
+ with _string_deprecation_expect():
+ conn.execute(
+ "insert into users (user_id, user_name) "
+ "values (:id, :name)",
+ id=4,
+ name="sally",
+ )
+ with _string_deprecation_expect():
+ res = conn.execute("select * from users order by user_id")
+ assert res.fetchall() == [
+ (1, "jack"),
+ (2, "ed"),
+ (3, "horse"),
+ (4, "sally"),
+ ]
from sqlalchemy.testing.util import picklers
-users, metadata, users_autoinc = None, None, None
-
-
class SomeException(Exception):
pass
-class ExecuteTest(fixtures.TestBase):
+class ExecuteTest(fixtures.TablesTest):
__backend__ = True
@classmethod
- def setup_class(cls):
- global users, users_autoinc, metadata
- metadata = MetaData(testing.db)
- users = Table(
+ def define_tables(cls, metadata):
+ Table(
"users",
metadata,
Column("user_id", INT, primary_key=True, autoincrement=False),
Column("user_name", VARCHAR(20)),
)
- users_autoinc = Table(
+ Table(
"users_autoinc",
metadata,
Column(
),
Column("user_name", VARCHAR(20)),
)
- metadata.create_all()
-
- @engines.close_first
- def teardown(self):
- testing.db.execute(users.delete())
-
- @classmethod
- def teardown_class(cls):
- metadata.drop_all()
@testing.fails_on(
"postgresql+pg8000",
)
conn = testing.db.connect()
- result = conn.execution_options(no_parameters=True).scalar(stmt)
+ result = (
+ conn.execution_options(no_parameters=True)
+ .exec_driver_sql(stmt)
+ .scalar()
+ )
eq_(result, "%")
- @testing.fails_on_everything_except(
- "firebird", "sqlite", "+pyodbc", "+mxodbc", "mysql+oursql"
- )
- def test_raw_qmark(self):
- def go(conn):
- conn.execute(
- "insert into users (user_id, user_name) " "values (?, ?)",
- (1, "jack"),
- )
- conn.execute(
- "insert into users (user_id, user_name) " "values (?, ?)",
- [2, "fred"],
- )
- conn.execute(
- "insert into users (user_id, user_name) " "values (?, ?)",
- [3, "ed"],
- [4, "horse"],
- )
- conn.execute(
- "insert into users (user_id, user_name) " "values (?, ?)",
- (5, "barney"),
- (6, "donkey"),
- )
- conn.execute(
- "insert into users (user_id, user_name) " "values (?, ?)",
- 7,
- "sally",
- )
- res = conn.execute("select * from users order by user_id")
- assert res.fetchall() == [
- (1, "jack"),
- (2, "fred"),
- (3, "ed"),
- (4, "horse"),
- (5, "barney"),
- (6, "donkey"),
- (7, "sally"),
- ]
- for multiparam, param in [
- (("jack", "fred"), {}),
- ((["jack", "fred"],), {}),
- ]:
- res = conn.execute(
- "select * from users where user_name=? or "
- "user_name=? order by user_id",
- *multiparam,
- **param
- )
- assert res.fetchall() == [(1, "jack"), (2, "fred")]
- res = conn.execute("select * from users where user_name=?", "jack")
- assert res.fetchall() == [(1, "jack")]
- conn.execute("delete from users")
-
- go(testing.db)
- conn = testing.db.connect()
- try:
- go(conn)
- finally:
- conn.close()
-
- # some psycopg2 versions bomb this.
- @testing.fails_on_everything_except(
- "mysql+mysqldb",
- "mysql+pymysql",
- "mysql+cymysql",
- "mysql+mysqlconnector",
- "postgresql",
- )
- def test_raw_sprintf(self):
- def go(conn):
- conn.execute(
- "insert into users (user_id, user_name) " "values (%s, %s)",
- [1, "jack"],
- )
- conn.execute(
- "insert into users (user_id, user_name) " "values (%s, %s)",
- [2, "ed"],
- [3, "horse"],
- )
- conn.execute(
- "insert into users (user_id, user_name) " "values (%s, %s)",
- 4,
- "sally",
- )
- conn.execute("insert into users (user_id) values (%s)", 5)
- res = conn.execute("select * from users order by user_id")
- assert res.fetchall() == [
- (1, "jack"),
- (2, "ed"),
- (3, "horse"),
- (4, "sally"),
- (5, None),
- ]
- for multiparam, param in [
- (("jack", "ed"), {}),
- ((["jack", "ed"],), {}),
- ]:
- res = conn.execute(
- "select * from users where user_name=%s or "
- "user_name=%s order by user_id",
- *multiparam,
- **param
- )
- assert res.fetchall() == [(1, "jack"), (2, "ed")]
- res = conn.execute(
- "select * from users where user_name=%s", "jack"
- )
- assert res.fetchall() == [(1, "jack")]
-
- conn.execute("delete from users")
-
- go(testing.db)
- conn = testing.db.connect()
- try:
- go(conn)
- finally:
- conn.close()
-
- # pyformat is supported for mysql, but skipping because a few driver
- # versions have a bug that bombs out on this test. (1.2.2b3,
- # 1.2.2c1, 1.2.2)
-
- @testing.skip_if(lambda: testing.against("mysql+mysqldb"), "db-api flaky")
- @testing.fails_on_everything_except(
- "postgresql+psycopg2",
- "postgresql+psycopg2cffi",
- "postgresql+pypostgresql",
- "postgresql+pygresql",
- "mysql+mysqlconnector",
- "mysql+pymysql",
- "mysql+cymysql",
- "mssql+pymssql",
- )
- def test_raw_python(self):
- def go(conn):
- conn.execute(
- "insert into users (user_id, user_name) "
- "values (%(id)s, %(name)s)",
- {"id": 1, "name": "jack"},
- )
- conn.execute(
- "insert into users (user_id, user_name) "
- "values (%(id)s, %(name)s)",
- {"id": 2, "name": "ed"},
- {"id": 3, "name": "horse"},
- )
- conn.execute(
- "insert into users (user_id, user_name) "
- "values (%(id)s, %(name)s)",
- id=4,
- name="sally",
- )
- res = conn.execute("select * from users order by user_id")
- assert res.fetchall() == [
- (1, "jack"),
- (2, "ed"),
- (3, "horse"),
- (4, "sally"),
- ]
- conn.execute("delete from users")
+ def test_raw_positional_invalid(self, connection):
+ assert_raises_message(
+ tsa.exc.ArgumentError,
+ "List argument must consist only of tuples or dictionaries",
+ connection.exec_driver_sql,
+ "insert into users (user_id, user_name) " "values (?, ?)",
+ [2, "fred"],
+ )
- go(testing.db)
- conn = testing.db.connect()
- try:
- go(conn)
- finally:
- conn.close()
+ assert_raises_message(
+ tsa.exc.ArgumentError,
+ "List argument must consist only of tuples or dictionaries",
+ connection.exec_driver_sql,
+ "insert into users (user_id, user_name) " "values (?, ?)",
+ [[3, "ed"], [4, "horse"]],
+ )
- @testing.fails_on_everything_except("sqlite", "oracle+cx_oracle")
- def test_raw_named(self):
- def go(conn):
- conn.execute(
- "insert into users (user_id, user_name) "
- "values (:id, :name)",
- {"id": 1, "name": "jack"},
- )
- conn.execute(
- "insert into users (user_id, user_name) "
- "values (:id, :name)",
- {"id": 2, "name": "ed"},
- {"id": 3, "name": "horse"},
- )
- conn.execute(
- "insert into users (user_id, user_name) "
- "values (:id, :name)",
- id=4,
- name="sally",
- )
- res = conn.execute("select * from users order by user_id")
- assert res.fetchall() == [
- (1, "jack"),
- (2, "ed"),
- (3, "horse"),
- (4, "sally"),
- ]
- conn.execute("delete from users")
+ def test_raw_named_invalid(self, connection):
+ assert_raises(
+ TypeError,
+ connection.exec_driver_sql,
+ "insert into users (user_id, user_name) "
+ "values (%(id)s, %(name)s)",
+ {"id": 2, "name": "ed"},
+ {"id": 3, "name": "horse"},
+ )
+ assert_raises(
+ TypeError,
+ connection.exec_driver_sql,
+ "insert into users (user_id, user_name) "
+ "values (%(id)s, %(name)s)",
+ id=4,
+ name="sally",
+ )
- go(testing.db)
- conn = testing.db.connect()
- try:
- go(conn)
- finally:
- conn.close()
+ @testing.requires.qmark_paramstyle
+ def test_raw_qmark(self, connection):
+ conn = connection
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) " "values (?, ?)",
+ (1, "jack"),
+ )
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) " "values (?, ?)",
+ (2, "fred"),
+ )
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) " "values (?, ?)",
+ [(3, "ed"), (4, "horse")],
+ )
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) " "values (?, ?)",
+ [(5, "barney"), (6, "donkey")],
+ )
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) " "values (?, ?)",
+ (7, "sally"),
+ )
+ res = conn.exec_driver_sql("select * from users order by user_id")
+ assert res.fetchall() == [
+ (1, "jack"),
+ (2, "fred"),
+ (3, "ed"),
+ (4, "horse"),
+ (5, "barney"),
+ (6, "donkey"),
+ (7, "sally"),
+ ]
+
+ res = conn.exec_driver_sql(
+ "select * from users where user_name=?", ("jack",)
+ )
+ assert res.fetchall() == [(1, "jack")]
+
+ @testing.requires.format_paramstyle
+ def test_raw_sprintf(self, connection):
+ conn = connection
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) " "values (%s, %s)",
+ (1, "jack"),
+ )
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) " "values (%s, %s)",
+ [(2, "ed"), (3, "horse")],
+ )
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) " "values (%s, %s)",
+ (4, "sally"),
+ )
+ conn.exec_driver_sql("insert into users (user_id) values (%s)", (5,))
+ res = conn.exec_driver_sql("select * from users order by user_id")
+ assert res.fetchall() == [
+ (1, "jack"),
+ (2, "ed"),
+ (3, "horse"),
+ (4, "sally"),
+ (5, None),
+ ]
+
+ res = conn.exec_driver_sql(
+ "select * from users where user_name=%s", ("jack",)
+ )
+ assert res.fetchall() == [(1, "jack")]
+
+ @testing.requires.pyformat_paramstyle
+ def test_raw_python(self, connection):
+ conn = connection
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) "
+ "values (%(id)s, %(name)s)",
+ {"id": 1, "name": "jack"},
+ )
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) "
+ "values (%(id)s, %(name)s)",
+ [{"id": 2, "name": "ed"}, {"id": 3, "name": "horse"}],
+ )
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) "
+ "values (%(id)s, %(name)s)",
+ dict(id=4, name="sally"),
+ )
+ res = conn.exec_driver_sql("select * from users order by user_id")
+ assert res.fetchall() == [
+ (1, "jack"),
+ (2, "ed"),
+ (3, "horse"),
+ (4, "sally"),
+ ]
+
+ @testing.requires.named_paramstyle
+ def test_raw_named(self, connection):
+ conn = connection
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) " "values (:id, :name)",
+ {"id": 1, "name": "jack"},
+ )
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) " "values (:id, :name)",
+ [{"id": 2, "name": "ed"}, {"id": 3, "name": "horse"}],
+ )
+ conn.exec_driver_sql(
+ "insert into users (user_id, user_name) " "values (:id, :name)",
+ {"id": 4, "name": "sally"},
+ )
+ res = conn.exec_driver_sql("select * from users order by user_id")
+ assert res.fetchall() == [
+ (1, "jack"),
+ (2, "ed"),
+ (3, "horse"),
+ (4, "sally"),
+ ]
@testing.engines.close_open_connections
def test_exception_wrapping_dbapi(self):
conn = testing.db.connect()
- for _c in testing.db, conn:
- assert_raises_message(
- tsa.exc.DBAPIError,
- r"not_a_valid_statement",
- _c.execute,
- "not_a_valid_statement",
- )
+ # engine does not have exec_driver_sql
+ assert_raises_message(
+ tsa.exc.DBAPIError,
+ r"not_a_valid_statement",
+ conn.exec_driver_sql,
+ "not_a_valid_statement",
+ )
@testing.requires.sqlite
def test_exception_wrapping_non_dbapi_error(self):
)
assert_raises_message(
- TypeError, "I'm not a DBAPI error", c.execute, "select "
+ TypeError,
+ "I'm not a DBAPI error",
+ c.exec_driver_sql,
+ "select ",
)
eq_(is_disconnect.call_count, 0)
):
with testing.db.connect() as conn:
assert_raises(
- tsa.exc.OperationalError, conn.execute, "select 1"
+ tsa.exc.OperationalError, conn.exec_driver_sql, "select 1"
)
def test_exception_wrapping_non_dbapi_statement(self):
)
_go(testing.db)
- conn = testing.db.connect()
- try:
+ with testing.db.connect() as conn:
_go(conn)
- finally:
- conn.close()
def test_not_an_executable(self):
for obj in (
def test_empty_insert(self):
"""test that execute() interprets [] as a list with no params"""
+ users_autoinc = self.tables.users_autoinc
testing.db.execute(
users_autoinc.insert().values(user_name=bindparam("name", None)),
cached_conn.execute(ins, {"user_name": "u3"})
eq_(compile_mock.call_count, 1)
assert len(cache) == 1
- eq_(conn.execute("select count(*) from users").scalar(), 3)
+ eq_(conn.exec_driver_sql("select count(*) from users").scalar(), 3)
@testing.only_on(
["sqlite", "mysql", "postgresql"],
cached_conn.execute(ins, {"photo_blob": blob})
eq_(compile_mock.call_count, 1)
eq_(len(cache), 1)
- eq_(conn.execute("select count(*) from photo").scalar(), 1)
+ eq_(conn.exec_driver_sql("select count(*) from photo").scalar(), 1)
del blob
with e1.connect() as conn:
- result = conn.execute(stmt)
+ result = conn.exec_driver_sql(stmt)
ctx = result.context
eq_(
t1.insert().execute(c1=5, c2="some data")
t1.insert().execute(c1=6)
eq_(
- engine.execute("select * from t1").fetchall(),
+ engine.execute(text("select * from t1")).fetchall(),
[(5, "some data"), (6, "foo")],
)
finally:
with engine.connect() as conn:
try:
- conn.execute("SELECT FOO FROM I_DONT_EXIST")
+ conn.exec_driver_sql("SELECT FOO FROM I_DONT_EXIST")
assert False
except tsa.exc.DBAPIError as e:
ctx = canary.mock_calls[0][1][0]
assert_raises_message(
MyException,
"my exception",
- conn.execute,
+ conn.exec_driver_sql,
"SELECT 'ERROR ONE' FROM I_DONT_EXIST",
)
# case 2: return the DBAPI exception we're given;
# no wrapping should occur
assert_raises(
conn.dialect.dbapi.Error,
- conn.execute,
+ conn.exec_driver_sql,
"SELECT 'ERROR TWO' FROM I_DONT_EXIST",
)
# case 3: normal wrapping
assert_raises(
tsa.exc.DBAPIError,
- conn.execute,
+ conn.exec_driver_sql,
"SELECT 'ERROR THREE' FROM I_DONT_EXIST",
)
assert_raises_message(
MyException2,
"my exception chained",
- conn.execute,
+ conn.exec_driver_sql,
"SELECT 'ERROR ONE' FROM I_DONT_EXIST",
)
eq_(patched.call_count, 1)
) as patched:
assert_raises(
MyException1,
- conn.execute,
+ conn.exec_driver_sql,
"SELECT 'ERROR TWO' FROM I_DONT_EXIST",
)
eq_(patched.call_count, 1)
# by err2
assert_raises(
MyException1,
- conn.execute,
+ conn.exec_driver_sql,
"SELECT 'ERROR THREE' FROM I_DONT_EXIST",
)
eq_(patched.call_count, 1)
) as patched:
assert_raises(
tsa.exc.DBAPIError,
- conn.execute,
+ conn.exec_driver_sql,
"SELECT 'ERROR FIVE' FROM I_DONT_EXIST",
)
eq_(patched.call_count, 1)
assert_raises_message(
MyException3,
"my exception short circuit",
- conn.execute,
+ conn.exec_driver_sql,
"SELECT 'ERROR FOUR' FROM I_DONT_EXIST",
)
eq_(patched.call_count, 1)
assert_raises_message(
tsa.exc.OperationalError,
"rollback failed",
- conn.execute,
+ conn.exec_driver_sql,
"insert into i_dont_exist (x) values ('y')",
)
)
assert_raises_message(
- TypeError, "I'm not a DBAPI error", c.execute, "select "
+ TypeError,
+ "I'm not a DBAPI error",
+ c.exec_driver_sql,
+ "select ",
)
ctx = listener.mock_calls[0][1][0]
eq_(ctx.statement, "select ")
with engine.connect() as conn:
assert_raises(
tsa.exc.DBAPIError,
- conn.execution_options(skip_user_error_events=True).execute,
+ conn.execution_options(
+ skip_user_error_events=True
+ ).exec_driver_sql,
"SELECT ERROR_ONE FROM I_DONT_EXIST",
)
assert_raises(
MyException1,
- conn.execution_options(skip_user_error_events=False).execute,
+ conn.execution_options(
+ skip_user_error_events=False
+ ).exec_driver_sql,
"SELECT ERROR_ONE FROM I_DONT_EXIST",
)
with engine.connect() as c:
try:
- c.execute("SELECT x FROM nonexistent")
+ c.exec_driver_sql("SELECT x FROM nonexistent")
assert False
except tsa.exc.StatementError as st:
eq_(st.connection_invalidated, evt_value)
with engine.connect() as c:
target_crec = c.connection._connection_record
try:
- c.execute("SELECT x FROM nonexistent")
+ c.exec_driver_sql("SELECT x FROM nonexistent")
assert False
except tsa.exc.StatementError as st:
eq_(st.connection_invalidated, True)
def _test_do_execute(self, retval):
with self._run_test(retval) as (conn, m1):
- result = conn.execute("insert into table foo", {"foo": "bar"})
+ result = conn.exec_driver_sql(
+ "insert into table foo", {"foo": "bar"}
+ )
self._assert(
retval,
m1.do_execute,
def _test_do_executemany(self, retval):
with self._run_test(retval) as (conn, m1):
- result = conn.execute(
+ result = conn.exec_driver_sql(
"insert into table foo", [{"foo": "bar"}, {"foo": "bar"}]
)
self._assert(
def _test_do_execute_no_params(self, retval):
with self._run_test(retval) as (conn, m1):
- result = conn.execution_options(no_parameters=True).execute(
- "insert into table foo"
- )
+ result = conn.execution_options(
+ no_parameters=True
+ ).exec_driver_sql("insert into table foo")
self._assert(
retval,
m1.do_execute_no_params,
engine.dialect.dbapi = dbapi
with engine.connect() as conn:
- conn.execute("%s something table something" % keyword)
+ conn.exec_driver_sql("%s something table something" % keyword)
if expected:
eq_(dbapi.connect().mock_calls, [call.cursor(), call.commit()])
from sqlalchemy.testing.util import lazy_gc
+def exec_sql(engine, sql, *args, **kwargs):
+ with engine.connect() as conn:
+ return conn.exec_driver_sql(sql, *args, **kwargs)
+
+
class LogParamsTest(fixtures.TestBase):
__only_on__ = "sqlite"
__requires__ = ("ad_hoc_engines",)
self.no_param_engine = engines.testing_engine(
options={"echo": True, "hide_parameters": True}
)
- self.eng.execute("create table if not exists foo (data string)")
- self.no_param_engine.execute(
- "create table if not exists foo (data string)"
+ exec_sql(self.eng, "create table if not exists foo (data string)")
+ exec_sql(
+ self.no_param_engine,
+ "create table if not exists foo (data string)",
)
self.buf = logging.handlers.BufferingHandler(100)
for log in [logging.getLogger("sqlalchemy.engine")]:
log.addHandler(self.buf)
def teardown(self):
- self.eng.execute("drop table if exists foo")
+ exec_sql(self.eng, "drop table if exists foo")
for log in [logging.getLogger("sqlalchemy.engine")]:
log.removeHandler(self.buf)
def test_log_large_list_of_dict(self):
- self.eng.execute(
+ exec_sql(
+ self.eng,
"INSERT INTO foo (data) values (:data)",
[{"data": str(i)} for i in range(100)],
)
)
def test_log_no_parameters(self):
- self.no_param_engine.execute(
+ exec_sql(
+ self.no_param_engine,
"INSERT INTO foo (data) values (:data)",
[{"data": str(i)} for i in range(100)],
)
)
def test_log_large_list_of_tuple(self):
- self.eng.execute(
+ exec_sql(
+ self.eng,
"INSERT INTO foo (data) values (?)",
[(str(i),) for i in range(100)],
)
largeparam = "".join(chr(random.randint(52, 85)) for i in range(5000))
- self.eng.execute("INSERT INTO foo (data) values (?)", (largeparam,))
+ exec_sql(self.eng, "INSERT INTO foo (data) values (?)", (largeparam,))
eq_(
self.buf.buffer[1].message,
lp2 = "".join(chr(random.randint(52, 85)) for i in range(8))
lp3 = "".join(chr(random.randint(52, 85)) for i in range(670))
- self.eng.execute("SELECT ?, ?, ?", (lp1, lp2, lp3))
+ exec_sql(self.eng, "SELECT ?, ?, ?", (lp1, lp2, lp3))
eq_(
self.buf.buffer[1].message,
lp2 = "".join(chr(random.randint(52, 85)) for i in range(200))
lp3 = "".join(chr(random.randint(52, 85)) for i in range(670))
- self.eng.execute(
- "INSERT INTO foo (data) values (?)", [(lp1,), (lp2,), (lp3,)]
+ exec_sql(
+ self.eng,
+ "INSERT INTO foo (data) values (?)",
+ [(lp1,), (lp2,), (lp3,)],
)
eq_(
tsa.exc.DBAPIError,
r".*INSERT INTO nonexistent \(data\) values \(:data\)\]\n"
r"\[SQL parameters hidden due to hide_parameters=True\]",
- lambda: self.no_param_engine.execute(
+ lambda: exec_sql(
+ self.no_param_engine,
"INSERT INTO nonexistent (data) values (:data)",
[{"data": str(i)} for i in range(10)],
),
largeparam = "".join(chr(random.randint(52, 85)) for i in range(5000))
self.eng.echo = "debug"
- result = self.eng.execute("SELECT ?", (largeparam,))
+ result = exec_sql(self.eng, "SELECT ?", (largeparam,))
row = result.first()
r"{'data': '6'}, {'data': '7'} ... displaying 10 of "
r"100 total bound parameter sets ... {'data': '98'}, "
r"{'data': '99'}\]",
- lambda: self.eng.execute(
+ lambda: exec_sql(
+ self.eng,
"INSERT INTO nonexistent (data) values (:data)",
[{"data": str(i)} for i in range(100)],
),
r"... displaying "
r"10 of 100 total bound parameter sets ... "
r"\('98',\), \('99',\)\]",
- lambda: self.eng.execute(
+ lambda: exec_sql(
+ self.eng,
"INSERT INTO nonexistent (data) values (?)",
[(str(i),) for i in range(100)],
),
def test_cursor_explode(self):
db = self._fixture(False, False)
conn = db.connect()
- result = conn.execute("select foo")
+ result = conn.exec_driver_sql("select foo")
result.close()
conn.close()
eq_(
engine = engines.testing_engine()
def broken_initialize(connection):
- connection.execute("select fake_stuff from _fake_table")
+ connection.exec_driver_sql("select fake_stuff from _fake_table")
engine.dialect.initialize = broken_initialize
engine = engines.testing_engine()
def broken_initialize(connection):
- connection.execute("select fake_stuff from _fake_table")
+ connection.exec_driver_sql("select fake_stuff from _fake_table")
engine.dialect.initialize = broken_initialize
)
def test_invalidate_on_results(self):
conn = self.engine.connect()
- result = conn.execute("select * from sometable")
+ result = conn.exec_driver_sql("select * from sometable")
for x in range(20):
result.fetchone()
self.engine.test_shutdown()
indexes"""
with testing.db.begin() as conn:
- conn.execute(
+ conn.exec_driver_sql(
"""
CREATE TABLE book (
id INTEGER NOT NULL,
"""test reflection of a composite primary key"""
with testing.db.begin() as conn:
- conn.execute(
+ conn.exec_driver_sql(
"""
CREATE TABLE book (
id INTEGER NOT NULL,
@testing.requires.denormalized_names
def setup(self):
- testing.db.execute(
+ with testing.db.connect() as conn:
+ conn.exec_driver_sql(
+ """
+ CREATE TABLE weird_casing(
+ col1 char(20),
+ "Col2" char(20),
+ "col3" char(20)
+ )
"""
- CREATE TABLE weird_casing(
- col1 char(20),
- "Col2" char(20),
- "col3" char(20)
- )
- """
- )
+ )
@testing.requires.denormalized_names
def teardown(self):
- testing.db.execute("drop table weird_casing")
+ with testing.db.connect() as conn:
+ conn.exec_driver_sql("drop table weird_casing")
@testing.requires.denormalized_names
def test_direct_quoting(self):
transaction.commit()
transaction = connection.begin()
- result = connection.execute("select * from query_users")
+ result = connection.exec_driver_sql("select * from query_users")
assert len(result.fetchall()) == 3
transaction.commit()
connection.close()
connection.execute(users.insert(), user_id=2, user_name="user2")
connection.execute(users.insert(), user_id=3, user_name="user3")
transaction.rollback()
- result = connection.execute("select * from query_users")
+ result = connection.exec_driver_sql("select * from query_users")
assert len(result.fetchall()) == 0
connection.close()
print("Exception: ", e)
transaction.rollback()
- result = connection.execute("select * from query_users")
+ result = connection.exec_driver_sql("select * from query_users")
assert len(result.fetchall()) == 0
connection.close()
assert_raises_message(
exc.InvalidRequestError,
"This connection is on an inactive transaction. Please",
- connection.execute,
+ connection.exec_driver_sql,
"select 1",
)
assert branched.in_transaction()
branched.execute(users.insert(), user_id=2, user_name="user2")
nested.rollback()
- eq_(connection.scalar("select count(*) from query_users"), 1)
+ eq_(
+ connection.exec_driver_sql(
+ "select count(*) from query_users"
+ ).scalar(),
+ 1,
+ )
finally:
connection.close()
branched.execute(users.insert(), user_id=1, user_name="user1")
finally:
connection.close()
- eq_(testing.db.scalar("select count(*) from query_users"), 1)
+ eq_(
+ testing.db.execute(
+ text("select count(*) from query_users")
+ ).scalar(),
+ 1,
+ )
@testing.requires.savepoints
def test_branch_savepoint_rollback(self):
nested.rollback()
assert connection.in_transaction()
trans.commit()
- eq_(connection.scalar("select count(*) from query_users"), 1)
+ eq_(
+ connection.exec_driver_sql(
+ "select count(*) from query_users"
+ ).scalar(),
+ 1,
+ )
finally:
connection.close()
branched.execute(users.insert(), user_id=2, user_name="user2")
nested.rollback()
assert not connection.in_transaction()
- eq_(connection.scalar("select count(*) from query_users"), 1)
+ eq_(
+ connection.exec_driver_sql(
+ "select count(*) from query_users"
+ ).scalar(),
+ 1,
+ )
finally:
connection.close()
conn2 = connection.execution_options(dummy=True)
conn2.execute(users.insert(), user_id=2, user_name="user2")
transaction.rollback()
- eq_(connection.scalar("select count(*) from query_users"), 0)
+ eq_(
+ connection.exec_driver_sql(
+ "select count(*) from query_users"
+ ).scalar(),
+ 0,
+ )
finally:
connection.close()
trans2.commit()
transaction.rollback()
self.assert_(
- connection.scalar("select count(*) from " "query_users") == 0
+ connection.exec_driver_sql(
+ "select count(*) from " "query_users"
+ ).scalar()
+ == 0
)
- result = connection.execute("select * from query_users")
+ result = connection.exec_driver_sql("select * from query_users")
assert len(result.fetchall()) == 0
connection.close()
assert not trans.is_active
self.assert_(
- connection.scalar("select count(*) from " "query_users") == 0
+ connection.exec_driver_sql(
+ "select count(*) from " "query_users"
+ ).scalar()
+ == 0
)
trans = connection.begin()
trans.__exit__(None, None, None)
assert not trans.is_active
self.assert_(
- connection.scalar("select count(*) from " "query_users") == 1
+ connection.exec_driver_sql(
+ "select count(*) from " "query_users"
+ ).scalar()
+ == 1
)
connection.close()
transaction.commit()
assert not connection.in_transaction()
self.assert_(
- connection.scalar("select count(*) from " "query_users") == 5
+ connection.exec_driver_sql(
+ "select count(*) from " "query_users"
+ ).scalar()
+ == 5
)
- result = connection.execute("select * from query_users")
+ result = connection.exec_driver_sql("select * from query_users")
assert len(result.fetchall()) == 5
connection.close()
transaction.close()
assert not connection.in_transaction()
self.assert_(
- connection.scalar("select count(*) from " "query_users") == 0
+ connection.exec_driver_sql(
+ "select count(*) from " "query_users"
+ ).scalar()
+ == 0
)
- result = connection.execute("select * from query_users")
+ result = connection.exec_driver_sql("select * from query_users")
assert len(result.fetchall()) == 0
connection.close()
assert_raises_message(
exc.InvalidRequestError,
"This connection is on an inactive savepoint transaction.",
- connection.execute,
+ connection.exec_driver_sql,
"select 1",
)
trans2.rollback()
test_needs_acid=True,
)
users.create(conn1)
- conn1.execute("select * from deadlock_users")
+ conn1.exec_driver_sql("select * from deadlock_users")
conn1.close()
# without auto-rollback in the connection pool's return() logic,
Column("id", Integer, primary_key=True),
Column("data", String(100)),
)
- metadata.create_all()
- testing.db.execute(
- "create function insert_foo(varchar) "
- "returns integer as 'insert into foo(data) "
- "values ($1);select 1;' language sql"
- )
+ with testing.db.connect() as conn:
+ metadata.create_all(conn)
+ conn.exec_driver_sql(
+ "create function insert_foo(varchar) "
+ "returns integer as 'insert into foo(data) "
+ "values ($1);select 1;' language sql"
+ )
def teardown(self):
- foo.delete().execute().close()
+ with testing.db.connect() as conn:
+ conn.execute(foo.delete())
@classmethod
def teardown_class(cls):
- testing.db.execute("drop function insert_foo(varchar)")
- metadata.drop_all()
+ with testing.db.connect() as conn:
+ conn.exec_driver_sql("drop function insert_foo(varchar)")
+ metadata.drop_all(conn)
def test_control(self):
e = testing_engine("sqlite://")
with e.connect() as conn:
for i in range(1, 5):
- conn.execute(
+ conn.exec_driver_sql(
'ATTACH DATABASE "shard%s_%s.db" AS shard%s'
% (i, provision.FOLLOWER_IDENT, i)
)
sess.flush()
sess.close()
assert not c.in_transaction()
- assert c.scalar("select count(1) from users") == 0
+ assert c.exec_driver_sql("select count(1) from users").scalar() == 0
sess = create_session(bind=c, autocommit=False)
u = User(name="u2")
sess.flush()
sess.commit()
assert not c.in_transaction()
- assert c.scalar("select count(1) from users") == 1
- c.execute("delete from users")
- assert c.scalar("select count(1) from users") == 0
+ assert c.exec_driver_sql("select count(1) from users").scalar() == 1
+ c.exec_driver_sql("delete from users")
+ assert c.exec_driver_sql("select count(1) from users").scalar() == 0
c = testing.db.connect()
assert c.in_transaction()
trans.commit()
assert not c.in_transaction()
- assert c.scalar("select count(1) from users") == 1
+ assert c.exec_driver_sql("select count(1) from users").scalar() == 1
class SessionBindTest(fixtures.MappedTest):
User, users = self.classes.User, self.tables.users
c = testing.db.connect()
- c.execute("select * from users")
+ c.exec_driver_sql("select * from users")
mapper(User, users)
s = create_session(bind=c)
s.add(User(name="first"))
s.flush()
- c.execute("select * from users")
+ c.exec_driver_sql("select * from users")
def test_close(self):
"""close() doesn't close a connection the session didn't open"""
User, users = self.classes.User, self.tables.users
c = testing.db.connect()
- c.execute("select * from users")
+ c.exec_driver_sql("select * from users")
mapper(User, users)
s = create_session(bind=c)
s.add(User(name="first"))
s.flush()
- c.execute("select * from users")
+ c.exec_driver_sql("select * from users")
s.close()
- c.execute("select * from users")
+ c.exec_driver_sql("select * from users")
def test_autobegin_execute(self):
# test the new autobegin behavior introduced in #5074
u = User(name="x")
sess.add(u)
sess.flush()
- assert conn1.execute("select count(1) from users").scalar() == 1
- assert conn2.execute("select count(1) from users").scalar() == 0
+ assert (
+ conn1.exec_driver_sql("select count(1) from users").scalar() == 1
+ )
+ assert (
+ conn2.exec_driver_sql("select count(1) from users").scalar() == 0
+ )
sess.commit()
- assert conn1.execute("select count(1) from users").scalar() == 1
+ assert (
+ conn1.exec_driver_sql("select count(1) from users").scalar() == 1
+ )
assert (
- testing.db.connect().execute("select count(1) from users").scalar()
+ testing.db.connect()
+ .exec_driver_sql("select count(1) from users")
+ .scalar()
== 1
)
sess.close()
sess.add(u)
u2 = sess.query(User).filter_by(name="ed").one()
assert u2 is u
- eq_(conn1.execute("select count(1) from users").scalar(), 1)
- eq_(conn2.execute("select count(1) from users").scalar(), 0)
+ eq_(conn1.exec_driver_sql("select count(1) from users").scalar(), 1)
+ eq_(conn2.exec_driver_sql("select count(1) from users").scalar(), 0)
sess.commit()
- eq_(conn1.execute("select count(1) from users").scalar(), 1)
- eq_(bind.connect().execute("select count(1) from users").scalar(), 1)
+ eq_(conn1.exec_driver_sql("select count(1) from users").scalar(), 1)
+ eq_(
+ bind.connect()
+ .exec_driver_sql("select count(1) from users")
+ .scalar(),
+ 1,
+ )
sess.close()
def test_with_no_autoflush(self):
)
assert (
testing.db.connect()
- .execute("select count(1) from users")
+ .exec_driver_sql("select count(1) from users")
.scalar()
== 0
)
)
assert (
testing.db.connect()
- .execute("select count(1) from users")
+ .exec_driver_sql("select count(1) from users")
.scalar()
== 1
)
u.name = "ed"
sess.add(u)
sess.commit()
- assert conn1.execute("select count(1) from users").scalar() == 1
assert (
- testing.db.connect().execute("select count(1) from users").scalar()
+ conn1.exec_driver_sql("select count(1) from users").scalar() == 1
+ )
+ assert (
+ testing.db.connect()
+ .exec_driver_sql("select count(1) from users")
+ .scalar()
== 1
)
sess.commit()
tran = s.transaction
s.add(User(name="first"))
s.flush()
- c.execute("select * from users")
+ c.exec_driver_sql("select * from users")
u = User(name="two")
s.add(u)
s.flush()
session.begin_nested()
session.connection().execute(users.insert().values(name="user2"))
assert (
- session.connection().execute("select count(1) from users").scalar()
+ session.connection()
+ .exec_driver_sql("select count(1) from users")
+ .scalar()
== 2
)
session.rollback()
assert (
- session.connection().execute("select count(1) from users").scalar()
+ session.connection()
+ .exec_driver_sql("select count(1) from users")
+ .scalar()
== 1
)
session.connection().execute(users.insert().values(name="user3"))
session.commit()
assert (
- session.connection().execute("select count(1) from users").scalar()
+ session.connection()
+ .exec_driver_sql("select count(1) from users")
+ .scalar()
== 2
)
try:
conn.begin()
for i in range(5):
- conn.execute("SELECT 1+1")
+ conn.exec_driver_sql("SELECT 1+1")
gevent.sleep(random.random() * 1.01)
except Exception:
gevent.sleep(3)
while True:
- result = list(engine.execute("show processlist"))
- engine.execute("kill %d" % result[-2][0])
+ result = list(engine.exec_driver_sql("show processlist"))
+ engine.exec_driver_sql("kill %d" % result[-2][0])
print("\n\n\n BOOM!!!!! \n\n\n")
gevent.sleep(5)
print(engine.pool.status())
"""
return skip_if(["firebird", "mssql+mxodbc"], "not supported by driver")
+ @property
+ def qmark_paramstyle(self):
+ return only_on(
+ ["firebird", "sqlite", "+pyodbc", "+mxodbc", "mysql+oursql"]
+ )
+
+ @property
+ def named_paramstyle(self):
+ return only_on(["sqlite", "oracle+cx_oracle"])
+
+ @property
+ def format_paramstyle(self):
+ return only_on(
+ [
+ "mysql+mysqldb",
+ "mysql+pymysql",
+ "mysql+cymysql",
+ "mysql+mysqlconnector",
+ "postgresql",
+ ]
+ )
+
+ @property
+ def pyformat_paramstyle(self):
+ return only_on(
+ [
+ "postgresql+psycopg2",
+ "postgresql+psycopg2cffi",
+ "postgresql+pypostgresql",
+ "postgresql+pygresql",
+ "mysql+mysqlconnector",
+ "mysql+pymysql",
+ "mysql+cymysql",
+ "mssql+pymssql",
+ ]
+ )
+
@property
def no_quoting_special_bind_names(self):
"""Target database will quote bound parameter names, doesn't support
with config.db.connect() as conn:
try:
return (
- conn.scalar(
+ conn.exec_driver_sql(
"""select json_extract('{"foo": "bar"}', """
"""'$."foo"')"""
- )
+ ).scalar()
== "bar"
)
except exc.DBAPIError:
def check(config):
if not against(config, "postgresql"):
return False
- count = config.db.scalar(
- "SELECT count(*) FROM pg_extension "
- "WHERE extname='%s'" % name
+ count = (
+ config.db.connect(close_with_result=True)
+ .exec_driver_sql(
+ "SELECT count(*) FROM pg_extension "
+ "WHERE extname='%s'" % name
+ )
+ .scalar()
)
return bool(count)
):
return False
try:
- config.db.scalar("select '[1,2)'::int4range;")
+ config.db.connect(close_with_result=True).exec_driver_sql(
+ "select '[1,2)'::int4range;"
+ ).scalar()
return True
except Exception:
return False
if not against(config, "mysql"):
return False
- row = config.db.execute("show variables like 'sql_mode'").first()
+ row = (
+ config.db.connect(close_with_result=True)
+ .exec_driver_sql("show variables like 'sql_mode'")
+ .first()
+ )
return not row or "NO_ZERO_DATE" not in row[1]
return only_if(check)
if not against(config, "mysql"):
return False
- row = config.db.execute("show variables like 'sql_mode'").first()
+ row = (
+ config.db.connect(close_with_result=True)
+ .exec_driver_sql("show variables like 'sql_mode'")
+ .first()
+ )
return not row or "STRICT_TRANS_TABLES" not in row[1]
return only_if(check)
@property
def postgresql_utf8_server_encoding(self):
+
return only_if(
lambda config: against(config, "postgresql")
- and config.db.scalar("show server_encoding").lower() == "utf8"
+ and config.db.connect(close_with_result=True)
+ .exec_driver_sql("show server_encoding")
+ .scalar()
+ .lower()
+ == "utf8"
)
@property
def test_default_textual_w_default(self):
with testing.db.connect() as conn:
- conn.execute("insert into t_seq_test (data) values ('some data')")
+ conn.exec_driver_sql(
+ "insert into t_seq_test (data) values ('some data')"
+ )
- eq_(conn.scalar("select id from t_seq_test"), 1)
+ eq_(conn.exec_driver_sql("select id from t_seq_test").scalar(), 1)
def test_default_core_w_default(self):
t_seq_test = self.tables.t_seq_test
def test_default_textual_server_only(self):
with testing.db.connect() as conn:
- conn.execute(
+ conn.exec_driver_sql(
"insert into t_seq_test_2 (data) values ('some data')"
)
- eq_(conn.scalar("select id from t_seq_test_2"), 1)
+ eq_(
+ conn.exec_driver_sql("select id from t_seq_test_2").scalar(), 1
+ )
def test_default_core_server_only(self):
t_seq_test = self.tables.t_seq_test_2
def test_noop_for_unhandled_objects(self):
with self.bind.connect() as conn:
- conn.execute("SELECT 1;").fetchone()
+ conn.exec_driver_sql("SELECT 1;").fetchone()
def test_does_not_modify_query(self):
with self.bind.connect() as conn:
@testing.provide_metadata
def test_has_table_case_sensitive(self):
preparer = testing.db.dialect.identifier_preparer
- if testing.db.dialect.requires_name_normalize:
- testing.db.execute("CREATE TABLE TAB1 (id INTEGER)")
- else:
- testing.db.execute("CREATE TABLE tab1 (id INTEGER)")
- testing.db.execute(
- "CREATE TABLE %s (id INTEGER)" % preparer.quote_identifier("tab2")
- )
- testing.db.execute(
- "CREATE TABLE %s (id INTEGER)" % preparer.quote_identifier("TAB3")
- )
- testing.db.execute(
- "CREATE TABLE %s (id INTEGER)" % preparer.quote_identifier("TAB4")
- )
+ with testing.db.connect() as conn:
+ if conn.dialect.requires_name_normalize:
+ conn.exec_driver_sql("CREATE TABLE TAB1 (id INTEGER)")
+ else:
+ conn.exec_driver_sql("CREATE TABLE tab1 (id INTEGER)")
+ conn.exec_driver_sql(
+ "CREATE TABLE %s (id INTEGER)"
+ % preparer.quote_identifier("tab2")
+ )
+ conn.exec_driver_sql(
+ "CREATE TABLE %s (id INTEGER)"
+ % preparer.quote_identifier("TAB3")
+ )
+ conn.exec_driver_sql(
+ "CREATE TABLE %s (id INTEGER)"
+ % preparer.quote_identifier("TAB4")
+ )
t1 = Table(
"tab1", self.metadata, Column("id", Integer, primary_key=True)
)
trans.rollback()
- def test_fetchone_til_end(self):
- result = testing.db.execute("select * from users")
+ def test_fetchone_til_end(self, connection):
+ result = connection.exec_driver_sql("select * from users")
eq_(result.fetchone(), None)
eq_(result.fetchone(), None)
eq_(result.fetchone(), None)
def test_connectionless_autoclose_rows_exhausted(self):
users = self.tables.users
- users.insert().execute(dict(user_id=1, user_name="john"))
+ with testing.db.connect() as conn:
+ conn.execute(users.insert(), dict(user_id=1, user_name="john"))
- result = testing.db.execute("select * from users")
+ result = testing.db.execute(text("select * from users"))
connection = result.connection
assert not connection.closed
eq_(result.fetchone(), (1, "john"))
assert connection.closed
def test_connectionless_autoclose_no_rows(self):
- result = testing.db.execute("select * from users")
+ result = testing.db.execute(text("select * from users"))
connection = result.connection
assert not connection.closed
eq_(result.fetchone(), None)
@testing.requires.updateable_autoincrement_pks
def test_connectionless_autoclose_no_metadata(self):
- result = testing.db.execute("update users set user_id=5")
+ result = testing.db.execute(text("update users set user_id=5"))
connection = result.connection
assert connection.closed
assert_raises_message(
[("user_id", 1), ("user_name", "foo")],
)
- def test_len(self):
+ def test_len(self, connection):
users = self.tables.users
- users.insert().execute(user_id=1, user_name="foo")
- r = users.select().execute().first()
+ connection.execute(users.insert(), dict(user_id=1, user_name="foo"))
+ r = connection.execute(users.select()).first()
eq_(len(r), 2)
- r = testing.db.execute("select user_name, user_id from users").first()
+ r = connection.exec_driver_sql(
+ "select user_name, user_id from users"
+ ).first()
eq_(len(r), 2)
- r = testing.db.execute("select user_name from users").first()
+ r = connection.exec_driver_sql("select user_name from users").first()
eq_(len(r), 1)
def test_sorting_in_python(self):
eq_([x.lower() for x in r._fields], ["user_id", "user_name"])
eq_(list(r._mapping.values()), [1, "foo"])
- def test_column_order_with_text_query(self):
+ def test_column_order_with_text_query(self, connection):
# should return values in query order
users = self.tables.users
- users.insert().execute(user_id=1, user_name="foo")
- r = testing.db.execute("select user_name, user_id from users").first()
+ connection.execute(users.insert(), dict(user_id=1, user_name="foo"))
+
+ r = connection.exec_driver_sql(
+ "select user_name, user_id from users"
+ ).first()
eq_(r[0], "foo")
eq_(r[1], 1)
eq_([x.lower() for x in r._fields], ["user_name", "user_id"])
eq_(row[1:0:-1], ("Uno",))
@testing.only_on("sqlite")
- def test_row_getitem_indexes_raw(self):
- row = testing.db.execute("select 'One' as key, 'Uno' as value").first()
+ def test_row_getitem_indexes_raw(self, connection):
+ row = connection.exec_driver_sql(
+ "select 'One' as key, 'Uno' as value"
+ ).first()
eq_(row._mapping["key"], "One")
eq_(row._mapping["value"], "Uno")
eq_(row[0], "One")
assert s.getvalue().strip() == "1,Test"
@testing.requires.selectone
- def test_empty_accessors(self):
+ def test_empty_accessors(self, connection):
statements = [
(
"select 1",
]
for stmt, meths, msg in statements:
- r = testing.db.execute(stmt)
+ if isinstance(stmt, str):
+ r = connection.exec_driver_sql(stmt)
+ else:
+ r = connection.execute(stmt)
try:
for meth in meths:
assert_raises_message(
)
@testing.fails_on_everything_except("postgresql", "firebird")
- def test_literal_returning(self):
+ def test_literal_returning(self, connection):
if testing.against("postgresql"):
literal_true = "true"
else:
literal_true = "1"
- result4 = testing.db.execute(
+ result4 = connection.exec_driver_sql(
'insert into tables (id, persons, "full") '
"values (5, 10, %s) returning persons" % literal_true
)
class RoundTripTestBase(object):
- def test_round_trip(self):
- testing.db.execute(
+ def test_round_trip(self, connection):
+ connection.execute(
self.tables.test_table.insert(),
{"x": "X1", "y": "Y1"},
{"x": "X2", "y": "Y2"},
# test insert coercion alone
eq_(
- testing.db.execute(
+ connection.exec_driver_sql(
"select * from test_table order by y"
).fetchall(),
[("X1", "y1"), ("X2", "y2"), ("X3", "y3")],
# conversion back to upper
eq_(
- testing.db.execute(
+ connection.execute(
select([self.tables.test_table]).order_by(
self.tables.test_table.c.y
)
eq_(conn.scalar(select([non_native_enum_table.c.someenum])), None)
@testing.requires.enforces_check_constraints
- def test_check_constraint(self):
+ def test_check_constraint(self, connection):
assert_raises(
(
exc.IntegrityError,
# https://github.com/PyMySQL/PyMySQL/issues/607 is resolved
exc.InternalError,
),
- testing.db.execute,
+ connection.exec_driver_sql,
"insert into non_native_enum_table "
"(id, someenum) values(1, 'four')",
)
self.metadata.create_all(conn)
assert_raises(
(exc.DBAPIError,),
- conn.execute,
+ conn.exec_driver_sql,
"insert into my_table " "(data) values('four')",
)
- conn.execute("insert into my_table (data) values ('two')")
+ conn.exec_driver_sql("insert into my_table (data) values ('two')")
@testing.requires.enforces_check_constraints
@testing.provide_metadata
self.metadata.create_all(conn)
assert_raises(
(exc.DBAPIError,),
- conn.execute,
+ conn.exec_driver_sql,
"insert into my_table " "(data) values('two')",
)
- conn.execute("insert into my_table (data) values ('four')")
+ conn.exec_driver_sql("insert into my_table (data) values ('four')")
def test_skip_check_constraint(self):
with testing.db.connect() as conn:
- conn.execute(
+ conn.exec_driver_sql(
"insert into non_native_enum_table "
"(id, someotherenum) values(1, 'four')"
)
eq_(
- conn.scalar("select someotherenum from non_native_enum_table"),
+ conn.exec_driver_sql(
+ "select someotherenum from non_native_enum_table"
+ ).scalar(),
"four",
)
assert_raises_message(
def teardown_class(cls):
meta.drop_all()
- def test_control(self):
- assert testing.db.execute("select avalue from test").scalar() == 250
+ def test_control(self, connection):
+ assert (
+ connection.exec_driver_sql("select avalue from test").scalar()
+ == 250
+ )
eq_(
- test_table.select().execute().fetchall(),
+ connection.execute(test_table.select()).fetchall(),
[
(
1,
@testing.fails_on("sqlite", "Doesn't provide Decimal results natively")
@testing.provide_metadata
- def test_decimal_fp(self):
+ def test_decimal_fp(self, connection):
metadata = self.metadata
self._fixture(metadata, Numeric(10, 5), decimal.Decimal("45.5"))
- val = testing.db.execute("select val from t").scalar()
+ val = connection.exec_driver_sql("select val from t").scalar()
assert isinstance(val, decimal.Decimal)
eq_(val, decimal.Decimal("45.5"))
@testing.fails_on("sqlite", "Doesn't provide Decimal results natively")
@testing.provide_metadata
- def test_decimal_int(self):
+ def test_decimal_int(self, connection):
metadata = self.metadata
self._fixture(metadata, Numeric(10, 5), decimal.Decimal("45"))
- val = testing.db.execute("select val from t").scalar()
+ val = connection.exec_driver_sql("select val from t").scalar()
assert isinstance(val, decimal.Decimal)
eq_(val, decimal.Decimal("45"))
@testing.provide_metadata
- def test_ints(self):
+ def test_ints(self, connection):
metadata = self.metadata
self._fixture(metadata, Integer, 45)
- val = testing.db.execute("select val from t").scalar()
+ val = connection.exec_driver_sql("select val from t").scalar()
assert isinstance(val, util.int_types)
eq_(val, 45)
@testing.provide_metadata
- def test_float(self):
+ def test_float(self, connection):
metadata = self.metadata
self._fixture(metadata, Float, 46.583)
- val = testing.db.execute("select val from t").scalar()
+ val = connection.exec_driver_sql("select val from t").scalar()
assert isinstance(val, float)
# some DBAPIs have unusual float handling
)
@testing.fails_on("mssql", "FIXME: MS-SQL 2005 doesn't honor CHECK ?!?")
@testing.skip_if(lambda: testing.db.dialect.supports_native_boolean)
- def test_constraint(self):
+ def test_constraint(self, connection):
assert_raises(
(exc.IntegrityError, exc.ProgrammingError),
- testing.db.execute,
+ connection.exec_driver_sql,
"insert into boolean_table (id, value) values(1, 5)",
)
@testing.skip_if(lambda: testing.db.dialect.supports_native_boolean)
- def test_unconstrained(self):
- testing.db.execute(
+ def test_unconstrained(self, connection):
+ connection.exec_driver_sql(
"insert into boolean_table (id, unconstrained_value)"
"values (1, 5)"
)
def test_nonnative_processor_coerces_integer_to_boolean(self):
boolean_table = self.tables.boolean_table
with testing.db.connect() as conn:
- conn.execute(
+ conn.exec_driver_sql(
"insert into boolean_table (id, unconstrained_value) "
"values (1, 5)"
)
eq_(
- conn.scalar("select unconstrained_value from boolean_table"), 5
+ conn.exec_driver_sql(
+ "select unconstrained_value from boolean_table"
+ ).scalar(),
+ 5,
)
eq_(