def setup_class(cls):
con = testing.db.connect()
try:
- con.execute('CREATE DOMAIN int_domain AS INTEGER DEFAULT 42 NOT NULL')
+ con.execute('CREATE DOMAIN int_domain AS INTEGER DEFAULT '
+ '42 NOT NULL')
con.execute('CREATE DOMAIN str_domain AS VARCHAR(255)')
- con.execute('CREATE DOMAIN rem_domain AS BLOB SUB_TYPE TEXT')
- con.execute('CREATE DOMAIN img_domain AS BLOB SUB_TYPE BINARY')
+ con.execute('CREATE DOMAIN rem_domain AS BLOB SUB_TYPE TEXT'
+ )
+ con.execute('CREATE DOMAIN img_domain AS BLOB SUB_TYPE '
+ 'BINARY')
except ProgrammingError, e:
- if not "attempt to store duplicate value" in str(e):
+ if 'attempt to store duplicate value' not in str(e):
raise e
con.execute('''CREATE GENERATOR gen_testtable_id''')
con.execute('''CREATE TABLE testtable (question int_domain,
- answer str_domain DEFAULT 'no answer',
- remark rem_domain DEFAULT '',
- photo img_domain,
- d date,
- t time,
- dt timestamp,
- redundant str_domain DEFAULT NULL)''')
- con.execute('''ALTER TABLE testtable
- ADD CONSTRAINT testtable_pk PRIMARY KEY (question)''')
- con.execute('''CREATE TRIGGER testtable_autoid FOR testtable
- ACTIVE BEFORE INSERT AS
- BEGIN
- IF (NEW.question IS NULL) THEN
- NEW.question = gen_id(gen_testtable_id, 1);
- END''')
+ answer str_domain DEFAULT 'no answer',
+ remark rem_domain DEFAULT '',
+ photo img_domain,
+ d date,
+ t time,
+ dt timestamp,
+ redundant str_domain DEFAULT NULL)''')
+ con.execute("ALTER TABLE testtable "
+ "ADD CONSTRAINT testtable_pk PRIMARY KEY "
+ "(question)")
+ con.execute("CREATE TRIGGER testtable_autoid FOR testtable "
+ " ACTIVE BEFORE INSERT AS"
+ " BEGIN"
+ " IF (NEW.question IS NULL) THEN"
+ " NEW.question = gen_id(gen_testtable_id, 1);"
+ " END")
@classmethod
def teardown_class(cls):
con.execute('DROP GENERATOR gen_testtable_id')
def test_table_is_reflected(self):
- from sqlalchemy.types import Integer, Text, BLOB, String, Date, Time, DateTime
+ from sqlalchemy.types import Integer, Text, BLOB, String, Date, \
+ Time, DateTime
metadata = MetaData(testing.db)
table = Table('testtable', metadata, autoload=True)
- eq_(set(table.columns.keys()),
- set(['question', 'answer', 'remark',
- 'photo', 'd', 't', 'dt', 'redundant']),
- "Columns of reflected table didn't equal expected columns")
+ eq_(set(table.columns.keys()), set([
+ 'question',
+ 'answer',
+ 'remark',
+ 'photo',
+ 'd',
+ 't',
+ 'dt',
+ 'redundant',
+ ]),
+ "Columns of reflected table didn't equal expected "
+ "columns")
eq_(table.c.question.primary_key, True)
+
# disabled per http://www.sqlalchemy.org/trac/ticket/1660
# eq_(table.c.question.sequence.name, 'gen_testtable_id')
+
assert isinstance(table.c.question.type, Integer)
- eq_(table.c.question.server_default.arg.text, "42")
+ eq_(table.c.question.server_default.arg.text, '42')
assert isinstance(table.c.answer.type, String)
assert table.c.answer.type.length == 255
eq_(table.c.answer.server_default.arg.text, "'no answer'")
eq_(table.c.remark.server_default.arg.text, "''")
assert isinstance(table.c.photo.type, BLOB)
assert table.c.redundant.server_default is None
+
# The following assume a Dialect 3 database
+
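+ # (a dialect 1 database has no separate DATE and TIME types, so
+ # these checks would not hold there)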
assert isinstance(table.c.d.type, Date)
assert isinstance(table.c.t.type, Time)
assert isinstance(table.c.dt.type, DateTime)
class CompileTest(TestBase, AssertsCompiledSQL):
+
__dialect__ = firebird.FBDialect()
def test_alias(self):
t = table('sometable', column('col1'), column('col2'))
s = select([t.alias()])
- self.assert_compile(s, "SELECT sometable_1.col1, sometable_1.col2 FROM sometable AS sometable_1")
-
+ self.assert_compile(s,
+ 'SELECT sometable_1.col1, sometable_1.col2 '
+ 'FROM sometable AS sometable_1')
dialect = firebird.FBDialect()
dialect._version_two = False
- self.assert_compile(s, "SELECT sometable_1.col1, sometable_1.col2 FROM sometable sometable_1",
- dialect = dialect
- )
+ self.assert_compile(s,
+ 'SELECT sometable_1.col1, sometable_1.col2 '
+ 'FROM sometable sometable_1',
+ dialect=dialect)
def test_function(self):
- self.assert_compile(func.foo(1, 2), "foo(:foo_1, :foo_2)")
- self.assert_compile(func.current_time(), "CURRENT_TIME")
- self.assert_compile(func.foo(), "foo")
-
+ self.assert_compile(func.foo(1, 2), 'foo(:foo_1, :foo_2)')
+ self.assert_compile(func.current_time(), 'CURRENT_TIME')
+ self.assert_compile(func.foo(), 'foo')
m = MetaData()
- t = Table('sometable', m, Column('col1', Integer), Column('col2', Integer))
- self.assert_compile(select([func.max(t.c.col1)]), "SELECT max(sometable.col1) AS max_1 FROM sometable")
+ t = Table('sometable', m, Column('col1', Integer), Column('col2'
+ , Integer))
+ self.assert_compile(select([func.max(t.c.col1)]),
+ 'SELECT max(sometable.col1) AS max_1 FROM '
+ 'sometable')
def test_substring(self):
- self.assert_compile(func.substring('abc', 1, 2), "SUBSTRING(:substring_1 FROM :substring_2 FOR :substring_3)")
- self.assert_compile(func.substring('abc', 1), "SUBSTRING(:substring_1 FROM :substring_2)")
+ self.assert_compile(func.substring('abc', 1, 2),
+ 'SUBSTRING(:substring_1 FROM :substring_2 '
+ 'FOR :substring_3)')
+ self.assert_compile(func.substring('abc', 1),
+ 'SUBSTRING(:substring_1 FROM :substring_2)')
def test_update_returning(self):
- table1 = table('mytable',
- column('myid', Integer),
- column('name', String(128)),
- column('description', String(128)),
- )
-
- u = update(table1, values=dict(name='foo')).returning(table1.c.myid, table1.c.name)
- self.assert_compile(u, "UPDATE mytable SET name=:name RETURNING mytable.myid, mytable.name")
-
+ table1 = table('mytable', column('myid', Integer), column('name'
+ , String(128)), column('description',
+ String(128)))
+ u = update(table1, values=dict(name='foo'
+ )).returning(table1.c.myid, table1.c.name)
+ self.assert_compile(u,
+ 'UPDATE mytable SET name=:name RETURNING '
+ 'mytable.myid, mytable.name')
u = update(table1, values=dict(name='foo')).returning(table1)
- self.assert_compile(u, "UPDATE mytable SET name=:name "\
- "RETURNING mytable.myid, mytable.name, mytable.description")
-
- u = update(table1, values=dict(name='foo')).returning(func.length(table1.c.name))
- self.assert_compile(u, "UPDATE mytable SET name=:name RETURNING char_length(mytable.name) AS length_1")
+ self.assert_compile(u,
+ 'UPDATE mytable SET name=:name RETURNING '
+ 'mytable.myid, mytable.name, '
+ 'mytable.description')
+ u = update(table1, values=dict(name='foo'
+ )).returning(func.length(table1.c.name))
+ self.assert_compile(u,
+ 'UPDATE mytable SET name=:name RETURNING '
+ 'char_length(mytable.name) AS length_1')
def test_insert_returning(self):
- table1 = table('mytable',
- column('myid', Integer),
- column('name', String(128)),
- column('description', String(128)),
- )
-
- i = insert(table1, values=dict(name='foo')).returning(table1.c.myid, table1.c.name)
- self.assert_compile(i, "INSERT INTO mytable (name) VALUES (:name) RETURNING mytable.myid, mytable.name")
-
+ table1 = table('mytable', column('myid', Integer), column('name'
+ , String(128)), column('description',
+ String(128)))
+ i = insert(table1, values=dict(name='foo'
+ )).returning(table1.c.myid, table1.c.name)
+ self.assert_compile(i,
+ 'INSERT INTO mytable (name) VALUES (:name) '
+ 'RETURNING mytable.myid, mytable.name')
i = insert(table1, values=dict(name='foo')).returning(table1)
- self.assert_compile(i, "INSERT INTO mytable (name) VALUES (:name) "\
- "RETURNING mytable.myid, mytable.name, mytable.description")
-
- i = insert(table1, values=dict(name='foo')).returning(func.length(table1.c.name))
- self.assert_compile(i, "INSERT INTO mytable (name) VALUES (:name) RETURNING char_length(mytable.name) AS length_1")
+ self.assert_compile(i,
+ 'INSERT INTO mytable (name) VALUES (:name) '
+ 'RETURNING mytable.myid, mytable.name, '
+ 'mytable.description')
+ i = insert(table1, values=dict(name='foo'
+ )).returning(func.length(table1.c.name))
+ self.assert_compile(i,
+ 'INSERT INTO mytable (name) VALUES (:name) '
+ 'RETURNING char_length(mytable.name) AS '
+ 'length_1')
def test_charset(self):
"""Exercise CHARACTER SET options on string types."""
- columns = [
- (firebird.CHAR, [1], {},
- 'CHAR(1)'),
- (firebird.CHAR, [1], {'charset' : 'OCTETS'},
- 'CHAR(1) CHARACTER SET OCTETS'),
- (firebird.VARCHAR, [1], {},
- 'VARCHAR(1)'),
- (firebird.VARCHAR, [1], {'charset' : 'OCTETS'},
- 'VARCHAR(1) CHARACTER SET OCTETS'),
- ]
-
+ columns = [(firebird.CHAR, [1], {}, 'CHAR(1)'), (firebird.CHAR,
+ [1], {'charset': 'OCTETS'},
+ 'CHAR(1) CHARACTER SET OCTETS'), (firebird.VARCHAR,
+ [1], {}, 'VARCHAR(1)'), (firebird.VARCHAR, [1],
+ {'charset': 'OCTETS'},
+ 'VARCHAR(1) CHARACTER SET OCTETS')]
for type_, args, kw, res in columns:
self.assert_compile(type_(*args, **kw), res)
-
-
class MiscTest(TestBase):
+
__only_on__ = 'firebird'
@testing.provide_metadata
def test_strlen(self):
- # On FB the length() function is implemented by an external
- # UDF, strlen(). Various SA tests fail because they pass a
- # parameter to it, and that does not work (it always results
- # the maximum string length the UDF was declared to accept).
- # This test checks that at least it works ok in other cases.
-
- t = Table('t1', metadata,
- Column('id', Integer, Sequence('t1idseq'), primary_key=True),
- Column('name', String(10))
- )
+
+ # On FB the length() function is implemented by an external UDF,
+ # strlen(). Various SA tests fail because they pass a parameter
+ # to it, and that does not work (it always returns the maximum
+ # string length the UDF was declared to accept). This test
+ # checks that at least it works ok in other cases.
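+ # (on this dialect SQLAlchemy renders func.length as char_length,
+ # as the RETURNING compile tests above exercise)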
+
+ t = Table('t1', metadata, Column('id', Integer,
+ Sequence('t1idseq'), primary_key=True), Column('name'
+ , String(10)))
metadata.create_all()
t.insert(values=dict(name='dante')).execute()
t.insert(values=dict(name='alighieri')).execute()
- select([func.count(t.c.id)],func.length(t.c.name)==5).execute().first()[0] == 1
+ eq_(select([func.count(t.c.id)], func.length(t.c.name)
+ == 5).execute().first()[0], 1)
def test_server_version_info(self):
version = testing.db.dialect.server_version_info
- assert len(version) == 3, "Got strange version info: %s" % repr(version)
+ assert len(version) == 3, 'Got strange version info: %s' \
+ % repr(version)
@testing.provide_metadata
def test_rowcount_flag(self):
- engine = engines.testing_engine(options={'enable_rowcount':True})
+ engine = engines.testing_engine(options={'enable_rowcount': True})
assert engine.dialect.supports_sane_rowcount
metadata.bind = engine
- t = Table('t1', metadata,
- Column('data', String(10))
- )
+ t = Table('t1', metadata, Column('data', String(10)))
metadata.create_all()
- r = t.insert().execute({'data':'d1'}, {'data':'d2'}, {'data': 'd3'})
- r = t.update().where(t.c.data=='d2').values(data='d3').execute()
+ r = t.insert().execute({'data': 'd1'}, {'data': 'd2'},
+ {'data': 'd3'})
+ r = t.update().where(t.c.data == 'd2').values(data='d3'
+ ).execute()
eq_(r.rowcount, 1)
r = t.delete().where(t.c.data == 'd3').execute()
eq_(r.rowcount, 2)
-
- r = t.delete().execution_options(enable_rowcount=False).execute()
+ r = \
+ t.delete().execution_options(enable_rowcount=False).execute()
eq_(r.rowcount, -1)
-
- engine = engines.testing_engine(options={'enable_rowcount':False})
+ engine = engines.testing_engine(options={'enable_rowcount': False})
assert not engine.dialect.supports_sane_rowcount
metadata.bind = engine
- r = t.insert().execute({'data':'d1'}, {'data':'d2'}, {'data':'d3'})
- r = t.update().where(t.c.data=='d2').values(data='d3').execute()
+ r = t.insert().execute({'data': 'd1'}, {'data': 'd2'},
+ {'data': 'd3'})
+ r = t.update().where(t.c.data == 'd2').values(data='d3'
+ ).execute()
eq_(r.rowcount, -1)
r = t.delete().where(t.c.data == 'd3').execute()
eq_(r.rowcount, -1)
r = t.delete().execution_options(enable_rowcount=True).execute()
eq_(r.rowcount, 1)
def test_percents_in_text(self):
- for expr, result in (
- (text("select '%' from rdb$database"), '%'),
- (text("select '%%' from rdb$database"), '%%'),
- (text("select '%%%' from rdb$database"), '%%%'),
- (text("select 'hello % world' from rdb$database"), "hello % world")
- ):
+ for expr, result in [
+ (text("select '%' from rdb$database"), '%'),
+ (text("select '%%' from rdb$database"), '%%'),
+ (text("select '%%%' from rdb$database"), '%%%'),
+ (text("select 'hello % world' from rdb$database"),
+ 'hello % world'),
+ ]:
eq_(testing.db.scalar(expr), result)
class CompileTest(TestBase, AssertsCompiledSQL):
+
__only_on__ = 'informix'
__dialect__ = informix.InformixDialect()
-
- def test_statements(self):
- meta =MetaData()
- t1= Table('t1', meta, Column('col1', Integer, primary_key=True), Column('col2', String(50)))
- t2= Table('t2', meta, Column('col1', Integer, primary_key=True), Column('col2', String(50)), Column('col3', Integer, ForeignKey('t1.col1')))
-
- self.assert_compile(t1.select(), "SELECT t1.col1, t1.col2 FROM t1")
-
- self.assert_compile(select([t1, t2]).select_from(t1.join(t2)), "SELECT t1.col1, t1.col2, t2.col1, t2.col2, t2.col3 FROM t1 JOIN t2 ON t1.col1 = t2.col3")
- self.assert_compile(t1.update().values({t1.c.col1 : t1.c.col1 + 1}), 'UPDATE t1 SET col1=(t1.col1 + ?)')
-
-
+ def test_statements(self):
+ meta = MetaData()
+ t1 = Table('t1', meta, Column('col1', Integer,
+ primary_key=True), Column('col2', String(50)))
+ t2 = Table('t2', meta, Column('col1', Integer,
+ primary_key=True), Column('col2', String(50)),
+ Column('col3', Integer, ForeignKey('t1.col1')))
+ self.assert_compile(t1.select(),
+ 'SELECT t1.col1, t1.col2 FROM t1')
+ self.assert_compile(select([t1, t2]).select_from(t1.join(t2)),
+ 'SELECT t1.col1, t1.col2, t2.col1, '
+ 't2.col2, t2.col3 FROM t1 JOIN t2 ON '
+ 't1.col1 = t2.col3')
+ self.assert_compile(t1.update().values({t1.c.col1: t1.c.col1
+ + 1}), 'UPDATE t1 SET col1=(t1.col1 + ?)')
# TODO
-# - add "Database" test, a quick check for join behavior on different max versions
+# - add "Database" test, a quick check for join behavior on different
+# max versions
# - full max-specific reflection suite
# - datetime tests
# - the orm/query 'test_has' destabilizes the server- cover here
# -*- encoding: utf-8
from sqlalchemy.test.testing import eq_
-import datetime, os, re, warnings
+import datetime
+import os
+import re
+import warnings
from sqlalchemy import *
from sqlalchemy import types, exc, schema
from sqlalchemy.orm import *
from sqlalchemy.dialects.mssql import pyodbc, mxodbc
from sqlalchemy.engine import url
from sqlalchemy.test import *
-from sqlalchemy.test.testing import eq_, emits_warning_on, assert_raises_message
-
+from sqlalchemy.test.testing import eq_, emits_warning_on, \
+ assert_raises_message
class CompileTest(TestBase, AssertsCompiledSQL):
__dialect__ = mssql.dialect()
def test_insert(self):
t = table('sometable', column('somecolumn'))
- self.assert_compile(t.insert(), "INSERT INTO sometable (somecolumn) VALUES (:somecolumn)")
+ self.assert_compile(t.insert(),
+ 'INSERT INTO sometable (somecolumn) VALUES '
+ '(:somecolumn)')
def test_update(self):
t = table('sometable', column('somecolumn'))
- self.assert_compile(t.update(t.c.somecolumn==7), "UPDATE sometable SET somecolumn=:somecolumn WHERE sometable.somecolumn = :somecolumn_1", dict(somecolumn=10))
+ self.assert_compile(t.update(t.c.somecolumn == 7),
+ 'UPDATE sometable SET somecolumn=:somecolum'
+ 'n WHERE sometable.somecolumn = '
+ ':somecolumn_1', dict(somecolumn=10))
# TODO: should this be for *all* MS-SQL dialects ?
def test_mxodbc_binds(self):
- """mxodbc uses MS-SQL native binds, which aren't allowed in various places."""
+ """mxodbc uses MS-SQL native binds, which aren't allowed in
+ various places."""
mxodbc_dialect = mxodbc.dialect()
t = table('sometable', column('foo'))
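+ # (the expected strings below show bind values rendered inline as
+ # literals, e.g. IN ('x', 'y', 'z'), rather than as parameters)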
),
(
select([t]).where(t.c.foo.in_(['x', 'y', 'z'])),
- "SELECT sometable.foo FROM sometable WHERE sometable.foo IN ('x', 'y', 'z')",
+ "SELECT sometable.foo FROM sometable WHERE sometable.foo "
+ "IN ('x', 'y', 'z')",
),
(
func.foobar("x", "y", 4, 5),
),
(
select([t]).where(func.len('xyz') > func.len(t.c.foo)),
- "SELECT sometable.foo FROM sometable WHERE len('xyz') > len(sometable.foo)",
+ "SELECT sometable.foo FROM sometable WHERE len('xyz') > "
+ "len(sometable.foo)",
)
]:
self.assert_compile(expr, compile, dialect=mxodbc_dialect)
"""
t = table('sometable', column('somecolumn'))
- self.assert_compile(t.select().where(t.c.somecolumn==t.select()), "SELECT sometable.somecolumn FROM sometable WHERE sometable.somecolumn IN (SELECT sometable.somecolumn FROM sometable)")
- self.assert_compile(t.select().where(t.c.somecolumn!=t.select()), "SELECT sometable.somecolumn FROM sometable WHERE sometable.somecolumn NOT IN (SELECT sometable.somecolumn FROM sometable)")
+ self.assert_compile(t.select().where(t.c.somecolumn
+ == t.select()),
+ 'SELECT sometable.somecolumn FROM '
+ 'sometable WHERE sometable.somecolumn IN '
+ '(SELECT sometable.somecolumn FROM '
+ 'sometable)')
+ self.assert_compile(t.select().where(t.c.somecolumn
+ != t.select()),
+ 'SELECT sometable.somecolumn FROM '
+ 'sometable WHERE sometable.somecolumn NOT '
+ 'IN (SELECT sometable.somecolumn FROM '
+ 'sometable)')
def test_count(self):
t = table('sometable', column('somecolumn'))
- self.assert_compile(t.count(), "SELECT count(sometable.somecolumn) AS tbl_row_count FROM sometable")
+ self.assert_compile(t.count(),
+ 'SELECT count(sometable.somecolumn) AS '
+ 'tbl_row_count FROM sometable')
def test_noorderby_insubquery(self):
- """test that the ms-sql dialect removes ORDER BY clauses from subqueries"""
+ """test that the ms-sql dialect removes ORDER BY clauses from
+ subqueries"""
table1 = table('mytable',
column('myid', Integer),
column('description', String),
)
- q = select([table1.c.myid], order_by=[table1.c.myid]).alias('foo')
+ q = select([table1.c.myid],
+ order_by=[table1.c.myid]).alias('foo')
crit = q.c.myid == table1.c.myid
- self.assert_compile(select(['*'], crit), """SELECT * FROM (SELECT mytable.myid AS myid FROM mytable) AS foo, mytable WHERE foo.myid = mytable.myid""")
+ self.assert_compile(select(['*'], crit),
+ "SELECT * FROM (SELECT mytable.myid AS "
+ "myid FROM mytable) AS foo, mytable WHERE "
+ "foo.myid = mytable.myid")
def test_aliases_schemas(self):
metadata = MetaData()
s = table4.select(use_labels=True)
c = s.compile(dialect=self.__dialect__)
print c.result_map
- assert table4.c.rem_id in set(c.result_map['remote_owner_remotetable_rem_id'][1])
-
- self.assert_compile(table4.select(), "SELECT remotetable_1.rem_id, remotetable_1.datatype_id, remotetable_1.value FROM remote_owner.remotetable AS remotetable_1")
-
- self.assert_compile(table4.select(use_labels=True), "SELECT remotetable_1.rem_id AS remote_owner_remotetable_rem_id, remotetable_1.datatype_id AS remote_owner_remotetable_datatype_id, remotetable_1.value AS remote_owner_remotetable_value FROM remote_owner.remotetable AS remotetable_1")
-
- self.assert_compile(table1.join(table4, table1.c.myid==table4.c.rem_id).select(), "SELECT mytable.myid, mytable.name, mytable.description, remotetable_1.rem_id, remotetable_1.datatype_id, remotetable_1.value FROM mytable JOIN remote_owner.remotetable AS remotetable_1 ON remotetable_1.rem_id = mytable.myid")
+ assert table4.c.rem_id \
+ in set(c.result_map['remote_owner_remotetable_rem_id'][1])
+ self.assert_compile(table4.select(),
+ 'SELECT remotetable_1.rem_id, '
+ 'remotetable_1.datatype_id, '
+ 'remotetable_1.value FROM '
+ 'remote_owner.remotetable AS remotetable_1')
+ self.assert_compile(table4.select(use_labels=True),
+ 'SELECT remotetable_1.rem_id AS '
+ 'remote_owner_remotetable_rem_id, '
+ 'remotetable_1.datatype_id AS '
+ 'remote_owner_remotetable_datatype_id, '
+ 'remotetable_1.value AS '
+ 'remote_owner_remotetable_value FROM '
+ 'remote_owner.remotetable AS remotetable_1')
+ self.assert_compile(table1.join(table4, table1.c.myid
+ == table4.c.rem_id).select(),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description, remotetable_1.rem_id,'
+ ' remotetable_1.datatype_id, '
+ 'remotetable_1.value FROM mytable JOIN '
+ 'remote_owner.remotetable AS remotetable_1 '
+ 'ON remotetable_1.rem_id = mytable.myid')
def test_delete_schema(self):
metadata = MetaData()
- tbl = Table('test', metadata, Column('id', Integer, primary_key=True), schema='paj')
- self.assert_compile(tbl.delete(tbl.c.id == 1), "DELETE FROM paj.test WHERE paj.test.id = :id_1")
-
- s = select([tbl.c.id]).where(tbl.c.id==1)
- self.assert_compile(tbl.delete().where(tbl.c.id==(s)), "DELETE FROM paj.test WHERE paj.test.id IN (SELECT test_1.id FROM paj.test AS test_1 WHERE test_1.id = :id_1)")
+ tbl = Table('test', metadata, Column('id', Integer,
+ primary_key=True), schema='paj')
+ self.assert_compile(tbl.delete(tbl.c.id == 1),
+ 'DELETE FROM paj.test WHERE paj.test.id = '
+ ':id_1')
+ s = select([tbl.c.id]).where(tbl.c.id == 1)
+ self.assert_compile(tbl.delete().where(tbl.c.id == s),
+ 'DELETE FROM paj.test WHERE paj.test.id IN '
+ '(SELECT test_1.id FROM paj.test AS test_1 '
+ 'WHERE test_1.id = :id_1)')
def test_delete_schema_multipart(self):
metadata = MetaData()
- tbl = Table('test', metadata, Column('id', Integer, primary_key=True), schema='banana.paj')
- self.assert_compile(tbl.delete(tbl.c.id == 1), "DELETE FROM banana.paj.test WHERE banana.paj.test.id = :id_1")
-
- s = select([tbl.c.id]).where(tbl.c.id==1)
- self.assert_compile(tbl.delete().where(tbl.c.id==(s)), "DELETE FROM banana.paj.test WHERE banana.paj.test.id IN (SELECT test_1.id FROM banana.paj.test AS test_1 WHERE test_1.id = :id_1)")
+ tbl = Table('test', metadata, Column('id', Integer,
+ primary_key=True), schema='banana.paj')
+ self.assert_compile(tbl.delete(tbl.c.id == 1),
+ 'DELETE FROM banana.paj.test WHERE '
+ 'banana.paj.test.id = :id_1')
+ s = select([tbl.c.id]).where(tbl.c.id == 1)
+ self.assert_compile(tbl.delete().where(tbl.c.id == s),
+ 'DELETE FROM banana.paj.test WHERE '
+ 'banana.paj.test.id IN (SELECT test_1.id '
+ 'FROM banana.paj.test AS test_1 WHERE '
+ 'test_1.id = :id_1)')
def test_delete_schema_multipart_needs_quoting(self):
metadata = MetaData()
- tbl = Table('test', metadata, Column('id', Integer, primary_key=True), schema='banana split.paj')
- self.assert_compile(tbl.delete(tbl.c.id == 1), "DELETE FROM [banana split].paj.test WHERE [banana split].paj.test.id = :id_1")
-
- s = select([tbl.c.id]).where(tbl.c.id==1)
- self.assert_compile(tbl.delete().where(tbl.c.id==(s)), "DELETE FROM [banana split].paj.test WHERE [banana split].paj.test.id IN (SELECT test_1.id FROM [banana split].paj.test AS test_1 WHERE test_1.id = :id_1)")
+ tbl = Table('test', metadata, Column('id', Integer,
+ primary_key=True), schema='banana split.paj')
+ self.assert_compile(tbl.delete(tbl.c.id == 1),
+ 'DELETE FROM [banana split].paj.test WHERE '
+ '[banana split].paj.test.id = :id_1')
+ s = select([tbl.c.id]).where(tbl.c.id == 1)
+ self.assert_compile(tbl.delete().where(tbl.c.id == s),
+ 'DELETE FROM [banana split].paj.test WHERE '
+ '[banana split].paj.test.id IN (SELECT '
+ 'test_1.id FROM [banana split].paj.test AS '
+ 'test_1 WHERE test_1.id = :id_1)')
def test_delete_schema_multipart_both_need_quoting(self):
metadata = MetaData()
- tbl = Table('test', metadata, Column('id', Integer, primary_key=True), schema='banana split.paj with a space')
- self.assert_compile(tbl.delete(tbl.c.id == 1), "DELETE FROM [banana split].[paj with a space].test WHERE [banana split].[paj with a space].test.id = :id_1")
-
- s = select([tbl.c.id]).where(tbl.c.id==1)
- self.assert_compile(tbl.delete().where(tbl.c.id==(s)), "DELETE FROM [banana split].[paj with a space].test WHERE [banana split].[paj with a space].test.id IN (SELECT test_1.id FROM [banana split].[paj with a space].test AS test_1 WHERE test_1.id = :id_1)")
+ tbl = Table('test', metadata, Column('id', Integer,
+ primary_key=True),
+ schema='banana split.paj with a space')
+ self.assert_compile(tbl.delete(tbl.c.id == 1),
+ 'DELETE FROM [banana split].[paj with a '
+ 'space].test WHERE [banana split].[paj '
+ 'with a space].test.id = :id_1')
+ s = select([tbl.c.id]).where(tbl.c.id == 1)
+ self.assert_compile(tbl.delete().where(tbl.c.id == s),
+ 'DELETE FROM [banana split].[paj with a '
+ 'space].test WHERE [banana split].[paj '
+ 'with a space].test.id IN (SELECT '
+ 'test_1.id FROM [banana split].[paj with a '
+ 'space].test AS test_1 WHERE test_1.id = '
+ ':id_1)')
def test_union(self):
- t1 = table('t1',
- column('col1'),
- column('col2'),
- column('col3'),
- column('col4')
- )
- t2 = table('t2',
- column('col1'),
- column('col2'),
- column('col3'),
- column('col4'))
-
- (s1, s2) = (
- select([t1.c.col3.label('col3'), t1.c.col4.label('col4')],
- t1.c.col2.in_(["t1col2r1", "t1col2r2"])),
- select([t2.c.col3.label('col3'), t2.c.col4.label('col4')],
- t2.c.col2.in_(["t2col2r2", "t2col2r3"]))
- )
+ t1 = table('t1', column('col1'), column('col2'), column('col3'
+ ), column('col4'))
+ t2 = table('t2', column('col1'), column('col2'), column('col3'
+ ), column('col4'))
+ s1, s2 = select([t1.c.col3.label('col3'), t1.c.col4.label('col4'
+ )], t1.c.col2.in_(['t1col2r1', 't1col2r2'])), \
+ select([t2.c.col3.label('col3'), t2.c.col4.label('col4')],
+ t2.c.col2.in_(['t2col2r2', 't2col2r3']))
u = union(s1, s2, order_by=['col3', 'col4'])
- self.assert_compile(u,
- "SELECT t1.col3 AS col3, t1.col4 AS col4 FROM t1 WHERE t1.col2 IN "
- "(:col2_1, :col2_2) "\
- "UNION SELECT t2.col3 AS col3, t2.col4 AS col4 FROM t2 WHERE t2.col2 "
- "IN (:col2_3, :col2_4) ORDER BY col3, col4")
-
- self.assert_compile(u.alias('bar').select(),
- "SELECT bar.col3, bar.col4 FROM (SELECT t1.col3 AS col3, "
- "t1.col4 AS col4 FROM t1 WHERE "\
- "t1.col2 IN (:col2_1, :col2_2) UNION SELECT t2.col3 AS col3, "
- "t2.col4 AS col4 FROM t2 WHERE t2.col2 IN (:col2_3, :col2_4)) "
- "AS bar")
+ self.assert_compile(u,
+ 'SELECT t1.col3 AS col3, t1.col4 AS col4 '
+ 'FROM t1 WHERE t1.col2 IN (:col2_1, '
+ ':col2_2) UNION SELECT t2.col3 AS col3, '
+ 't2.col4 AS col4 FROM t2 WHERE t2.col2 IN '
+ '(:col2_3, :col2_4) ORDER BY col3, col4')
+ self.assert_compile(u.alias('bar').select(),
+ 'SELECT bar.col3, bar.col4 FROM (SELECT '
+ 't1.col3 AS col3, t1.col4 AS col4 FROM t1 '
+ 'WHERE t1.col2 IN (:col2_1, :col2_2) UNION '
+ 'SELECT t2.col3 AS col3, t2.col4 AS col4 '
+ 'FROM t2 WHERE t2.col2 IN (:col2_3, '
+ ':col2_4)) AS bar')
def test_function(self):
- self.assert_compile(func.foo(1, 2), "foo(:foo_1, :foo_2)")
- self.assert_compile(func.current_time(), "CURRENT_TIME")
- self.assert_compile(func.foo(), "foo()")
-
+ self.assert_compile(func.foo(1, 2), 'foo(:foo_1, :foo_2)')
+ self.assert_compile(func.current_time(), 'CURRENT_TIME')
+ self.assert_compile(func.foo(), 'foo()')
m = MetaData()
- t = Table('sometable', m, Column('col1', Integer), Column('col2', Integer))
- self.assert_compile(select([func.max(t.c.col1)]), "SELECT max(sometable.col1) AS max_1 FROM sometable")
+ t = Table('sometable', m, Column('col1', Integer), Column('col2'
+ , Integer))
+ self.assert_compile(select([func.max(t.c.col1)]),
+ 'SELECT max(sometable.col1) AS max_1 FROM '
+ 'sometable')
def test_function_overrides(self):
self.assert_compile(func.current_date(), "GETDATE()")
'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % field)
def test_update_returning(self):
- table1 = table('mytable',
- column('myid', Integer),
- column('name', String(128)),
- column('description', String(128)),
- )
-
- u = update(table1, values=dict(name='foo')).returning(table1.c.myid, table1.c.name)
- self.assert_compile(u, "UPDATE mytable SET name=:name OUTPUT inserted.myid, inserted.name")
-
+ table1 = table('mytable', column('myid', Integer), column('name'
+ , String(128)), column('description',
+ String(128)))
+ u = update(table1, values=dict(name='foo'
+ )).returning(table1.c.myid, table1.c.name)
+ self.assert_compile(u,
+ 'UPDATE mytable SET name=:name OUTPUT '
+ 'inserted.myid, inserted.name')
u = update(table1, values=dict(name='foo')).returning(table1)
- self.assert_compile(u, "UPDATE mytable SET name=:name OUTPUT inserted.myid, "
- "inserted.name, inserted.description")
-
- u = update(table1, values=dict(name='foo')).returning(table1).where(table1.c.name=='bar')
- self.assert_compile(u, "UPDATE mytable SET name=:name OUTPUT inserted.myid, "
- "inserted.name, inserted.description WHERE mytable.name = :name_1")
-
- u = update(table1, values=dict(name='foo')).returning(func.length(table1.c.name))
- self.assert_compile(u, "UPDATE mytable SET name=:name OUTPUT LEN(inserted.name) AS length_1")
+ self.assert_compile(u,
+ 'UPDATE mytable SET name=:name OUTPUT '
+ 'inserted.myid, inserted.name, '
+ 'inserted.description')
+ u = update(table1, values=dict(name='foo'
+ )).returning(table1).where(table1.c.name == 'bar')
+ self.assert_compile(u,
+ 'UPDATE mytable SET name=:name OUTPUT '
+ 'inserted.myid, inserted.name, '
+ 'inserted.description WHERE mytable.name = '
+ ':name_1')
+ u = update(table1, values=dict(name='foo'
+ )).returning(func.length(table1.c.name))
+ self.assert_compile(u,
+ 'UPDATE mytable SET name=:name OUTPUT '
+ 'LEN(inserted.name) AS length_1')
def test_delete_returning(self):
- table1 = table('mytable',
- column('myid', Integer),
- column('name', String(128)),
- column('description', String(128)),
- )
-
+ table1 = table('mytable', column('myid', Integer), column('name'
+ , String(128)), column('description',
+ String(128)))
d = delete(table1).returning(table1.c.myid, table1.c.name)
- self.assert_compile(d, "DELETE FROM mytable OUTPUT deleted.myid, deleted.name")
-
- d = delete(table1).where(table1.c.name=='bar').returning(table1.c.myid, table1.c.name)
- self.assert_compile(d, "DELETE FROM mytable OUTPUT deleted.myid, deleted.name WHERE mytable.name = :name_1")
-
+ self.assert_compile(d,
+ 'DELETE FROM mytable OUTPUT deleted.myid, '
+ 'deleted.name')
+ d = delete(table1).where(table1.c.name == 'bar'
+ ).returning(table1.c.myid,
+ table1.c.name)
+ self.assert_compile(d,
+ 'DELETE FROM mytable OUTPUT deleted.myid, '
+ 'deleted.name WHERE mytable.name = :name_1')
def test_insert_returning(self):
- table1 = table('mytable',
- column('myid', Integer),
- column('name', String(128)),
- column('description', String(128)),
- )
-
- i = insert(table1, values=dict(name='foo')).returning(table1.c.myid, table1.c.name)
- self.assert_compile(i, "INSERT INTO mytable (name) OUTPUT inserted.myid, inserted.name VALUES (:name)")
-
+ table1 = table('mytable', column('myid', Integer), column('name'
+ , String(128)), column('description',
+ String(128)))
+ i = insert(table1, values=dict(name='foo'
+ )).returning(table1.c.myid, table1.c.name)
+ self.assert_compile(i,
+ 'INSERT INTO mytable (name) OUTPUT '
+ 'inserted.myid, inserted.name VALUES '
+ '(:name)')
i = insert(table1, values=dict(name='foo')).returning(table1)
- self.assert_compile(i, "INSERT INTO mytable (name) OUTPUT inserted.myid, "
- "inserted.name, inserted.description VALUES (:name)")
-
- i = insert(table1, values=dict(name='foo')).returning(func.length(table1.c.name))
- self.assert_compile(i, "INSERT INTO mytable (name) OUTPUT LEN(inserted.name) AS length_1 VALUES (:name)")
-
+ self.assert_compile(i,
+ 'INSERT INTO mytable (name) OUTPUT '
+ 'inserted.myid, inserted.name, '
+ 'inserted.description VALUES (:name)')
+ i = insert(table1, values=dict(name='foo'
+ )).returning(func.length(table1.c.name))
+ self.assert_compile(i,
+ 'INSERT INTO mytable (name) OUTPUT '
+ 'LEN(inserted.name) AS length_1 VALUES '
+ '(:name)')
class IdentityInsertTest(TestBase, AssertsCompiledSQL):
metadata.drop_all()
def test_compiled(self):
- self.assert_compile(cattable.insert().values(id=9, description='Python'), "INSERT INTO cattable (id, description) VALUES (:id, :description)")
+ self.assert_compile(cattable.insert().values(id=9,
+ description='Python'),
+ 'INSERT INTO cattable (id, description) '
+ 'VALUES (:id, :description)')
def test_execute(self):
cattable.insert().values(id=9, description='Python').execute()
eq_((10, 'PHP'), lastcat.first())
def test_executemany(self):
- cattable.insert().execute([
- {'id': 89, 'description': 'Python'},
- {'id': 8, 'description': 'Ruby'},
- {'id': 3, 'description': 'Perl'},
- {'id': 1, 'description': 'Java'},
- ])
-
+ cattable.insert().execute([{'id': 89, 'description': 'Python'},
+ {'id': 8, 'description': 'Ruby'},
+ {'id': 3, 'description': 'Perl'},
+ {'id': 1, 'description': 'Java'}])
cats = cattable.select().order_by(cattable.c.id).execute()
- eq_([(1, 'Java'), (3, 'Perl'), (8, 'Ruby'), (89, 'Python')], list(cats))
-
- cattable.insert().execute([
- {'description': 'PHP'},
- {'description': 'Smalltalk'},
- ])
-
- lastcats = cattable.select().order_by(desc(cattable.c.id)).limit(2).execute()
+ eq_([(1, 'Java'), (3, 'Perl'), (8, 'Ruby'), (89, 'Python')],
+ list(cats))
+ cattable.insert().execute([{'description': 'PHP'},
+ {'description': 'Smalltalk'}])
+ lastcats = \
+ cattable.select().order_by(desc(cattable.c.id)).limit(2).execute()
eq_([(91, 'Smalltalk'), (90, 'PHP')], list(lastcats))
def test_basic_reflection(self):
meta = MetaData(testing.db)
- users = Table('engine_users', meta,
+ users = Table(
+ 'engine_users',
+ meta,
Column('user_id', types.INT, primary_key=True),
Column('user_name', types.VARCHAR(20), nullable=False),
Column('test1', types.CHAR(5), nullable=False),
Column('test2', types.Float(5), nullable=False),
Column('test3', types.Text),
- Column('test4', types.Numeric, nullable = False),
+ Column('test4', types.Numeric, nullable=False),
Column('test5', types.DateTime),
Column('parent_user_id', types.Integer,
ForeignKey('engine_users.user_id')),
Column('test6', types.DateTime, nullable=False),
Column('test7', types.Text),
Column('test8', types.LargeBinary),
- Column('test_passivedefault2', types.Integer, server_default='5'),
+ Column('test_passivedefault2', types.Integer,
+ server_default='5'),
Column('test9', types.BINARY(100)),
Column('test_numeric', types.Numeric()),
test_needs_fk=True,
- )
+ )
- addresses = Table('engine_email_addresses', meta,
- Column('address_id', types.Integer, primary_key = True),
- Column('remote_user_id', types.Integer, ForeignKey(users.c.user_id)),
+ addresses = Table(
+ 'engine_email_addresses',
+ meta,
+ Column('address_id', types.Integer, primary_key=True),
+ Column('remote_user_id', types.Integer,
+ ForeignKey(users.c.user_id)),
Column('email_address', types.String(20)),
test_needs_fk=True,
- )
+ )
meta.create_all()
try:
meta2 = MetaData()
- reflected_users = Table('engine_users', meta2, autoload=True,
+ reflected_users = Table('engine_users', meta2,
+ autoload=True,
autoload_with=testing.db)
- reflected_addresses = Table('engine_email_addresses', meta2,
- autoload=True, autoload_with=testing.db)
+ reflected_addresses = Table('engine_email_addresses',
+ meta2, autoload=True, autoload_with=testing.db)
self.assert_tables_equal(users, reflected_users)
self.assert_tables_equal(addresses, reflected_addresses)
finally:
class QueryUnicodeTest(TestBase):
+
__only_on__ = 'mssql'
def test_convert_unicode(self):
meta = MetaData(testing.db)
- t1 = Table('unitest_table', meta,
- Column('id', Integer, primary_key=True),
- Column('descr', mssql.MSText(convert_unicode=True)))
+ t1 = Table('unitest_table', meta, Column('id', Integer,
+ primary_key=True), Column('descr',
+ mssql.MSText(convert_unicode=True)))
meta.create_all()
con = testing.db.connect()
- # encode in UTF-8 (sting object) because this is the default dialect encoding
- con.execute(u"insert into unitest_table values ('bien mangé')".encode('UTF-8'))
+ # encode in UTF-8 (string object) because this is the default
+ # dialect encoding
+ con.execute(u"insert into unitest_table values ('bien u\
+ umang\xc3\xa9')".encode('UTF-8'))
try:
r = t1.select().execute().first()
- assert isinstance(r[1], unicode), '%s is %s instead of unicode, working on %s' % (
- r[1], type(r[1]), meta.bind)
-
+ assert isinstance(r[1], unicode), \
+ '%s is %s instead of unicode, working on %s' % (r[1],
+ type(r[1]), meta.bind)
finally:
meta.drop_all()
# test should be written.
meta = MetaData(testing.db)
t1 = Table('t1', meta,
- Column('id', Integer, Sequence('fred', 100, 1), primary_key=True),
+ Column('id', Integer, Sequence('fred', 100, 1),
+ primary_key=True),
Column('descr', String(200)),
- # the following flag will prevent the MSSQLCompiler.returning_clause
- # from getting called, though the ExecutionContext will still have
- # a _select_lastrowid, so the SELECT SCOPE_IDENTITY() will hopefully
- # be called instead.
+ # the following flag will prevent the
+ # MSSQLCompiler.returning_clause from getting called,
+ # though the ExecutionContext will still have a
+ # _select_lastrowid, so the SELECT SCOPE_IDENTITY() will
+ # hopefully be called instead.
implicit_returning = False
)
t2 = Table('t2', meta,
- Column('id', Integer, Sequence('fred', 200, 1), primary_key=True),
+ Column('id', Integer, Sequence('fred', 200, 1),
+ primary_key=True),
Column('descr', String(200)))
meta.create_all()
con = testing.db.connect()
meta = MetaData(testing.db)
con = testing.db.connect()
con.execute('create schema paj')
- tbl = Table('test', meta, Column('id', Integer, primary_key=True), schema='paj')
+ tbl = Table('test', meta,
+ Column('id', Integer, primary_key=True), schema='paj')
tbl.create()
try:
tbl.insert().execute({'id':1})
def test_returning_no_autoinc(self):
meta = MetaData(testing.db)
-
- table = Table('t1', meta, Column('id', Integer, primary_key=True), Column('data', String(50)))
+ table = Table('t1', meta, Column('id', Integer,
+ primary_key=True), Column('data', String(50)))
table.create()
try:
- result = table.insert().values(id=1, data=func.lower("SomeString")).returning(table.c.id, table.c.data).execute()
- eq_(result.fetchall(), [(1, 'somestring',)])
+ result = table.insert().values(id=1,
+ data=func.lower('SomeString'
+ )).returning(table.c.id, table.c.data).execute()
+ eq_(result.fetchall(), [(1, 'somestring')])
finally:
- # this will hang if the "SET IDENTITY_INSERT t1 OFF" occurs before the
- # result is fetched
+
+ # this will hang if the "SET IDENTITY_INSERT t1 OFF" occurs
+ # before the result is fetched
+
table.drop()
def test_delete_schema(self):
meta = MetaData(testing.db)
con = testing.db.connect()
con.execute('create schema paj')
- tbl = Table('test', meta, Column('id', Integer, primary_key=True), schema='paj')
+ tbl = Table('test', meta, Column('id', Integer,
+ primary_key=True), schema='paj')
tbl.create()
try:
- tbl.insert().execute({'id':1})
+ tbl.insert().execute({'id': 1})
tbl.delete(tbl.c.id == 1).execute()
finally:
tbl.drop()
self.column = t.c.test_column
dialect = mssql.dialect()
- self.ddl_compiler = dialect.ddl_compiler(dialect, schema.CreateTable(t))
+ self.ddl_compiler = dialect.ddl_compiler(dialect,
+ schema.CreateTable(t))
def _column_spec(self):
return self.ddl_compiler.get_column_specification(self.column)
def full_text_search_missing():
- """Test if full text search is not implemented and return False if
+ """Test if full text search is not implemented and return False if
it is implemented and True otherwise."""
try:
connection = testing.db.connect()
try:
- connection.execute("CREATE FULLTEXT CATALOG Catalog AS DEFAULT")
+ connection.execute('CREATE FULLTEXT CATALOG Catalog AS '
+ 'DEFAULT')
return False
except:
return True
connection.close()
class MatchTest(TestBase, AssertsCompiledSQL):
+
__only_on__ = 'mssql'
- __skip_if__ = (full_text_search_missing, )
+ __skip_if__ = full_text_search_missing,
@classmethod
def setup_class(cls):
global metadata, cattable, matchtable
metadata = MetaData(testing.db)
-
- cattable = Table('cattable', metadata,
- Column('id', Integer),
- Column('description', String(50)),
- PrimaryKeyConstraint('id', name='PK_cattable'),
- )
- matchtable = Table('matchtable', metadata,
+ cattable = Table('cattable', metadata, Column('id', Integer),
+ Column('description', String(50)),
+ PrimaryKeyConstraint('id', name='PK_cattable'))
+ matchtable = Table(
+ 'matchtable',
+ metadata,
Column('id', Integer),
Column('title', String(200)),
Column('category_id', Integer, ForeignKey('cattable.id')),
PrimaryKeyConstraint('id', name='PK_matchtable'),
- )
+ )
DDL("""CREATE FULLTEXT INDEX
ON cattable (description)
- KEY INDEX PK_cattable"""
- ).execute_at('after-create', matchtable)
+ KEY INDEX PK_cattable""").execute_at('after-create'
+ , matchtable)
DDL("""CREATE FULLTEXT INDEX
ON matchtable (title)
- KEY INDEX PK_matchtable"""
- ).execute_at('after-create', matchtable)
+ KEY INDEX PK_matchtable""").execute_at('after-create'
+ , matchtable)
metadata.create_all()
-
- cattable.insert().execute([
- {'id': 1, 'description': 'Python'},
- {'id': 2, 'description': 'Ruby'},
- ])
- matchtable.insert().execute([
- {'id': 1, 'title': 'Agile Web Development with Rails', 'category_id': 2},
- {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
- {'id': 3, 'title': "Programming Matz's Ruby", 'category_id': 2},
- {'id': 4, 'title': 'The Definitive Guide to Django', 'category_id': 1},
- {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1}
- ])
- DDL("WAITFOR DELAY '00:00:05'").execute(bind=engines.testing_engine())
+ cattable.insert().execute([{'id': 1, 'description': 'Python'},
+ {'id': 2, 'description': 'Ruby'}])
+ matchtable.insert().execute([
+ {'id': 1, 'title': 'Agile Web Development with Rails',
+ 'category_id': 2},
+ {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
+ {'id': 3, 'title': "Programming Matz's Ruby",
+ 'category_id': 2},
+ {'id': 4, 'title': 'The Definitive Guide to Django',
+ 'category_id': 1},
+ {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1},
+ ])
+ DDL("WAITFOR DELAY '00:00:05'"
+ ).execute(bind=engines.testing_engine())
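+ # (the delay presumably gives the full-text indexer time to
+ # populate before the match queries below run)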
@classmethod
def teardown_class(cls):
connection.close()
def test_expression(self):
- self.assert_compile(matchtable.c.title.match('somstr'), "CONTAINS (matchtable.title, ?)")
+ self.assert_compile(matchtable.c.title.match('somstr'),
+ 'CONTAINS (matchtable.title, ?)')
def test_simple_match(self):
- results = matchtable.select().where(matchtable.c.title.match('python')).order_by(matchtable.c.id).execute().fetchall()
+ results = \
+ matchtable.select().where(matchtable.c.title.match('python'
+ )).order_by(matchtable.c.id).execute().fetchall()
eq_([2, 5], [r.id for r in results])
def test_simple_match_with_apostrophe(self):
- results = matchtable.select().where(matchtable.c.title.match("Matz's")).execute().fetchall()
+ results = \
+ matchtable.select().where(matchtable.c.title.match("Matz's"
+ )).execute().fetchall()
eq_([3], [r.id for r in results])
def test_simple_prefix_match(self):
- results = matchtable.select().where(matchtable.c.title.match('"nut*"')).execute().fetchall()
+ results = \
+ matchtable.select().where(matchtable.c.title.match('"nut*"'
+ )).execute().fetchall()
eq_([5], [r.id for r in results])
def test_simple_inflectional_match(self):
- results = matchtable.select().where(matchtable.c.title.match('FORMSOF(INFLECTIONAL, "dives")')).execute().fetchall()
+ results = \
+ matchtable.select().where(
+ matchtable.c.title.match('FORMSOF(INFLECTIONAL, "dives")'
+ )).execute().fetchall()
eq_([2], [r.id for r in results])
def test_or_match(self):
- results1 = matchtable.select().where(or_(matchtable.c.title.match('nutshell'),
- matchtable.c.title.match('ruby'))
- ).order_by(matchtable.c.id).execute().fetchall()
+ results1 = \
+ matchtable.select().where(or_(matchtable.c.title.match('nutshell'
+ ), matchtable.c.title.match('ruby'
+ ))).order_by(matchtable.c.id).execute().fetchall()
eq_([3, 5], [r.id for r in results1])
- results2 = matchtable.select().where(matchtable.c.title.match('nutshell OR ruby'),
- ).order_by(matchtable.c.id).execute().fetchall()
- eq_([3, 5], [r.id for r in results2])
+ results2 = \
+ matchtable.select().where(
+ matchtable.c.title.match('nutshell OR ruby'
+ )).order_by(matchtable.c.id).execute().fetchall()
+ eq_([3, 5], [r.id for r in results2])
def test_and_match(self):
- results1 = matchtable.select().where(and_(matchtable.c.title.match('python'),
- matchtable.c.title.match('nutshell'))
- ).execute().fetchall()
+ results1 = \
+ matchtable.select().where(and_(matchtable.c.title.match('python'
+ ), matchtable.c.title.match('nutshell'
+ ))).execute().fetchall()
eq_([5], [r.id for r in results1])
- results2 = matchtable.select().where(matchtable.c.title.match('python AND nutshell'),
- ).execute().fetchall()
+ results2 = \
+ matchtable.select().where(
+ matchtable.c.title.match('python AND nutshell'
+ )).execute().fetchall()
eq_([5], [r.id for r in results2])
def test_match_across_joins(self):
- results = matchtable.select().where(and_(cattable.c.id==matchtable.c.category_id,
- or_(cattable.c.description.match('Ruby'),
- matchtable.c.title.match('nutshell')))
- ).order_by(matchtable.c.id).execute().fetchall()
+ results = matchtable.select().where(and_(cattable.c.id
+ == matchtable.c.category_id,
+ or_(cattable.c.description.match('Ruby'),
+ matchtable.c.title.match('nutshell'
+ )))).order_by(matchtable.c.id).execute().fetchall()
eq_([1, 3, 5], [r.id for r in results])
eq_([['dsn=mydsn;UID=username;PWD=password'], {}], connection)
def test_pyodbc_connect_dsn_extra(self):
- u = url.make_url('mssql://username:password@mydsn/?LANGUAGE=us_english&foo=bar')
+ u = \
+ url.make_url('mssql://username:password@mydsn/?LANGUAGE=us_'
+ 'english&foo=bar')
connection = dialect.create_connect_args(u)
- eq_([['dsn=mydsn;UID=username;PWD=password;LANGUAGE=us_english;foo=bar'], {}], connection)
+ eq_([['dsn=mydsn;UID=username;PWD=password;LANGUAGE=us_english;'
+ 'foo=bar'], {}], connection)
def test_pyodbc_connect(self):
u = url.make_url('mssql://username:password@hostspec/database')
connection = dialect.create_connect_args(u)
- eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UID=username;PWD=password'], {}], connection)
+ eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
+ 'D=username;PWD=password'], {}], connection)
def test_pyodbc_connect_comma_port(self):
- u = url.make_url('mssql://username:password@hostspec:12345/database')
+ u = \
+ url.make_url('mssql://username:password@hostspec:12345/data'
+ 'base')
connection = dialect.create_connect_args(u)
- eq_([['DRIVER={SQL Server};Server=hostspec,12345;Database=database;UID=username;PWD=password'], {}], connection)
+ eq_([['DRIVER={SQL Server};Server=hostspec,12345;Database=datab'
+ 'ase;UID=username;PWD=password'], {}], connection)
def test_pyodbc_connect_config_port(self):
- u = url.make_url('mssql://username:password@hostspec/database?port=12345')
+ u = \
+ url.make_url('mssql://username:password@hostspec/database?p'
+ 'ort=12345')
connection = dialect.create_connect_args(u)
- eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UID=username;PWD=password;port=12345'], {}], connection)
+ eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
+ 'D=username;PWD=password;port=12345'], {}], connection)
def test_pyodbc_extra_connect(self):
- u = url.make_url('mssql://username:password@hostspec/database?LANGUAGE=us_english&foo=bar')
+ u = \
+ url.make_url('mssql://username:password@hostspec/database?L'
+ 'ANGUAGE=us_english&foo=bar')
connection = dialect.create_connect_args(u)
eq_(connection[1], {})
- eq_(connection[0][0] in
- ('DRIVER={SQL Server};Server=hostspec;Database=database;UID=username;PWD=password;foo=bar;LANGUAGE=us_english',
- 'DRIVER={SQL Server};Server=hostspec;Database=database;UID=username;PWD=password;LANGUAGE=us_english;foo=bar'), True)
+ eq_(connection[0][0]
+ in ('DRIVER={SQL Server};Server=hostspec;Database=database;'
+ 'UID=username;PWD=password;foo=bar;LANGUAGE=us_english',
+ 'DRIVER={SQL Server};Server=hostspec;Database=database;UID='
+ 'username;PWD=password;LANGUAGE=us_english;foo=bar'), True)
def test_pyodbc_odbc_connect(self):
- u = url.make_url('mssql:///?odbc_connect=DRIVER%3D%7BSQL+Server%7D%3BServer%3Dhostspec%3BDatabase%3Ddatabase%3BUID%3Dusername%3BPWD%3Dpassword')
+ u = \
+ url.make_url('mssql:///?odbc_connect=DRIVER%3D%7BSQL+Server'
+ '%7D%3BServer%3Dhostspec%3BDatabase%3Ddatabase'
+ '%3BUID%3Dusername%3BPWD%3Dpassword')
connection = dialect.create_connect_args(u)
- eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UID=username;PWD=password'], {}], connection)
+ eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
+ 'D=username;PWD=password'], {}], connection)
def test_pyodbc_odbc_connect_with_dsn(self):
- u = url.make_url('mssql:///?odbc_connect=dsn%3Dmydsn%3BDatabase%3Ddatabase%3BUID%3Dusername%3BPWD%3Dpassword')
+ u = \
+ url.make_url('mssql:///?odbc_connect=dsn%3Dmydsn%3BDatabase'
+ '%3Ddatabase%3BUID%3Dusername%3BPWD%3Dpassword'
+ )
connection = dialect.create_connect_args(u)
- eq_([['dsn=mydsn;Database=database;UID=username;PWD=password'], {}], connection)
+ eq_([['dsn=mydsn;Database=database;UID=username;PWD=password'],
+ {}], connection)
def test_pyodbc_odbc_connect_ignores_other_values(self):
- u = url.make_url('mssql://userdiff:passdiff@localhost/dbdiff?odbc_connect=DRIVER%3D%7BSQL+Server%7D%3BServer%3Dhostspec%3BDatabase%3Ddatabase%3BUID%3Dusername%3BPWD%3Dpassword')
+ u = \
+ url.make_url('mssql://userdiff:passdiff@localhost/dbdiff?od'
+ 'bc_connect=DRIVER%3D%7BSQL+Server%7D%3BServer'
+ '%3Dhostspec%3BDatabase%3Ddatabase%3BUID%3Duse'
+ 'rname%3BPWD%3Dpassword')
connection = dialect.create_connect_args(u)
- eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UID=username;PWD=password'], {}], connection)
+ eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
+ 'D=username;PWD=password'], {}], connection)
def test_bad_freetds_warning(self):
engine = engines.testing_engine()
+
def _bad_version(connection):
- return (95, 10, 255)
+ return 95, 10, 255
+
engine.dialect._get_server_version_info = _bad_version
- assert_raises_message(
- exc.SAWarning, "Unrecognized server version info", engine.connect
- )
+ assert_raises_message(exc.SAWarning,
+ 'Unrecognized server version info',
+ engine.connect)
class TypesTest(TestBase, AssertsExecutionResults, ComparesTables):
__only_on__ = 'mssql'
def teardown(self):
metadata.drop_all()
- @testing.fails_on_everything_except('mssql+pyodbc', 'this is some pyodbc-specific feature')
+ @testing.fails_on_everything_except('mssql+pyodbc',
+ 'this is some pyodbc-specific feature')
def test_decimal_notation(self):
import decimal
- numeric_table = Table('numeric_table', metadata,
- Column('id', Integer, Sequence('numeric_id_seq', optional=True), primary_key=True),
- Column('numericcol', Numeric(precision=38, scale=20, asdecimal=True))
- )
+ numeric_table = Table('numeric_table', metadata, Column('id',
+ Integer, Sequence('numeric_id_seq',
+ optional=True), primary_key=True),
+ Column('numericcol',
+ Numeric(precision=38, scale=20,
+ asdecimal=True)))
metadata.create_all()
-
- test_items = [decimal.Decimal(d) for d in '1500000.00000000000000000000',
- '-1500000.00000000000000000000', '1500000',
- '0.0000000000000000002', '0.2', '-0.0000000000000000002', '-2E-2',
- '156666.458923543', '-156666.458923543', '1', '-1', '-1234', '1234',
- '2E-12', '4E8', '3E-6', '3E-7', '4.1', '1E-1', '1E-2', '1E-3',
- '1E-4', '1E-5', '1E-6', '1E-7', '1E-1', '1E-8', '0.2732E2',
- '-0.2432E2', '4.35656E2',
- '-02452E-2', '45125E-2',
- '1234.58965E-2', '1.521E+15', '-1E-25', '1E-25', '1254E-25', '-1203E-25',
- '0', '-0.00', '-0', '4585E12', '000000000000000000012', '000000000000.32E12',
- '00000000000000.1E+12', '000000000000.2E-32']
+ test_items = [decimal.Decimal(d) for d in (
+ '1500000.00000000000000000000',
+ '-1500000.00000000000000000000',
+ '1500000',
+ '0.0000000000000000002',
+ '0.2',
+ '-0.0000000000000000002',
+ '-2E-2',
+ '156666.458923543',
+ '-156666.458923543',
+ '1',
+ '-1',
+ '-1234',
+ '1234',
+ '2E-12',
+ '4E8',
+ '3E-6',
+ '3E-7',
+ '4.1',
+ '1E-1',
+ '1E-2',
+ '1E-3',
+ '1E-4',
+ '1E-5',
+ '1E-6',
+ '1E-7',
+ '1E-1',
+ '1E-8',
+ '0.2732E2',
+ '-0.2432E2',
+ '4.35656E2',
+ '-02452E-2',
+ '45125E-2',
+ '1234.58965E-2',
+ '1.521E+15',
+ '-1E-25',
+ '1E-25',
+ '1254E-25',
+ '-1203E-25',
+ '0',
+ '-0.00',
+ '-0',
+ '4585E12',
+ '000000000000000000012',
+ '000000000000.32E12',
+ '00000000000000.1E+12',
+ '000000000000.2E-32',
+ )]
for value in test_items:
numeric_table.insert().execute(numericcol=value)
for value in select([numeric_table.c.numericcol]).execute():
assert value[0] in test_items, "%r not in test_items" % value[0]
def test_float(self):
- float_table = Table('float_table', metadata,
- Column('id', Integer, Sequence('numeric_id_seq', optional=True), primary_key=True),
- Column('floatcol', Float())
- )
+ float_table = Table('float_table', metadata, Column('id',
+ Integer, Sequence('numeric_id_seq',
+ optional=True), primary_key=True),
+ Column('floatcol', Float()))
metadata.create_all()
-
try:
- test_items = [float(d) for d in '1500000.00000000000000000000',
- '-1500000.00000000000000000000', '1500000',
- '0.0000000000000000002', '0.2', '-0.0000000000000000002',
- '156666.458923543', '-156666.458923543', '1', '-1', '1234',
- '2E-12', '4E8', '3E-6', '3E-7', '4.1', '1E-1', '1E-2', '1E-3',
- '1E-4', '1E-5', '1E-6', '1E-7', '1E-8']
+ test_items = [float(d) for d in (
+ '1500000.00000000000000000000',
+ '-1500000.00000000000000000000',
+ '1500000',
+ '0.0000000000000000002',
+ '0.2',
+ '-0.0000000000000000002',
+ '156666.458923543',
+ '-156666.458923543',
+ '1',
+ '-1',
+ '1234',
+ '2E-12',
+ '4E8',
+ '3E-6',
+ '3E-7',
+ '4.1',
+ '1E-1',
+ '1E-2',
+ '1E-3',
+ '1E-4',
+ '1E-5',
+ '1E-6',
+ '1E-7',
+ '1E-8',
+ )]
for value in test_items:
float_table.insert().execute(floatcol=value)
-
except Exception, e:
raise e
-
def test_money(self):
- "Exercise type specification for money types."
-
- columns = [
- # column type, args, kwargs, expected ddl
- (mssql.MSMoney, [], {},
- 'MONEY'),
- (mssql.MSSmallMoney, [], {},
- 'SMALLMONEY'),
- ]
+ """Exercise type specification for money types."""
+ columns = [(mssql.MSMoney, [], {}, 'MONEY'),
+ (mssql.MSSmallMoney, [], {}, 'SMALLMONEY')]
table_args = ['test_mssql_money', metadata]
for index, spec in enumerate(columns):
type_, args, kw, res = spec
- table_args.append(Column('c%s' % index, type_(*args, **kw), nullable=None))
-
+ table_args.append(Column('c%s' % index, type_(*args, **kw),
+ nullable=None))
money_table = Table(*table_args)
dialect = mssql.dialect()
- gen = dialect.ddl_compiler(dialect, schema.CreateTable(money_table))
-
+ gen = dialect.ddl_compiler(dialect,
+ schema.CreateTable(money_table))
for col in money_table.c:
index = int(col.name[1:])
- testing.eq_(gen.get_column_specification(col),
- "%s %s" % (col.name, columns[index][3]))
+ testing.eq_(gen.get_column_specification(col), '%s %s'
+ % (col.name, columns[index][3]))
self.assert_(repr(col))
-
try:
money_table.create(checkfirst=True)
assert True
table_args = ['test_mssql_dates', metadata]
for index, spec in enumerate(columns):
type_, args, kw, res, requires = spec[0:5]
- if (requires and testing._is_excluded('mssql', *requires)) or not requires:
- table_args.append(Column('c%s' % index, type_(*args, **kw), nullable=None))
-
+ if requires and testing._is_excluded('mssql', *requires) \
+ or not requires:
+ table_args.append(Column('c%s' % index, type_(*args,
+ **kw), nullable=None))
dates_table = Table(*table_args)
- gen = testing.db.dialect.ddl_compiler(testing.db.dialect, schema.CreateTable(dates_table))
-
+ gen = testing.db.dialect.ddl_compiler(testing.db.dialect,
+ schema.CreateTable(dates_table))
for col in dates_table.c:
index = int(col.name[1:])
- testing.eq_(gen.get_column_specification(col),
- "%s %s" % (col.name, columns[index][3]))
+ testing.eq_(gen.get_column_specification(col), '%s %s'
+ % (col.name, columns[index][3]))
self.assert_(repr(col))
dates_table.create(checkfirst=True)
- reflected_dates = Table('test_mssql_dates', MetaData(testing.db), autoload=True)
+ reflected_dates = Table('test_mssql_dates',
+ MetaData(testing.db), autoload=True)
for col in reflected_dates.c:
self.assert_types_base(col, dates_table.c[col.key])
t.insert().execute(adate=d1, adatetime=d2, atime=t1)
- eq_(select([t.c.adate, t.c.atime, t.c.adatetime], t.c.adate==d1).execute().fetchall(), [(d1, t1, d2)])
+ eq_(select([t.c.adate, t.c.atime, t.c.adatetime], t.c.adate
+ == d1).execute().fetchall(), [(d1, t1, d2)])
@emits_warning_on('mssql+mxodbc', r'.*does not have any indexes.*')
def test_binary(self):
table_args = ['test_mssql_binary', metadata]
for index, spec in enumerate(columns):
type_, args, kw, res = spec
- table_args.append(Column('c%s' % index, type_(*args, **kw), nullable=None))
-
+ table_args.append(Column('c%s' % index, type_(*args, **kw),
+ nullable=None))
binary_table = Table(*table_args)
dialect = mssql.dialect()
- gen = dialect.ddl_compiler(dialect, schema.CreateTable(binary_table))
-
+ gen = dialect.ddl_compiler(dialect,
+ schema.CreateTable(binary_table))
for col in binary_table.c:
index = int(col.name[1:])
- testing.eq_(gen.get_column_specification(col),
- "%s %s" % (col.name, columns[index][3]))
+ testing.eq_(gen.get_column_specification(col), '%s %s'
+ % (col.name, columns[index][3]))
self.assert_(repr(col))
-
metadata.create_all()
- reflected_binary = Table('test_mssql_binary', MetaData(testing.db), autoload=True)
+ reflected_binary = Table('test_mssql_binary',
+ MetaData(testing.db), autoload=True)
for col in reflected_binary.c:
- c1 =testing.db.dialect.type_descriptor(col.type).__class__
- c2 =testing.db.dialect.type_descriptor(binary_table.c[col.name].type).__class__
- assert issubclass(c1, c2), "%r is not a subclass of %r" % (c1, c2)
+ c1 = testing.db.dialect.type_descriptor(col.type).__class__
+ c2 = \
+ testing.db.dialect.type_descriptor(
+ binary_table.c[col.name].type).__class__
+ assert issubclass(c1, c2), '%r is not a subclass of %r' \
+ % (c1, c2)
if binary_table.c[col.name].type.length:
- testing.eq_(col.type.length, binary_table.c[col.name].type.length)
+ testing.eq_(col.type.length,
+ binary_table.c[col.name].type.length)
def test_boolean(self):
"Exercise type specification for boolean type."
table_args = ['test_mssql_boolean', metadata]
for index, spec in enumerate(columns):
type_, args, kw, res = spec
- table_args.append(Column('c%s' % index, type_(*args, **kw), nullable=None))
+ table_args.append(
+ Column('c%s' % index, type_(*args, **kw), nullable=None))
boolean_table = Table(*table_args)
dialect = mssql.dialect()
table_args = ['test_mssql_numeric', metadata]
for index, spec in enumerate(columns):
type_, args, kw, res = spec
- table_args.append(Column('c%s' % index, type_(*args, **kw), nullable=None))
+ table_args.append(
+ Column('c%s' % index, type_(*args, **kw), nullable=None))
numeric_table = Table(*table_args)
dialect = mssql.dialect()
table_args = ['test_mssql_charset', metadata]
for index, spec in enumerate(columns):
type_, args, kw, res = spec
- table_args.append(Column('c%s' % index, type_(*args, **kw), nullable=None))
+ table_args.append(
+ Column('c%s' % index, type_(*args, **kw), nullable=None))
charset_table = Table(*table_args)
dialect = mssql.dialect()
assert tbl._autoincrement_column is not c, name
# mxodbc can't handle scope_identity() with DEFAULT VALUES
+
if testing.db.driver == 'mxodbc':
- eng = [engines.testing_engine(options={'implicit_returning':True})]
+ eng = \
+ [engines.testing_engine(options={'implicit_returning'
+ : True})]
else:
- eng = [
- engines.testing_engine(options={'implicit_returning':False}),
- engines.testing_engine(options={'implicit_returning':True}),
- ]
+ eng = \
+ [engines.testing_engine(options={'implicit_returning'
+ : False}),
+ engines.testing_engine(options={'implicit_returning'
+ : True})]
for counter, engine in enumerate(eng):
engine.execute(tbl.insert())
if 'int_y' in tbl.c:
- assert engine.scalar(select([tbl.c.int_y])) == counter + 1
- assert list(engine.execute(tbl.select()).first()).count(counter + 1) == 1
+ assert engine.scalar(select([tbl.c.int_y])) \
+ == counter + 1
+ assert list(engine.execute(tbl.select()).first()).\
+ count(counter + 1) == 1
else:
- assert 1 not in list(engine.execute(tbl.select()).first())
+ assert 1 \
+ not in list(engine.execute(tbl.select()).first())
engine.execute(tbl.delete())
class BinaryTest(TestBase, AssertsExecutionResults):
value.stuff = 'this is the right stuff'
return value
- binary_table = Table('binary_table', MetaData(testing.db),
- Column('primary_id', Integer, Sequence('binary_id_seq', optional=True), primary_key=True),
- Column('data', mssql.MSVarBinary(8000)),
- Column('data_image', mssql.MSImage),
- Column('data_slice', types.BINARY(100)),
- Column('misc', String(30)),
- # construct PickleType with non-native pickle module, since cPickle uses relative module
- # loading and confuses this test's parent package 'sql' with the 'sqlalchemy.sql' package relative
- # to the 'types' module
- Column('pickled', PickleType),
- Column('mypickle', MyPickleType)
- )
+ binary_table = Table(
+ 'binary_table',
+ MetaData(testing.db),
+ Column('primary_id', Integer, Sequence('binary_id_seq',
+ optional=True), primary_key=True),
+ Column('data', mssql.MSVarBinary(8000)),
+ Column('data_image', mssql.MSImage),
+ Column('data_slice', types.BINARY(100)),
+ Column('misc', String(30)),
+            # construct PickleType with non-native pickle module,
+            # since cPickle uses relative module loading and confuses
+            # this test's parent package 'sql' with the
+            # 'sqlalchemy.sql' package relative to the 'types' module
+            Column('pickled', PickleType),
+ Column('mypickle', MyPickleType),
+ )
binary_table.create()
def teardown(self):
testobj1 = pickleable.Foo('im foo 1')
testobj2 = pickleable.Foo('im foo 2')
testobj3 = pickleable.Foo('im foo 3')
-
stream1 = self.load_stream('binary_data_one.dat')
stream2 = self.load_stream('binary_data_two.dat')
- binary_table.insert().execute(primary_id=1, misc='binary_data_one.dat', data=stream1, data_image=stream1, data_slice=stream1[0:100], pickled=testobj1, mypickle=testobj3)
- binary_table.insert().execute(primary_id=2, misc='binary_data_two.dat', data=stream2, data_image=stream2, data_slice=stream2[0:99], pickled=testobj2)
-
- # TODO: pyodbc does not seem to accept "None" for a VARBINARY column (data=None).
- # error: [Microsoft][ODBC SQL Server Driver][SQL Server]Implicit conversion from
- # data type varchar to varbinary is not allowed. Use the CONVERT function to run this query. (257)
- #binary_table.insert().execute(primary_id=3, misc='binary_data_two.dat', data=None, data_image=None, data_slice=stream2[0:99], pickled=None)
- binary_table.insert().execute(primary_id=3, misc='binary_data_two.dat', data_image=None, data_slice=stream2[0:99], pickled=None)
-
- for stmt in (
- binary_table.select(order_by=binary_table.c.primary_id),
- text("select * from binary_table order by binary_table.primary_id",
- typemap=dict(data=mssql.MSVarBinary(8000), data_image=mssql.MSImage,
- data_slice=types.BINARY(100), pickled=PickleType, mypickle=MyPickleType),
- bind=testing.db)
- ):
+ binary_table.insert().execute(
+ primary_id=1,
+ misc='binary_data_one.dat',
+ data=stream1,
+ data_image=stream1,
+ data_slice=stream1[0:100],
+ pickled=testobj1,
+ mypickle=testobj3,
+ )
+ binary_table.insert().execute(
+ primary_id=2,
+ misc='binary_data_two.dat',
+ data=stream2,
+ data_image=stream2,
+ data_slice=stream2[0:99],
+ pickled=testobj2,
+ )
+
+ # TODO: pyodbc does not seem to accept "None" for a VARBINARY
+ # column (data=None). error: [Microsoft][ODBC SQL Server
+ # Driver][SQL Server]Implicit conversion from data type varchar
+ # to varbinary is not allowed. Use the CONVERT function to run
+ # this query. (257) binary_table.insert().execute(primary_id=3,
+ # misc='binary_data_two.dat', data=None, data_image=None,
+ # data_slice=stream2[0:99], pickled=None)
+
+ binary_table.insert().execute(primary_id=3,
+ misc='binary_data_two.dat', data_image=None,
+ data_slice=stream2[0:99], pickled=None)
+ for stmt in \
+ binary_table.select(order_by=binary_table.c.primary_id), \
+ text('select * from binary_table order by '
+ 'binary_table.primary_id',
+ typemap=dict(data=mssql.MSVarBinary(8000),
+ data_image=mssql.MSImage,
+ data_slice=types.BINARY(100), pickled=PickleType,
+ mypickle=MyPickleType), bind=testing.db):
l = stmt.execute().fetchall()
eq_(list(stream1), list(l[0]['data']))
-
paddedstream = list(stream1[0:100])
paddedstream.extend(['\x00'] * (100 - len(paddedstream)))
eq_(paddedstream, list(l[0]['data_slice']))
-
eq_(list(stream2), list(l[1]['data']))
eq_(list(stream2), list(l[1]['data_image']))
eq_(testobj1, l[0]['pickled'])
def close(self):
pass
-
class MxODBCTest(TestBase):
+
def test_native_odbc_execute(self):
t1 = Table('t1', MetaData(), Column('c1', Integer))
-
dbapi = MockDBAPI()
- engine = engines.testing_engine(
- 'mssql+mxodbc://localhost',
- options={'module':dbapi,
- '_initialize':False}
- )
+ engine = engines.testing_engine('mssql+mxodbc://localhost',
+ options={'module': dbapi, '_initialize': False})
conn = engine.connect()
-
+
# crud: uses execute
+
conn.execute(t1.insert().values(c1='foo'))
- conn.execute(t1.delete().where(t1.c.c1=='foo'))
- conn.execute(t1.update().where(t1.c.c1=='foo').values(c1='bar'))
-
+ conn.execute(t1.delete().where(t1.c.c1 == 'foo'))
+ conn.execute(t1.update().where(t1.c.c1 == 'foo').values(c1='bar'
+ ))
+
# select: uses executedirect
+
conn.execute(t1.select())
-
+
# manual flagging
- conn.execution_options(native_odbc_execute=True).execute(t1.select())
- conn.execution_options(native_odbc_execute=False).execute(t1.insert().values(c1='foo'))
-
- eq_(
- dbapi.log,
- ['execute', 'execute', 'execute',
- 'executedirect', 'execute', 'executedirect']
- )
-
-
\ No newline at end of file
+
+ conn.execution_options(native_odbc_execute=True).\
+ execute(t1.select())
+ conn.execution_options(native_odbc_execute=False).\
+ execute(t1.insert().values(c1='foo'
+ ))
+ eq_(dbapi.log, [
+ 'execute',
+ 'execute',
+ 'execute',
+ 'executedirect',
+ 'execute',
+ 'executedirect',
+ ])
columns = [
# column type, args, kwargs, expected ddl
- # e.g. Column(Integer(10, unsigned=True)) == 'INTEGER(10) UNSIGNED'
+ # e.g. Column(Integer(10, unsigned=True)) ==
+ # 'INTEGER(10) UNSIGNED'
(mysql.MSNumeric, [], {},
'NUMERIC'),
(mysql.MSNumeric, [None], {},
table_args.append(Column('c%s' % index, type_(*args, **kw)))
numeric_table = Table(*table_args)
- gen = testing.db.dialect.ddl_compiler(testing.db.dialect, numeric_table)
+ gen = testing.db.dialect.ddl_compiler(
+ testing.db.dialect, numeric_table)
for col in numeric_table.c:
index = int(col.name[1:])
'NATIONAL CHAR(1)'),
(mysql.MSChar, [1], {'national':True, 'charset':'utf8'},
'NATIONAL CHAR(1)'),
- (mysql.MSChar, [1], {'national':True, 'charset':'utf8', 'binary':True},
+ (mysql.MSChar, [1], {'national':True, 'charset':'utf8',
+ 'binary':True},
'NATIONAL CHAR(1) BINARY'),
- (mysql.MSChar, [1], {'national':True, 'binary':True, 'unicode':True},
+ (mysql.MSChar, [1], {'national':True, 'binary':True,
+ 'unicode':True},
'NATIONAL CHAR(1) BINARY'),
(mysql.MSChar, [1], {'national':True, 'collation':'utf8_bin'},
'NATIONAL CHAR(1) COLLATE utf8_bin'),
table_args.append(Column('c%s' % index, type_(*args, **kw)))
charset_table = Table(*table_args)
- gen = testing.db.dialect.ddl_compiler(testing.db.dialect, charset_table)
+ gen = testing.db.dialect.ddl_compiler(testing.db.dialect,
+ charset_table)
for col in charset_table.c:
index = int(col.name[1:])
"""Test BOOL/TINYINT(1) compatability and reflection."""
meta = MetaData(testing.db)
- bool_table = Table('mysql_bool', meta,
- Column('b1', BOOLEAN),
- Column('b2', Boolean),
- Column('b3', mysql.MSTinyInteger(1)),
- Column('b4', mysql.MSTinyInteger(1, unsigned=True)),
- Column('b5', mysql.MSTinyInteger))
-
+ bool_table = Table(
+ 'mysql_bool',
+ meta,
+ Column('b1', BOOLEAN),
+ Column('b2', Boolean),
+ Column('b3', mysql.MSTinyInteger(1)),
+ Column('b4', mysql.MSTinyInteger(1, unsigned=True)),
+ Column('b5', mysql.MSTinyInteger),
+ )
eq_(colspec(bool_table.c.b1), 'b1 BOOL')
eq_(colspec(bool_table.c.b2), 'b2 BOOL')
eq_(colspec(bool_table.c.b3), 'b3 TINYINT(1)')
eq_(colspec(bool_table.c.b4), 'b4 TINYINT(1) UNSIGNED')
eq_(colspec(bool_table.c.b5), 'b5 TINYINT')
-
for col in bool_table.c:
self.assert_(repr(col))
try:
meta.create_all()
-
table = bool_table
+
def roundtrip(store, expected=None):
expected = expected or store
table.insert(store).execute()
if isinstance(val, bool):
self.assert_(val is row[i])
except:
- print "Storing %s" % store
- print "Expected %s" % expected
- print "Found %s" % list(row)
+ print 'Storing %s' % store
+ print 'Expected %s' % expected
+ print 'Found %s' % list(row)
raise
table.delete().execute().close()
-
roundtrip([None, None, None, None, None])
roundtrip([True, True, 1, 1, 1])
roundtrip([False, False, 0, 0, 0])
- roundtrip([True, True, True, True, True], [True, True, 1, 1, 1])
+ roundtrip([True, True, True, True, True], [True, True, 1,
+ 1, 1])
roundtrip([False, False, 0, 0, 0], [False, False, 0, 0, 0])
-
meta2 = MetaData(testing.db)
table = Table('mysql_bool', meta2, autoload=True)
eq_(colspec(table.c.b3), 'b3 TINYINT(1)')
eq_(colspec(table.c.b4), 'b4 TINYINT(1) UNSIGNED')
-
meta2 = MetaData(testing.db)
- table = Table('mysql_bool', meta2,
- Column('b1', BOOLEAN),
- Column('b2', Boolean),
- Column('b3', BOOLEAN),
- Column('b4', BOOLEAN),
- autoload=True)
+ table = Table(
+ 'mysql_bool',
+ meta2,
+ Column('b1', BOOLEAN),
+ Column('b2', Boolean),
+ Column('b3', BOOLEAN),
+ Column('b4', BOOLEAN),
+ autoload=True,
+ )
eq_(colspec(table.c.b3), 'b3 BOOL')
eq_(colspec(table.c.b4), 'b4 BOOL')
-
roundtrip([None, None, None, None, None])
- roundtrip([True, True, 1, 1, 1], [True, True, True, True, 1])
- roundtrip([False, False, 0, 0, 0], [False, False, False, False, 0])
- roundtrip([True, True, True, True, True], [True, True, True, True, 1])
- roundtrip([False, False, 0, 0, 0], [False, False, False, False, 0])
+ roundtrip([True, True, 1, 1, 1], [True, True, True, True,
+ 1])
+ roundtrip([False, False, 0, 0, 0], [False, False, False,
+ False, 0])
+ roundtrip([True, True, True, True, True], [True, True,
+ True, True, 1])
+ roundtrip([False, False, 0, 0, 0], [False, False, False,
+ False, 0])
finally:
meta.drop_all()
"""Exercise the SET type."""
meta = MetaData(testing.db)
- set_table = Table('mysql_set', meta,
- Column('s1', mysql.MSSet("'dq'", "'sq'")),
- Column('s2', mysql.MSSet("'a'")),
- Column('s3', mysql.MSSet("'5'", "'7'", "'9'")))
-
+ set_table = Table('mysql_set', meta, Column('s1',
+ mysql.MSSet("'dq'", "'sq'")), Column('s2',
+ mysql.MSSet("'a'")), Column('s3',
+ mysql.MSSet("'5'", "'7'", "'9'")))
eq_(colspec(set_table.c.s1), "s1 SET('dq','sq')")
eq_(colspec(set_table.c.s2), "s2 SET('a')")
eq_(colspec(set_table.c.s3), "s3 SET('5','7','9')")
-
for col in set_table.c:
self.assert_(repr(col))
try:
set_table.create()
reflected = Table('mysql_set', MetaData(testing.db),
autoload=True)
-
for table in set_table, reflected:
+
def roundtrip(store, expected=None):
expected = expected or store
table.insert(store).execute()
try:
self.assert_(list(row) == expected)
except:
- print "Storing %s" % store
- print "Expected %s" % expected
- print "Found %s" % list(row)
+ print 'Storing %s' % store
+ print 'Expected %s' % expected
+ print 'Found %s' % list(row)
raise
table.delete().execute()
- roundtrip([None, None, None],[None] * 3)
+ roundtrip([None, None, None], [None] * 3)
roundtrip(['', '', ''], [set([''])] * 3)
-
roundtrip([set(['dq']), set(['a']), set(['5'])])
- roundtrip(['dq', 'a', '5'],
- [set(['dq']), set(['a']), set(['5'])])
- roundtrip([1, 1, 1],
- [set(['dq']), set(['a']), set(['5'])])
- roundtrip([set(['dq', 'sq']), None, set(['9', '5', '7'])])
-
- set_table.insert().execute({'s3':set(['5'])},
- {'s3':set(['5', '7'])},
- {'s3':set(['5', '7', '9'])},
- {'s3':set(['7', '9'])})
- rows = select(
- [set_table.c.s3],
- set_table.c.s3.in_([set(['5']), set(['5', '7']), set(['7', '5'])])
- ).execute().fetchall()
+ roundtrip(['dq', 'a', '5'], [set(['dq']), set(['a']),
+ set(['5'])])
+ roundtrip([1, 1, 1], [set(['dq']), set(['a']), set(['5'
+ ])])
+ roundtrip([set(['dq', 'sq']), None, set(['9', '5', '7'
+ ])])
+ set_table.insert().execute({'s3': set(['5'])}, {'s3'
+ : set(['5', '7'])}, {'s3': set(['5', '7', '9'])},
+ {'s3': set(['7', '9'])})
+ rows = select([set_table.c.s3], set_table.c.s3.in_([set(['5'
+ ]), set(['5', '7']), set(['7', '5'
+ ])])).execute().fetchall()
found = set([frozenset(row[0]) for row in rows])
eq_(found, set([frozenset(['5']), frozenset(['5', '7'])]))
finally:
enum_table.drop(checkfirst=True)
enum_table.create()
- assert_raises(exc.SQLError, enum_table.insert().execute, e1=None, e2=None, e3=None, e4=None)
+ assert_raises(exc.SQLError, enum_table.insert().execute,
+ e1=None, e2=None, e3=None, e4=None)
assert_raises(exc.InvalidRequestError, enum_table.insert().execute,
e1='c', e2='c', e2generic='c', e3='c',
t1.insert().execute(value=u'réveillé', value2=u'réveillé')
t1.insert().execute(value=u'S’il', value2=u'S’il')
eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
- [(1, u'drôle', u'drôle'), (2, u'réveillé', u'réveillé'), (3, u'S’il', u'S’il')]
+ [(1, u'drôle', u'drôle'), (2, u'réveillé', u'réveillé'),
+ (3, u'S’il', u'S’il')]
)
-
+
# test reflection of the enum labels
+
m2 = MetaData(testing.db)
t2 = Table('table', m2, autoload=True)
- # TODO: what's wrong with the last element ? is there
+
+ # TODO: what's wrong with the last element ? is there
# latin-1 stuff forcing its way in ?
- assert t2.c.value.type.enums[0:2] == (u'réveillé', u'drôle') #, u'S’il') # eh ?
- assert t2.c.value2.type.enums[0:2] == (u'réveillé', u'drôle') #, u'S’il') # eh ?
-
+
+ assert t2.c.value.type.enums[0:2] == \
+ (u'réveillé', u'drôle') #, u'S’il') # eh ?
+ assert t2.c.value2.type.enums[0:2] == \
+ (u'réveillé', u'drôle') #, u'S’il') # eh ?
finally:
metadata.drop_all()
def test_enum_compile(self):
- e1 = Enum('x', 'y', 'z', name="somename")
+ e1 = Enum('x', 'y', 'z', name='somename')
t1 = Table('sometable', MetaData(), Column('somecolumn', e1))
- self.assert_compile(
- schema.CreateTable(t1),
- "CREATE TABLE sometable (somecolumn ENUM('x','y','z'))"
- )
- t1 = Table('sometable', MetaData(),
- Column('somecolumn', Enum('x', 'y', 'z', native_enum=False))
- )
- self.assert_compile(
- schema.CreateTable(t1),
- "CREATE TABLE sometable ("
- "somecolumn VARCHAR(1), "
- "CHECK (somecolumn IN ('x', 'y', 'z'))"
- ")"
- )
+ self.assert_compile(schema.CreateTable(t1),
+ "CREATE TABLE sometable (somecolumn "
+ "ENUM('x','y','z'))")
+ t1 = Table('sometable', MetaData(), Column('somecolumn',
+ Enum('x', 'y', 'z', native_enum=False)))
+ self.assert_compile(schema.CreateTable(t1),
+ "CREATE TABLE sometable (somecolumn "
+ "VARCHAR(1), CHECK (somecolumn IN ('x', "
+ "'y', 'z')))")
@testing.exclude('mysql', '<', (4,), "3.23 can't handle an ENUM of ''")
@testing.uses_deprecated('Manually quoting ENUM value literals')
finally:
enum_table.drop()
-
-
class ReflectionTest(TestBase, AssertsExecutionResults):
__only_on__ = 'mysql'
def test_default_reflection(self):
"""Test reflection of column defaults."""
+
from sqlalchemy.dialects.mysql import VARCHAR
-
- def_table = Table('mysql_def', MetaData(testing.db),
- Column('c1', VARCHAR(10, collation='utf8_unicode_ci'), DefaultClause(''), nullable=False),
+ def_table = Table(
+ 'mysql_def',
+ MetaData(testing.db),
+ Column('c1', VARCHAR(10, collation='utf8_unicode_ci'),
+ DefaultClause(''), nullable=False),
Column('c2', String(10), DefaultClause('0')),
Column('c3', String(10), DefaultClause('abc')),
- Column('c4', TIMESTAMP, DefaultClause('2009-04-05 12:00:00')),
+ Column('c4', TIMESTAMP, DefaultClause('2009-04-05 12:00:00'
+ )),
Column('c5', TIMESTAMP),
- )
-
+ )
def_table.create()
try:
reflected = Table('mysql_def', MetaData(testing.db),
- autoload=True)
+ autoload=True)
finally:
def_table.drop()
-
assert def_table.c.c1.server_default.arg == ''
assert def_table.c.c2.server_default.arg == '0'
assert def_table.c.c3.server_default.arg == 'abc'
- assert def_table.c.c4.server_default.arg == '2009-04-05 12:00:00'
-
+ assert def_table.c.c4.server_default.arg \
+ == '2009-04-05 12:00:00'
assert str(reflected.c.c1.server_default.arg) == "''"
assert str(reflected.c.c2.server_default.arg) == "'0'"
assert str(reflected.c.c3.server_default.arg) == "'abc'"
- assert str(reflected.c.c4.server_default.arg) == "'2009-04-05 12:00:00'"
-
+ assert str(reflected.c.c4.server_default.arg) \
+ == "'2009-04-05 12:00:00'"
reflected.create()
try:
- reflected2 = Table('mysql_def', MetaData(testing.db), autoload=True)
+ reflected2 = Table('mysql_def', MetaData(testing.db),
+ autoload=True)
finally:
reflected.drop()
-
assert str(reflected2.c.c1.server_default.arg) == "''"
assert str(reflected2.c.c2.server_default.arg) == "'0'"
assert str(reflected2.c.c3.server_default.arg) == "'abc'"
- assert str(reflected2.c.c4.server_default.arg) == "'2009-04-05 12:00:00'"
+ assert str(reflected2.c.c4.server_default.arg) \
+ == "'2009-04-05 12:00:00'"
def test_reflection_with_table_options(self):
comment = r"""Comment types type speedily ' " \ '' Fun!"""
for table in tables:
for i, reflected in enumerate(table.c):
- assert isinstance(reflected.type, type(expected[i])), \
- "element %d: %r not instance of %r" % (i, reflected.type, type(expected[i]))
+ assert isinstance(reflected.type,
+ type(expected[i])), \
+ 'element %d: %r not instance of %r' % (i,
+ reflected.type, type(expected[i]))
finally:
db.execute('DROP VIEW mysql_types_v')
finally:
# 'SIGNED INTEGER' is a bigint, so this is ok.
(m.MSBigInteger, "CAST(t.col AS SIGNED INTEGER)"),
(m.MSBigInteger(unsigned=False), "CAST(t.col AS SIGNED INTEGER)"),
- (m.MSBigInteger(unsigned=True), "CAST(t.col AS UNSIGNED INTEGER)"),
+ (m.MSBigInteger(unsigned=True),
+ "CAST(t.col AS UNSIGNED INTEGER)"),
(m.MSBit, "t.col"),
# this is kind of sucky. thank you default arguments!
"SELECT EXTRACT(millisecond FROM t.col1) AS anon_1 FROM t")
def test_innodb_autoincrement(self):
- t1 = Table('sometable', MetaData(),
- Column('assigned_id', Integer(), primary_key=True, autoincrement=False),
- Column('id', Integer(), primary_key=True, autoincrement=True),
- mysql_engine='InnoDB'
- )
-
- self.assert_compile(
- schema.CreateTable(t1),
- "CREATE TABLE sometable ("
- "assigned_id INTEGER NOT NULL, "
- "id INTEGER NOT NULL AUTO_INCREMENT, "
- "PRIMARY KEY (assigned_id, id), "
- "KEY `idx_autoinc_id`(`id`)"
- ")ENGINE=InnoDB"
- )
-
- t1 = Table('sometable', MetaData(),
- Column('assigned_id', Integer(), primary_key=True, autoincrement=True),
- Column('id', Integer(), primary_key=True, autoincrement=False),
- mysql_engine='InnoDB'
- )
-
- self.assert_compile(
- schema.CreateTable(t1),
- "CREATE TABLE sometable ("
- "assigned_id INTEGER NOT NULL AUTO_INCREMENT, "
- "id INTEGER NOT NULL, "
- "PRIMARY KEY (assigned_id, id)"
- ")ENGINE=InnoDB"
- )
+ t1 = Table('sometable', MetaData(), Column('assigned_id',
+ Integer(), primary_key=True, autoincrement=False),
+ Column('id', Integer(), primary_key=True,
+ autoincrement=True), mysql_engine='InnoDB')
+ self.assert_compile(schema.CreateTable(t1),
+ 'CREATE TABLE sometable (assigned_id '
+ 'INTEGER NOT NULL, id INTEGER NOT NULL '
+ 'AUTO_INCREMENT, PRIMARY KEY (assigned_id, '
+ 'id), KEY `idx_autoinc_id`(`id`))ENGINE=Inn'
+ 'oDB')
+
+ t1 = Table('sometable', MetaData(), Column('assigned_id',
+ Integer(), primary_key=True, autoincrement=True),
+ Column('id', Integer(), primary_key=True,
+ autoincrement=False), mysql_engine='InnoDB')
+ self.assert_compile(schema.CreateTable(t1),
+ 'CREATE TABLE sometable (assigned_id '
+ 'INTEGER NOT NULL AUTO_INCREMENT, id '
+ 'INTEGER NOT NULL, PRIMARY KEY '
+ '(assigned_id, id))ENGINE=InnoDB')
class SQLModeDetectionTest(TestBase):
__only_on__ = 'mysql'
""")
def test_out_params(self):
- result = testing.db.execute(text("begin foo(:x_in, :x_out, :y_out, :z_out); end;",
- bindparams=[
- bindparam('x_in', Float),
- outparam('x_out', Integer),
- outparam('y_out', Float),
- outparam('z_out', String)]),
- x_in=5)
- eq_(
- result.out_parameters,
- {'x_out':10, 'y_out':75, 'z_out':None}
- )
+ result = \
+ testing.db.execute(text('begin foo(:x_in, :x_out, :y_out, '
+ ':z_out); end;',
+ bindparams=[bindparam('x_in', Float),
+ outparam('x_out', Integer),
+ outparam('y_out', Float),
+ outparam('z_out', String)]), x_in=5)
+ eq_(result.out_parameters, {'x_out': 10, 'y_out': 75, 'z_out'
+ : None})
assert isinstance(result.out_parameters['x_out'], int)
@classmethod
class CompileTest(TestBase, AssertsCompiledSQL):
+
__dialect__ = oracle.OracleDialect()
def test_owner(self):
- meta = MetaData()
- parent = Table('parent', meta, Column('id', Integer, primary_key=True),
- Column('name', String(50)),
- schema='ed')
- child = Table('child', meta, Column('id', Integer, primary_key=True),
- Column('parent_id', Integer, ForeignKey('ed.parent.id')),
- schema = 'ed')
-
- self.assert_compile(
- parent.join(child),
- "ed.parent JOIN ed.child ON ed.parent.id = ed.child.parent_id")
+ meta = MetaData()
+ parent = Table('parent', meta, Column('id', Integer,
+ primary_key=True), Column('name', String(50)),
+ schema='ed')
+ child = Table('child', meta, Column('id', Integer,
+ primary_key=True), Column('parent_id', Integer,
+ ForeignKey('ed.parent.id')), schema='ed')
+ self.assert_compile(parent.join(child),
+ 'ed.parent JOIN ed.child ON ed.parent.id = '
+ 'ed.child.parent_id')
def test_subquery(self):
t = table('sometable', column('col1'), column('col2'))
def test_limit(self):
t = table('sometable', column('col1'), column('col2'))
-
s = select([t])
c = s.compile(dialect=oracle.OracleDialect())
assert t.c.col1 in set(c.result_map['col1'][1])
-
s = select([t]).limit(10).offset(20)
-        self.assert_compile(s,
-            "SELECT col1, col2 FROM (SELECT col1, col2, ROWNUM AS ora_rn "
-            "FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
-            "FROM sometable) WHERE ROWNUM <= :ROWNUM_1) WHERE ora_rn > :ora_rn_1"
-        )
-
-        # assert that despite the subquery, the columns from the table,
-        # not the select, get put into the "result_map"
+        self.assert_compile(s,
+                            'SELECT col1, col2 FROM (SELECT col1, '
+                            'col2, ROWNUM AS ora_rn FROM (SELECT '
+                            'sometable.col1 AS col1, sometable.col2 AS '
+                            'col2 FROM sometable) WHERE ROWNUM <= '
+                            ':ROWNUM_1) WHERE ora_rn > :ora_rn_1')
+
+        # assert that despite the subquery, the columns from the
+        # table, not the select, get put into the "result_map"
+
c = s.compile(dialect=oracle.OracleDialect())
assert t.c.col1 in set(c.result_map['col1'][1])
-
s = select([s.c.col1, s.c.col2])
-
- self.assert_compile(s, "SELECT col1, col2 FROM (SELECT col1, col2 FROM "
- "(SELECT col1, col2, ROWNUM AS ora_rn FROM (SELECT "
- "sometable.col1 AS col1, sometable.col2 AS col2 FROM "
- "sometable) WHERE ROWNUM <= :ROWNUM_1) WHERE ora_rn > :ora_rn_1)")
-
- # testing this twice to ensure oracle doesn't modify the original statement
- self.assert_compile(s, "SELECT col1, col2 FROM (SELECT col1, col2 FROM "
- "(SELECT col1, col2, ROWNUM AS ora_rn FROM (SELECT "
- "sometable.col1 AS col1, sometable.col2 AS col2 FROM sometable) "
- "WHERE ROWNUM <= :ROWNUM_1) WHERE ora_rn > :ora_rn_1)")
+ self.assert_compile(s,
+ 'SELECT col1, col2 FROM (SELECT col1, col2 '
+ 'FROM (SELECT col1, col2, ROWNUM AS ora_rn '
+ 'FROM (SELECT sometable.col1 AS col1, '
+ 'sometable.col2 AS col2 FROM sometable) '
+ 'WHERE ROWNUM <= :ROWNUM_1) WHERE ora_rn > '
+ ':ora_rn_1)')
+
+        # testing this twice to ensure oracle doesn't modify the
+        # original statement
+
+ self.assert_compile(s,
+ 'SELECT col1, col2 FROM (SELECT col1, col2 '
+ 'FROM (SELECT col1, col2, ROWNUM AS ora_rn '
+ 'FROM (SELECT sometable.col1 AS col1, '
+ 'sometable.col2 AS col2 FROM sometable) '
+ 'WHERE ROWNUM <= :ROWNUM_1) WHERE ora_rn > '
+ ':ora_rn_1)')
s = select([t]).limit(10).offset(20).order_by(t.c.col2)
-
- self.assert_compile(s, "SELECT col1, col2 FROM (SELECT col1, col2, ROWNUM "
- "AS ora_rn FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 FROM sometable "
- "ORDER BY sometable.col2) WHERE ROWNUM <= :ROWNUM_1) WHERE ora_rn > :ora_rn_1")
-
+ self.assert_compile(s,
+ 'SELECT col1, col2 FROM (SELECT col1, '
+ 'col2, ROWNUM AS ora_rn FROM (SELECT '
+ 'sometable.col1 AS col1, sometable.col2 AS '
+ 'col2 FROM sometable ORDER BY '
+ 'sometable.col2) WHERE ROWNUM <= '
+ ':ROWNUM_1) WHERE ora_rn > :ora_rn_1')
s = select([t], for_update=True).limit(10).order_by(t.c.col2)
- self.assert_compile(
- s,
- "SELECT col1, col2 FROM (SELECT sometable.col1 "
- "AS col1, sometable.col2 AS col2 FROM sometable "
- "ORDER BY sometable.col2) WHERE ROWNUM <= :ROWNUM_1 FOR UPDATE"
- )
+ self.assert_compile(s,
+ 'SELECT col1, col2 FROM (SELECT '
+ 'sometable.col1 AS col1, sometable.col2 AS '
+ 'col2 FROM sometable ORDER BY '
+ 'sometable.col2) WHERE ROWNUM <= :ROWNUM_1 '
+ 'FOR UPDATE')
- s = select([t], for_update=True).limit(10).offset(20).order_by(t.c.col2)
- self.assert_compile(
- s,
- "SELECT col1, col2 FROM (SELECT col1, col2, ROWNUM "
- "AS ora_rn FROM (SELECT sometable.col1 AS col1, "
- "sometable.col2 AS col2 FROM sometable ORDER BY "
- "sometable.col2) WHERE ROWNUM <= :ROWNUM_1) WHERE "
- "ora_rn > :ora_rn_1 FOR UPDATE"
- )
+ s = select([t],
+ for_update=True).limit(10).offset(20).order_by(t.c.col2)
+ self.assert_compile(s,
+ 'SELECT col1, col2 FROM (SELECT col1, '
+ 'col2, ROWNUM AS ora_rn FROM (SELECT '
+ 'sometable.col1 AS col1, sometable.col2 AS '
+ 'col2 FROM sometable ORDER BY '
+ 'sometable.col2) WHERE ROWNUM <= '
+ ':ROWNUM_1) WHERE ora_rn > :ora_rn_1 FOR '
+ 'UPDATE')
def test_long_labels(self):
)
anon = a_table.alias()
- self.assert_compile(
- select([other_table, anon]).select_from(
- other_table.outerjoin(anon)
- ).apply_labels(),
- "SELECT other_thirty_characters_table_.id AS other_thirty_characters__1, "
- "other_thirty_characters_table_.thirty_characters_table_id AS "
- "other_thirty_characters__2, "
- "thirty_characters_table__1.id AS thirty_characters_table__3 FROM "
- "other_thirty_characters_table_ "
- "LEFT OUTER JOIN thirty_characters_table_xxxxxx AS thirty_characters_table__1 "
- "ON thirty_characters_table__1.id = "
- "other_thirty_characters_table_.thirty_characters_table_id",
- dialect=dialect
- )
- self.assert_compile(
-
- select([other_table, anon]).select_from(
- other_table.outerjoin(anon)
- ).apply_labels(),
- "SELECT other_thirty_characters_table_.id AS other_thirty_characters__1, "
- "other_thirty_characters_table_.thirty_characters_table_id AS "
- "other_thirty_characters__2, "
- "thirty_characters_table__1.id AS thirty_characters_table__3 FROM "
- "other_thirty_characters_table_ "
- "LEFT OUTER JOIN thirty_characters_table_xxxxxx thirty_characters_table__1 ON "
- "thirty_characters_table__1.id = "
- "other_thirty_characters_table_.thirty_characters_table_id",
- dialect=ora_dialect
- )
+ self.assert_compile(select([other_table,
+ anon]).
+ select_from(
+ other_table.outerjoin(anon)).apply_labels(),
+ 'SELECT other_thirty_characters_table_.id '
+ 'AS other_thirty_characters__1, '
+ 'other_thirty_characters_table_.thirty_char'
+ 'acters_table_id AS other_thirty_characters'
+ '__2, thirty_characters_table__1.id AS '
+ 'thirty_characters_table__3 FROM '
+ 'other_thirty_characters_table_ LEFT OUTER '
+ 'JOIN thirty_characters_table_xxxxxx AS '
+ 'thirty_characters_table__1 ON '
+ 'thirty_characters_table__1.id = '
+ 'other_thirty_characters_table_.thirty_char'
+ 'acters_table_id', dialect=dialect)
+ self.assert_compile(select([other_table,
+ anon]).select_from(
+ other_table.outerjoin(anon)).apply_labels(),
+ 'SELECT other_thirty_characters_table_.id '
+ 'AS other_thirty_characters__1, '
+ 'other_thirty_characters_table_.thirty_char'
+ 'acters_table_id AS other_thirty_characters'
+ '__2, thirty_characters_table__1.id AS '
+ 'thirty_characters_table__3 FROM '
+ 'other_thirty_characters_table_ LEFT OUTER '
+ 'JOIN thirty_characters_table_xxxxxx '
+ 'thirty_characters_table__1 ON '
+ 'thirty_characters_table__1.id = '
+ 'other_thirty_characters_table_.thirty_char'
+ 'acters_table_id', dialect=ora_dialect)
def test_outer_join(self):
table1 = table('mytable',
column('otherstuff', String),
)
- query = select(
- [table1, table2],
- or_(
- table1.c.name == 'fred',
- table1.c.myid == 10,
- table2.c.othername != 'jack',
- "EXISTS (select yay from foo where boo = lar)"
- ),
- from_obj = [ outerjoin(table1, table2, table1.c.myid == table2.c.otherid) ]
- )
+ query = select([table1, table2], or_(table1.c.name == 'fred',
+ table1.c.myid == 10, table2.c.othername != 'jack'
+ , 'EXISTS (select yay from foo where boo = lar)'
+ ), from_obj=[outerjoin(table1, table2,
+ table1.c.myid == table2.c.otherid)])
self.assert_compile(query,
- "SELECT mytable.myid, mytable.name, mytable.description, myothertable.otherid, "
- "myothertable.othername FROM mytable, myothertable WHERE "
- "(mytable.name = :name_1 OR mytable.myid = :myid_1 OR "
- "myothertable.othername != :othername_1 OR EXISTS (select yay "
- "from foo where boo = lar)) "
- "AND mytable.myid = myothertable.otherid(+)",
- dialect=oracle.OracleDialect(use_ansi = False))
-
- query = table1.outerjoin(table2, table1.c.myid==table2.c.otherid).\
- outerjoin(table3, table3.c.userid==table2.c.otherid)
- self.assert_compile(query.select(),
- "SELECT mytable.myid, mytable.name, mytable.description, "
- "myothertable.otherid, myothertable.othername, thirdtable.userid,"
- " thirdtable.otherstuff "
- "FROM mytable LEFT OUTER JOIN myothertable ON mytable.myid ="
- " myothertable.otherid LEFT OUTER "
- "JOIN thirdtable ON thirdtable.userid = myothertable.otherid")
-
- self.assert_compile(query.select(),
- "SELECT mytable.myid, mytable.name, mytable.description, "
- "myothertable.otherid, myothertable.othername, thirdtable.userid,"
- " thirdtable.otherstuff FROM "
- "mytable, myothertable, thirdtable WHERE thirdtable.userid(+) ="
- " myothertable.otherid AND "
- "mytable.myid = myothertable.otherid(+)",
- dialect=oracle.dialect(use_ansi=False))
-
- query = table1.join(table2, table1.c.myid==table2.c.otherid).\
- join(table3, table3.c.userid==table2.c.otherid)
- self.assert_compile(query.select(),
- "SELECT mytable.myid, mytable.name, mytable.description, "
- "myothertable.otherid, myothertable.othername, thirdtable.userid, "
- "thirdtable.otherstuff FROM "
- "mytable, myothertable, thirdtable WHERE thirdtable.userid = "
- "myothertable.otherid AND "
- "mytable.myid = myothertable.otherid", dialect=oracle.dialect(use_ansi=False))
-
- query = table1.join(table2, table1.c.myid==table2.c.otherid).\
- outerjoin(table3, table3.c.userid==table2.c.otherid)
-
- self.assert_compile(query.select().order_by(table1.c.name).limit(10).offset(5),
-
- "SELECT myid, name, description, otherid, othername, userid, "
- "otherstuff FROM (SELECT myid, name, description, "
- "otherid, othername, userid, otherstuff, "
- "ROWNUM AS ora_rn FROM (SELECT "
- "mytable.myid AS myid, mytable.name AS name, "
- "mytable.description AS description, "
- "myothertable.otherid AS otherid, myothertable.othername "
- "AS othername, "
- "thirdtable.userid AS userid, thirdtable.otherstuff AS "
- "otherstuff FROM mytable, "
- "myothertable, thirdtable WHERE thirdtable.userid(+) = "
- "myothertable.otherid AND "
- "mytable.myid = myothertable.otherid ORDER BY "
- "mytable.name) WHERE "
- "ROWNUM <= :ROWNUM_1) WHERE ora_rn > :ora_rn_1",
- dialect=oracle.dialect(use_ansi=False))
-
- subq = select([table1]).\
- select_from(
- table1.outerjoin(table2, table1.c.myid==table2.c.otherid)
- ).alias()
- q = select([table3]).select_from(
- table3.outerjoin(subq, table3.c.userid==subq.c.myid)
- )
-
- self.assert_compile(q, "SELECT thirdtable.userid, thirdtable.otherstuff "
- "FROM thirdtable LEFT OUTER JOIN (SELECT mytable.myid AS "
- "myid, mytable.name"
- " AS name, mytable.description AS description "
- "FROM mytable LEFT OUTER JOIN myothertable ON mytable.myid = "
- "myothertable.otherid) anon_1 ON thirdtable.userid = anon_1.myid",
- dialect=oracle.dialect(use_ansi=True))
-
- self.assert_compile(q, "SELECT thirdtable.userid, thirdtable.otherstuff "
- "FROM thirdtable, (SELECT mytable.myid AS myid, mytable.name AS name, "
- "mytable.description AS description FROM mytable, myothertable "
- "WHERE mytable.myid = myothertable.otherid(+)) anon_1 "
- "WHERE thirdtable.userid = anon_1.myid(+)",
- dialect=oracle.dialect(use_ansi=False))
-
- q = select([table1.c.name]).where(table1.c.name=='foo')
- self.assert_compile(q,
- "SELECT mytable.name FROM mytable WHERE mytable.name = :name_1",
- dialect=oracle.dialect(use_ansi=False))
-
- subq = select([table3.c.otherstuff]).\
- where(table3.c.otherstuff==table1.c.name).\
- label('bar')
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description, myothertable.otherid,'
+ ' myothertable.othername FROM mytable, '
+ 'myothertable WHERE (mytable.name = '
+ ':name_1 OR mytable.myid = :myid_1 OR '
+ 'myothertable.othername != :othername_1 OR '
+ 'EXISTS (select yay from foo where boo = '
+ 'lar)) AND mytable.myid = '
+ 'myothertable.otherid(+)',
+ dialect=oracle.OracleDialect(use_ansi=False))
+ query = table1.outerjoin(table2, table1.c.myid
+ == table2.c.otherid).outerjoin(table3,
+ table3.c.userid == table2.c.otherid)
+ self.assert_compile(query.select(),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description, myothertable.otherid,'
+ ' myothertable.othername, '
+ 'thirdtable.userid, thirdtable.otherstuff '
+ 'FROM mytable LEFT OUTER JOIN myothertable '
+ 'ON mytable.myid = myothertable.otherid '
+ 'LEFT OUTER JOIN thirdtable ON '
+ 'thirdtable.userid = myothertable.otherid')
+
+ self.assert_compile(query.select(),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description, myothertable.otherid,'
+ ' myothertable.othername, '
+ 'thirdtable.userid, thirdtable.otherstuff '
+ 'FROM mytable, myothertable, thirdtable '
+ 'WHERE thirdtable.userid(+) = '
+ 'myothertable.otherid AND mytable.myid = '
+ 'myothertable.otherid(+)',
+ dialect=oracle.dialect(use_ansi=False))
+ query = table1.join(table2, table1.c.myid
+ == table2.c.otherid).join(table3,
+ table3.c.userid == table2.c.otherid)
+ self.assert_compile(query.select(),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description, myothertable.otherid,'
+ ' myothertable.othername, '
+ 'thirdtable.userid, thirdtable.otherstuff '
+ 'FROM mytable, myothertable, thirdtable '
+ 'WHERE thirdtable.userid = '
+ 'myothertable.otherid AND mytable.myid = '
+ 'myothertable.otherid',
+ dialect=oracle.dialect(use_ansi=False))
+ query = table1.join(table2, table1.c.myid
+ == table2.c.otherid).outerjoin(table3,
+ table3.c.userid == table2.c.otherid)
+ self.assert_compile(query.select().order_by(table1.c.name).
+ limit(10).offset(5),
+ 'SELECT myid, name, description, otherid, '
+ 'othername, userid, otherstuff FROM '
+ '(SELECT myid, name, description, otherid, '
+ 'othername, userid, otherstuff, ROWNUM AS '
+ 'ora_rn FROM (SELECT mytable.myid AS myid, '
+ 'mytable.name AS name, mytable.description '
+ 'AS description, myothertable.otherid AS '
+ 'otherid, myothertable.othername AS '
+ 'othername, thirdtable.userid AS userid, '
+ 'thirdtable.otherstuff AS otherstuff FROM '
+ 'mytable, myothertable, thirdtable WHERE '
+ 'thirdtable.userid(+) = '
+ 'myothertable.otherid AND mytable.myid = '
+ 'myothertable.otherid ORDER BY '
+ 'mytable.name) WHERE ROWNUM <= :ROWNUM_1) '
+ 'WHERE ora_rn > :ora_rn_1',
+ dialect=oracle.dialect(use_ansi=False))
+
+ subq = select([table1]).select_from(table1.outerjoin(table2,
+ table1.c.myid == table2.c.otherid)).alias()
+ q = select([table3]).select_from(table3.outerjoin(subq,
+ table3.c.userid == subq.c.myid))
+
+ self.assert_compile(q,
+ 'SELECT thirdtable.userid, '
+ 'thirdtable.otherstuff FROM thirdtable '
+ 'LEFT OUTER JOIN (SELECT mytable.myid AS '
+ 'myid, mytable.name AS name, '
+ 'mytable.description AS description FROM '
+ 'mytable LEFT OUTER JOIN myothertable ON '
+ 'mytable.myid = myothertable.otherid) '
+ 'anon_1 ON thirdtable.userid = anon_1.myid'
+ , dialect=oracle.dialect(use_ansi=True))
+
+ self.assert_compile(q,
+ 'SELECT thirdtable.userid, '
+ 'thirdtable.otherstuff FROM thirdtable, '
+ '(SELECT mytable.myid AS myid, '
+ 'mytable.name AS name, mytable.description '
+ 'AS description FROM mytable, myothertable '
+ 'WHERE mytable.myid = myothertable.otherid('
+ '+)) anon_1 WHERE thirdtable.userid = '
+ 'anon_1.myid(+)',
+ dialect=oracle.dialect(use_ansi=False))
+
+ q = select([table1.c.name]).where(table1.c.name == 'foo')
+ self.assert_compile(q,
+ 'SELECT mytable.name FROM mytable WHERE '
+ 'mytable.name = :name_1',
+ dialect=oracle.dialect(use_ansi=False))
+ subq = select([table3.c.otherstuff]).where(table3.c.otherstuff
+ == table1.c.name).label('bar')
q = select([table1.c.name, subq])
- self.assert_compile(q,
- "SELECT mytable.name, "
- "(SELECT thirdtable.otherstuff FROM thirdtable "
- "WHERE thirdtable.otherstuff = mytable.name) AS bar FROM mytable",
- dialect=oracle.dialect(use_ansi=False))
+ self.assert_compile(q,
+ 'SELECT mytable.name, (SELECT '
+ 'thirdtable.otherstuff FROM thirdtable '
+ 'WHERE thirdtable.otherstuff = '
+ 'mytable.name) AS bar FROM mytable',
+ dialect=oracle.dialect(use_ansi=False))
def test_alias_outer_join(self):
- address_types = table('address_types',
- column('id'),
- column('name'),
- )
- addresses = table('addresses',
- column('id'),
- column('user_id'),
- column('address_type_id'),
- column('email_address')
- )
+ address_types = table('address_types', column('id'),
+ column('name'))
+ addresses = table('addresses', column('id'), column('user_id'),
+ column('address_type_id'),
+ column('email_address'))
at_alias = address_types.alias()
-
- s = select([at_alias, addresses]).\
- select_from(
- addresses.outerjoin(at_alias,
- addresses.c.address_type_id==at_alias.c.id)
- ).\
- where(addresses.c.user_id==7).\
- order_by(addresses.c.id, address_types.c.id)
- self.assert_compile(s,
- "SELECT address_types_1.id, address_types_1.name, addresses.id, "
- "addresses.user_id, "
- "addresses.address_type_id, addresses.email_address FROM addresses "
- "LEFT OUTER JOIN address_types address_types_1 "
- "ON addresses.address_type_id = address_types_1.id WHERE "
- "addresses.user_id = :user_id_1 ORDER BY addresses.id, "
- "address_types.id")
+ s = select([at_alias,
+ addresses]).select_from(addresses.outerjoin(at_alias,
+ addresses.c.address_type_id
+ == at_alias.c.id)).where(addresses.c.user_id
+ == 7).order_by(addresses.c.id, address_types.c.id)
+ self.assert_compile(s,
+ 'SELECT address_types_1.id, '
+ 'address_types_1.name, addresses.id, '
+ 'addresses.user_id, addresses.address_type_'
+ 'id, addresses.email_address FROM '
+ 'addresses LEFT OUTER JOIN address_types '
+ 'address_types_1 ON addresses.address_type_'
+ 'id = address_types_1.id WHERE '
+ 'addresses.user_id = :user_id_1 ORDER BY '
+ 'addresses.id, address_types.id')
def test_compound(self):
- t1 = table('t1', column('c1'), column('c2'), column('c3'), )
- t2 = table('t2', column('c1'), column('c2'), column('c3'), )
- self.assert_compile(
- union(t1.select(), t2.select()),
- "SELECT t1.c1, t1.c2, t1.c3 FROM t1 UNION SELECT t2.c1, t2.c2, t2.c3 FROM t2"
- )
- self.assert_compile(
- except_(t1.select(), t2.select()),
- "SELECT t1.c1, t1.c2, t1.c3 FROM t1 MINUS SELECT t2.c1, t2.c2, t2.c3 FROM t2"
- )
+ t1 = table('t1', column('c1'), column('c2'), column('c3'))
+ t2 = table('t2', column('c1'), column('c2'), column('c3'))
+ self.assert_compile(union(t1.select(), t2.select()),
+ 'SELECT t1.c1, t1.c2, t1.c3 FROM t1 UNION '
+ 'SELECT t2.c1, t2.c2, t2.c3 FROM t2')
+ self.assert_compile(except_(t1.select(), t2.select()),
+ 'SELECT t1.c1, t1.c2, t1.c3 FROM t1 MINUS '
+ 'SELECT t2.c1, t2.c2, t2.c3 FROM t2')
class CompatFlagsTest(TestBase, AssertsCompiledSQL):
__only_on__ = 'oracle'
execute().fetchall()
def test_reflect_local_to_remote(self):
- testing.db.execute("CREATE TABLE localtable "
- "(id INTEGER PRIMARY KEY, parent_id INTEGER REFERENCES"
- " test_schema.parent(id))")
+ testing.db.execute('CREATE TABLE localtable (id INTEGER '
+ 'PRIMARY KEY, parent_id INTEGER REFERENCES '
+ 'test_schema.parent(id))')
try:
meta = MetaData(testing.db)
lcl = Table('localtable', meta, autoload=True)
parent = meta.tables['test_schema.parent']
- self.assert_compile(parent.join(lcl),
- "test_schema.parent JOIN localtable ON "
- "test_schema.parent.id = localtable.parent_id")
- select([parent, lcl]).\
- select_from(parent.join(lcl)).\
- execute().fetchall()
+ self.assert_compile(parent.join(lcl),
+ 'test_schema.parent JOIN localtable ON '
+ 'test_schema.parent.id = '
+ 'localtable.parent_id')
+ select([parent,
+ lcl]).select_from(parent.join(lcl)).execute().fetchall()
finally:
- testing.db.execute("DROP TABLE localtable")
+ testing.db.execute('DROP TABLE localtable')
def test_reflect_alt_owner_implicit(self):
meta = MetaData(testing.db)
- parent = Table('parent', meta, autoload=True, schema='test_schema')
- child = Table('child', meta, autoload=True, schema='test_schema')
+ parent = Table('parent', meta, autoload=True,
+ schema='test_schema')
+ child = Table('child', meta, autoload=True, schema='test_schema'
+ )
-        self.assert_compile(parent.join(child),
-            "test_schema.parent JOIN test_schema.child ON "
-            "test_schema.parent.id = test_schema.child.parent_id")
-        select([parent, child]).select_from(parent.join(child)).execute().fetchall()
-
+        self.assert_compile(parent.join(child),
+                            'test_schema.parent JOIN test_schema.child '
+                            'ON test_schema.parent.id = '
+                            'test_schema.child.parent_id')
+        select([parent,
+               child]).select_from(parent.join(child)).execute().fetchall()
def test_reflect_alt_owner_synonyms(self):
- testing.db.execute("CREATE TABLE localtable "
- "(id INTEGER PRIMARY KEY, parent_id INTEGER REFERENCES"
- " test_schema.ptable(id))")
+ testing.db.execute('CREATE TABLE localtable (id INTEGER '
+ 'PRIMARY KEY, parent_id INTEGER REFERENCES '
+ 'test_schema.ptable(id))')
try:
meta = MetaData(testing.db)
- lcl = Table('localtable', meta, autoload=True, oracle_resolve_synonyms=True)
+ lcl = Table('localtable', meta, autoload=True,
+ oracle_resolve_synonyms=True)
parent = meta.tables['test_schema.ptable']
- self.assert_compile(parent.join(lcl),
- "test_schema.ptable JOIN localtable ON "
- "test_schema.ptable.id = localtable.parent_id")
- select([parent, lcl]).select_from(parent.join(lcl)).execute().fetchall()
+ self.assert_compile(parent.join(lcl),
+ 'test_schema.ptable JOIN localtable ON '
+ 'test_schema.ptable.id = '
+ 'localtable.parent_id')
+ select([parent,
+ lcl]).select_from(parent.join(lcl)).execute().fetchall()
finally:
- testing.db.execute("DROP TABLE localtable")
-
+ testing.db.execute('DROP TABLE localtable')
+
def test_reflect_remote_synonyms(self):
meta = MetaData(testing.db)
- parent = Table('ptable', meta, autoload=True,
- schema='test_schema',
- oracle_resolve_synonyms=True)
- child = Table('ctable', meta, autoload=True,
- schema='test_schema',
- oracle_resolve_synonyms=True)
- self.assert_compile(parent.join(child),
- "test_schema.ptable JOIN test_schema.ctable ON "
- "test_schema.ptable.id = test_schema.ctable.parent_id")
- select([parent, child]).select_from(parent.join(child)).execute().fetchall()
+ parent = Table('ptable', meta, autoload=True,
+ schema='test_schema',
+ oracle_resolve_synonyms=True)
+ child = Table('ctable', meta, autoload=True,
+ schema='test_schema',
+ oracle_resolve_synonyms=True)
+ self.assert_compile(parent.join(child),
+ 'test_schema.ptable JOIN '
+ 'test_schema.ctable ON test_schema.ptable.i'
+ 'd = test_schema.ctable.parent_id')
+ select([parent,
+ child]).select_from(parent.join(child)).execute().fetchall()
class ConstraintTest(TestBase):
+
__only_on__ = 'oracle'
-
+
def setup(self):
global metadata
metadata = MetaData(testing.db)
-
- foo = Table('foo', metadata,
- Column('id', Integer, primary_key=True),
- )
+ foo = Table('foo', metadata, Column('id', Integer,
+ primary_key=True))
foo.create(checkfirst=True)
-
+
def teardown(self):
metadata.drop_all()
def test_oracle_has_no_on_update_cascade(self):
- bar = Table('bar', metadata,
- Column('id', Integer, primary_key=True),
- Column('foo_id', Integer, ForeignKey('foo.id', onupdate="CASCADE"))
- )
+ bar = Table('bar', metadata, Column('id', Integer,
+ primary_key=True), Column('foo_id', Integer,
+ ForeignKey('foo.id', onupdate='CASCADE')))
assert_raises(exc.SAWarning, bar.create)
-
- bat = Table('bat', metadata,
- Column('id', Integer, primary_key=True),
- Column('foo_id', Integer),
- ForeignKeyConstraint(['foo_id'], ['foo.id'], onupdate="CASCADE")
- )
+ bat = Table('bat', metadata, Column('id', Integer,
+ primary_key=True), Column('foo_id', Integer),
+ ForeignKeyConstraint(['foo_id'], ['foo.id'],
+ onupdate='CASCADE'))
assert_raises(exc.SAWarning, bat.create)
-
class TypesTest(TestBase, AssertsCompiledSQL):
__only_on__ = 'oracle'
__dialect__ = oracle.OracleDialect()
finally:
t1.drop()
- @testing.fails_on('+zxjdbc',
- 'Not yet known how to pass values of the INTERVAL type')
+ @testing.fails_on('+zxjdbc',
+ 'Not yet known how to pass values of the '
+ 'INTERVAL type')
def test_interval(self):
-
- for type_, expected in [
- (oracle.INTERVAL(), "INTERVAL DAY TO SECOND"),
- (
- oracle.INTERVAL(day_precision=3),
- "INTERVAL DAY(3) TO SECOND"
- ),
- (
- oracle.INTERVAL(second_precision=5),
- "INTERVAL DAY TO SECOND(5)"
- ),
- (
- oracle.INTERVAL(day_precision=2, second_precision=5),
- "INTERVAL DAY(2) TO SECOND(5)"
- ),
- ]:
+ for type_, expected in [(oracle.INTERVAL(),
+ 'INTERVAL DAY TO SECOND'),
+ (oracle.INTERVAL(day_precision=3),
+ 'INTERVAL DAY(3) TO SECOND'),
+ (oracle.INTERVAL(second_precision=5),
+ 'INTERVAL DAY TO SECOND(5)'),
+ (oracle.INTERVAL(day_precision=2,
+ second_precision=5),
+ 'INTERVAL DAY(2) TO SECOND(5)')]:
self.assert_compile(type_, expected)
-
metadata = MetaData(testing.db)
- interval_table = Table("intervaltable", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("day_interval", oracle.INTERVAL(day_precision=3)),
- )
+ interval_table = Table('intervaltable', metadata, Column('id',
+ Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('day_interval',
+ oracle.INTERVAL(day_precision=3)))
metadata.create_all()
try:
- interval_table.insert().execute(
- day_interval=datetime.timedelta(days=35, seconds=5743),
- )
+ interval_table.insert().\
+ execute(day_interval=datetime.timedelta(days=35,
+ seconds=5743))
row = interval_table.select().execute().first()
- eq_(row['day_interval'], datetime.timedelta(days=35, seconds=5743))
+ eq_(row['day_interval'], datetime.timedelta(days=35,
+ seconds=5743))
finally:
metadata.drop_all()
(15.76, float),
)):
eq_(row[i], val)
- assert isinstance(row[i], type_), "%r is not %r" % (row[i], type_)
+ assert isinstance(row[i], type_), '%r is not %r' \
+ % (row[i], type_)
finally:
t1.drop()
stmt = """
SELECT
(SELECT (SELECT idata FROM foo) FROM DUAL) AS idata,
- (SELECT CAST((SELECT ndata FROM foo) AS NUMERIC(20, 2)) FROM DUAL) AS ndata,
+ (SELECT CAST((SELECT ndata FROM foo) AS NUMERIC(20, 2)) FROM DUAL)
+ AS ndata,
(SELECT CAST((SELECT fdata FROM foo) AS FLOAT) FROM DUAL) AS fdata
FROM dual
"""
FROM (
SELECT
(SELECT (SELECT idata FROM foo) FROM DUAL) AS idata,
- (SELECT CAST((SELECT ndata FROM foo) AS NUMERIC(20, 2)) FROM DUAL) AS ndata,
- (SELECT CAST((SELECT fdata FROM foo) AS FLOAT) FROM DUAL) AS fdata
+ (SELECT CAST((SELECT ndata FROM foo) AS NUMERIC(20, 2))
+ FROM DUAL) AS ndata,
+ (SELECT CAST((SELECT fdata FROM foo) AS FLOAT) FROM DUAL)
+ AS fdata
FROM dual
)
WHERE ROWNUM >= 0) anon_1
class DontReflectIOTTest(TestBase):
- """test that index overflow tables aren't included in table_names."""
+ """test that index overflow tables aren't included in
+ table_names."""
__only_on__ = 'oracle'
metadata.drop_all()
def test_reflect_functional_index(self):
- testing.db.execute("CREATE INDEX DATA_IDX ON TEST_INDEX_REFLECT (UPPER(DATA))")
+ testing.db.execute('CREATE INDEX DATA_IDX ON '
+ 'TEST_INDEX_REFLECT (UPPER(DATA))')
m2 = MetaData(testing.db)
t2 = Table('test_index_reflect', m2, autoload=True)
class SequenceTest(TestBase, AssertsCompiledSQL):
+
def test_basic(self):
- seq = Sequence("my_seq_no_schema")
+ seq = Sequence('my_seq_no_schema')
dialect = oracle.OracleDialect()
- assert dialect.identifier_preparer.format_sequence(seq) == "my_seq_no_schema"
-
- seq = Sequence("my_seq", schema="some_schema")
- assert dialect.identifier_preparer.format_sequence(seq) == "some_schema.my_seq"
-
- seq = Sequence("My_Seq", schema="Some_Schema")
- assert dialect.identifier_preparer.format_sequence(seq) == '"Some_Schema"."My_Seq"'
+ assert dialect.identifier_preparer.format_sequence(seq) \
+ == 'my_seq_no_schema'
+ seq = Sequence('my_seq', schema='some_schema')
+ assert dialect.identifier_preparer.format_sequence(seq) \
+ == 'some_schema.my_seq'
+ seq = Sequence('My_Seq', schema='Some_Schema')
+ assert dialect.identifier_preparer.format_sequence(seq) \
+ == '"Some_Schema"."My_Seq"'
class ExecuteTest(TestBase):
+
__only_on__ = 'oracle'
-
-
+
def test_basic(self):
- eq_(
- testing.db.execute("/*+ this is a comment */ SELECT 1 FROM DUAL").fetchall(),
- [(1,)]
- )
+ eq_(testing.db.execute('/*+ this is a comment */ SELECT 1 FROM '
+ 'DUAL').fetchall(), [(1, )])
def test_sequences_are_integers(self):
seq = Sequence('foo_seq')
import logging
class SequenceTest(TestBase, AssertsCompiledSQL):
+
def test_basic(self):
- seq = Sequence("my_seq_no_schema")
+ seq = Sequence('my_seq_no_schema')
dialect = postgresql.PGDialect()
- assert dialect.identifier_preparer.format_sequence(seq) == "my_seq_no_schema"
-
- seq = Sequence("my_seq", schema="some_schema")
- assert dialect.identifier_preparer.format_sequence(seq) == "some_schema.my_seq"
-
- seq = Sequence("My_Seq", schema="Some_Schema")
- assert dialect.identifier_preparer.format_sequence(seq) == '"Some_Schema"."My_Seq"'
+ assert dialect.identifier_preparer.format_sequence(seq) \
+ == 'my_seq_no_schema'
+ seq = Sequence('my_seq', schema='some_schema')
+ assert dialect.identifier_preparer.format_sequence(seq) \
+ == 'some_schema.my_seq'
+ seq = Sequence('My_Seq', schema='Some_Schema')
+ assert dialect.identifier_preparer.format_sequence(seq) \
+ == '"Some_Schema"."My_Seq"'
class CompileTest(TestBase, AssertsCompiledSQL):
+
__dialect__ = postgresql.dialect()
def test_update_returning(self):
dialect = postgresql.dialect()
- table1 = table('mytable',
- column('myid', Integer),
- column('name', String(128)),
- column('description', String(128)),
- )
-
- u = update(table1, values=dict(name='foo')).returning(table1.c.myid, table1.c.name)
- self.assert_compile(u, "UPDATE mytable SET name=%(name)s RETURNING mytable.myid, mytable.name", dialect=dialect)
-
+ table1 = table('mytable', column('myid', Integer), column('name'
+ , String(128)), column('description',
+ String(128)))
+ u = update(table1, values=dict(name='foo'
+ )).returning(table1.c.myid, table1.c.name)
+ self.assert_compile(u,
+ 'UPDATE mytable SET name=%(name)s '
+ 'RETURNING mytable.myid, mytable.name',
+ dialect=dialect)
u = update(table1, values=dict(name='foo')).returning(table1)
- self.assert_compile(u, "UPDATE mytable SET name=%(name)s "\
- "RETURNING mytable.myid, mytable.name, mytable.description", dialect=dialect)
-
- u = update(table1, values=dict(name='foo')).returning(func.length(table1.c.name))
- self.assert_compile(u, "UPDATE mytable SET name=%(name)s RETURNING length(mytable.name) AS length_1", dialect=dialect)
+ self.assert_compile(u,
+ 'UPDATE mytable SET name=%(name)s '
+ 'RETURNING mytable.myid, mytable.name, '
+ 'mytable.description', dialect=dialect)
+ u = update(table1, values=dict(name='foo'
+ )).returning(func.length(table1.c.name))
+ self.assert_compile(u,
+ 'UPDATE mytable SET name=%(name)s '
+ 'RETURNING length(mytable.name) AS length_1'
+ , dialect=dialect)
def test_insert_returning(self):
column('description', String(128)),
)
- i = insert(table1, values=dict(name='foo')).returning(table1.c.myid, table1.c.name)
- self.assert_compile(i, "INSERT INTO mytable (name) VALUES (%(name)s) RETURNING mytable.myid, mytable.name", dialect=dialect)
-
+ i = insert(table1, values=dict(name='foo'
+ )).returning(table1.c.myid, table1.c.name)
+ self.assert_compile(i,
+ 'INSERT INTO mytable (name) VALUES '
+ '(%(name)s) RETURNING mytable.myid, '
+ 'mytable.name', dialect=dialect)
i = insert(table1, values=dict(name='foo')).returning(table1)
- self.assert_compile(i, "INSERT INTO mytable (name) VALUES (%(name)s) "\
- "RETURNING mytable.myid, mytable.name, mytable.description", dialect=dialect)
-
- i = insert(table1, values=dict(name='foo')).returning(func.length(table1.c.name))
- self.assert_compile(i, "INSERT INTO mytable (name) VALUES (%(name)s) RETURNING length(mytable.name) AS length_1", dialect=dialect)
+ self.assert_compile(i,
+ 'INSERT INTO mytable (name) VALUES '
+ '(%(name)s) RETURNING mytable.myid, '
+ 'mytable.name, mytable.description',
+ dialect=dialect)
+ i = insert(table1, values=dict(name='foo'
+ )).returning(func.length(table1.c.name))
+ self.assert_compile(i,
+ 'INSERT INTO mytable (name) VALUES '
+ '(%(name)s) RETURNING length(mytable.name) '
+ 'AS length_1', dialect=dialect)
- @testing.uses_deprecated(r".*argument is deprecated. Please use statement.returning.*")
+ @testing.uses_deprecated('.*argument is deprecated. Please use '
+ 'statement.returning.*')
def test_old_returning_names(self):
dialect = postgresql.dialect()
- table1 = table('mytable',
- column('myid', Integer),
- column('name', String(128)),
- column('description', String(128)),
- )
-
- u = update(table1, values=dict(name='foo'), postgres_returning=[table1.c.myid, table1.c.name])
- self.assert_compile(u, "UPDATE mytable SET name=%(name)s RETURNING mytable.myid, mytable.name", dialect=dialect)
-
- u = update(table1, values=dict(name='foo'), postgresql_returning=[table1.c.myid, table1.c.name])
- self.assert_compile(u, "UPDATE mytable SET name=%(name)s RETURNING mytable.myid, mytable.name", dialect=dialect)
-
- i = insert(table1, values=dict(name='foo'), postgres_returning=[table1.c.myid, table1.c.name])
- self.assert_compile(i, "INSERT INTO mytable (name) VALUES (%(name)s) RETURNING mytable.myid, mytable.name", dialect=dialect)
+ table1 = table('mytable', column('myid', Integer), column('name'
+ , String(128)), column('description',
+ String(128)))
+ u = update(table1, values=dict(name='foo'),
+ postgres_returning=[table1.c.myid, table1.c.name])
+ self.assert_compile(u,
+ 'UPDATE mytable SET name=%(name)s '
+ 'RETURNING mytable.myid, mytable.name',
+ dialect=dialect)
+ u = update(table1, values=dict(name='foo'),
+ postgresql_returning=[table1.c.myid, table1.c.name])
+ self.assert_compile(u,
+ 'UPDATE mytable SET name=%(name)s '
+ 'RETURNING mytable.myid, mytable.name',
+ dialect=dialect)
+ i = insert(table1, values=dict(name='foo'),
+ postgres_returning=[table1.c.myid, table1.c.name])
+ self.assert_compile(i,
+ 'INSERT INTO mytable (name) VALUES '
+ '(%(name)s) RETURNING mytable.myid, '
+ 'mytable.name', dialect=dialect)
def test_create_partial_index(self):
m = MetaData()
- tbl = Table('testtbl', m, Column('data',Integer))
- idx = Index('test_idx1', tbl.c.data, postgresql_where=and_(tbl.c.data > 5, tbl.c.data < 10))
- idx = Index('test_idx1', tbl.c.data, postgresql_where=and_(tbl.c.data > 5, tbl.c.data < 10))
-
- # test quoting and all that
- idx2 = Index('test_idx2', tbl.c.data, postgresql_where=and_(tbl.c.data > 'a', tbl.c.data < "b's"))
+ tbl = Table('testtbl', m, Column('data', Integer))
+ idx = Index('test_idx1', tbl.c.data,
+ postgresql_where=and_(tbl.c.data > 5, tbl.c.data
+ < 10))
+ idx = Index('test_idx1', tbl.c.data,
+ postgresql_where=and_(tbl.c.data > 5, tbl.c.data
+ < 10))
- self.assert_compile(schema.CreateIndex(idx),
- "CREATE INDEX test_idx1 ON testtbl (data) WHERE data > 5 AND data < 10", dialect=postgresql.dialect())
- self.assert_compile(schema.CreateIndex(idx2),
- "CREATE INDEX test_idx2 ON testtbl (data) WHERE data > 'a' AND data < 'b''s'", dialect=postgresql.dialect())
-
- @testing.uses_deprecated(r".*'postgres_where' argument has been renamed.*")
+ # test quoting and all that
+ idx2 = Index('test_idx2', tbl.c.data,
+ postgresql_where=and_(tbl.c.data > 'a', tbl.c.data
+ < "b's"))
+ self.assert_compile(schema.CreateIndex(idx),
+ 'CREATE INDEX test_idx1 ON testtbl (data) '
+ 'WHERE data > 5 AND data < 10',
+ dialect=postgresql.dialect())
+ self.assert_compile(schema.CreateIndex(idx2),
+ "CREATE INDEX test_idx2 ON testtbl (data) "
+ "WHERE data > 'a' AND data < 'b''s'",
+ dialect=postgresql.dialect())
+
+ @testing.uses_deprecated(r".*'postgres_where' argument has been "
+ "renamed.*")
def test_old_create_partial_index(self):
- tbl = Table('testtbl', MetaData(), Column('data',Integer))
- idx = Index('test_idx1', tbl.c.data, postgres_where=and_(tbl.c.data > 5, tbl.c.data < 10))
+ tbl = Table('testtbl', MetaData(), Column('data', Integer))
+ idx = Index('test_idx1', tbl.c.data,
+ postgres_where=and_(tbl.c.data > 5, tbl.c.data
+ < 10))
- self.assert_compile(schema.CreateIndex(idx),
- "CREATE INDEX test_idx1 ON testtbl (data) WHERE data > 5 AND data < 10", dialect=postgresql.dialect())
-
+ self.assert_compile(schema.CreateIndex(idx),
+ 'CREATE INDEX test_idx1 ON testtbl (data) '
+ 'WHERE data > 5 AND data < 10',
+ dialect=postgresql.dialect())
def test_extract(self):
- t = table('t', column('col1', DateTime), column('col2', Date), column('col3', Time),
- column('col4', postgresql.INTERVAL)
- )
-
+ t = table('t', column('col1', DateTime), column('col2', Date),
+ column('col3', Time), column('col4',
+ postgresql.INTERVAL))
for field in 'year', 'month', 'day', 'epoch', 'hour':
- for expr, compiled_expr in [
-
- ( t.c.col1, "t.col1 :: timestamp" ),
- ( t.c.col2, "t.col2 :: date" ),
- ( t.c.col3, "t.col3 :: time" ),
+ for expr, compiled_expr in [
+ (t.c.col1, 't.col1 :: timestamp'),
+ (t.c.col2, 't.col2 :: date'),
+ (t.c.col3, 't.col3 :: time'),
(func.current_timestamp() - datetime.timedelta(days=5),
- "(CURRENT_TIMESTAMP - %(current_timestamp_1)s) :: timestamp"
- ),
- (func.current_timestamp() + func.current_timestamp(),
- "CURRENT_TIMESTAMP + CURRENT_TIMESTAMP" # invalid, no cast.
- ),
- (text("foo.date + foo.time"),
- "foo.date + foo.time" # plain text. no cast.
- ),
- (func.current_timestamp() + datetime.timedelta(days=5),
- "(CURRENT_TIMESTAMP + %(current_timestamp_1)s) :: timestamp"
- ),
- (t.c.col2 + t.c.col3,
- "(t.col2 + t.col3) :: timestamp"
- ),
- # addition is commutative
+ '(CURRENT_TIMESTAMP - %(current_timestamp_1)s) :: '
+ 'timestamp'),
+ (func.current_timestamp() + func.current_timestamp(),
+ 'CURRENT_TIMESTAMP + CURRENT_TIMESTAMP'),  # invalid, no cast.
+ (text('foo.date + foo.time'), 'foo.date + foo.time'),  # plain text. no cast.
+ (func.current_timestamp() + datetime.timedelta(days=5),
+ '(CURRENT_TIMESTAMP + %(current_timestamp_1)s) :: '
+ 'timestamp'),
+ (t.c.col2 + t.c.col3, '(t.col2 + t.col3) :: timestamp'
+ ),
+ # addition is commutative
(t.c.col2 + datetime.timedelta(days=5),
- "(t.col2 + %(col2_1)s) :: timestamp"
- ),
+ '(t.col2 + %(col2_1)s) :: timestamp'),
(datetime.timedelta(days=5) + t.c.col2,
- "(%(col2_1)s + t.col2) :: timestamp"
- ),
- (t.c.col1 + t.c.col4,
- "(t.col1 + t.col4) :: timestamp"
- ),
- # subtraction is not
+ '(%(col2_1)s + t.col2) :: timestamp'),
+ (t.c.col1 + t.c.col4, '(t.col1 + t.col4) :: timestamp'
+ ),
+ # subtraction is not
(t.c.col1 - datetime.timedelta(seconds=30),
- "(t.col1 - %(col1_1)s) :: timestamp"
- ),
+ '(t.col1 - %(col1_1)s) :: timestamp'),
(datetime.timedelta(seconds=30) - t.c.col1,
- "%(col1_1)s - t.col1" # invalid - no cast.
- ),
+ '%(col1_1)s - t.col1'),  # invalid - no cast.
(func.coalesce(t.c.col1, func.current_timestamp()),
- "coalesce(t.col1, CURRENT_TIMESTAMP) :: timestamp"
- ),
+ 'coalesce(t.col1, CURRENT_TIMESTAMP) :: timestamp'),
(t.c.col3 + datetime.timedelta(seconds=30),
- "(t.col3 + %(col3_1)s) :: time"
- ),
- (func.current_timestamp() - func.coalesce(t.c.col1, func.current_timestamp()),
- "(CURRENT_TIMESTAMP - coalesce(t.col1, CURRENT_TIMESTAMP)) :: interval",
- ),
+ '(t.col3 + %(col3_1)s) :: time'),
+ (func.current_timestamp() - func.coalesce(t.c.col1,
+ func.current_timestamp()),
+ '(CURRENT_TIMESTAMP - coalesce(t.col1, '
+ 'CURRENT_TIMESTAMP)) :: interval'),
(3 * func.foobar(type_=Interval),
- "(%(foobar_1)s * foobar()) :: interval"
- ),
- (literal(datetime.timedelta(seconds=10)) - literal(datetime.timedelta(seconds=10)),
- "(%(param_1)s - %(param_2)s) :: interval"
- ),
- (t.c.col3 + "some string", # dont crack up on entirely unsupported types
- "t.col3 + %(col3_1)s"
- )
- ]:
- self.assert_compile(
- select([extract(field, expr)]).select_from(t),
- "SELECT EXTRACT(%s FROM %s) AS anon_1 FROM t" % (
- field,
- compiled_expr
- )
- )
+ '(%(foobar_1)s * foobar()) :: interval'),
+ (literal(datetime.timedelta(seconds=10))
+ - literal(datetime.timedelta(seconds=10)),
+ '(%(param_1)s - %(param_2)s) :: interval'),
+ # don't crack up on entirely unsupported types
+ (t.c.col3 + 'some string', 't.col3 + %(col3_1)s'),
+ ]:
+ self.assert_compile(select([extract(field,
+ expr)]).select_from(t),
+ 'SELECT EXTRACT(%s FROM %s) AS '
+ 'anon_1 FROM t' % (field,
+ compiled_expr))
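(As a concrete instance of the loop above, for field='year' against the plain DateTime column, the PostgreSQL dialect casts the operand before extraction; a sketch assuming the same t:)

    from sqlalchemy import select, extract

    stmt = select([extract('year', t.c.col1)]).select_from(t)
    # compiles under postgresql.dialect() to:
    #   SELECT EXTRACT(year FROM t.col1 :: timestamp) AS anon_1 FROM t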
class FloatCoercionTest(TablesTest, AssertsExecutionResults):
__only_on__ = 'postgresql'
)
class EnumTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
+
__only_on__ = 'postgresql'
__dialect__ = postgresql.dialect()
-
- def test_compile(self):
- e1 = Enum('x', 'y', 'z', name="somename")
- e2 = Enum('x', 'y', 'z', name="somename", schema='someschema')
-
- self.assert_compile(
- postgresql.CreateEnumType(e1),
- "CREATE TYPE somename AS ENUM ('x','y','z')"
- )
-
- self.assert_compile(
- postgresql.CreateEnumType(e2),
- "CREATE TYPE someschema.somename AS ENUM ('x','y','z')"
- )
-
- self.assert_compile(
- postgresql.DropEnumType(e1),
- "DROP TYPE somename"
- )
- self.assert_compile(
- postgresql.DropEnumType(e2),
- "DROP TYPE someschema.somename"
- )
-
+ def test_compile(self):
+ e1 = Enum('x', 'y', 'z', name='somename')
+ e2 = Enum('x', 'y', 'z', name='somename', schema='someschema')
+ self.assert_compile(postgresql.CreateEnumType(e1),
+ "CREATE TYPE somename AS ENUM ('x','y','z')"
+ )
+ self.assert_compile(postgresql.CreateEnumType(e2),
+ "CREATE TYPE someschema.somename AS ENUM "
+ "('x','y','z')")
+ self.assert_compile(postgresql.DropEnumType(e1),
+ 'DROP TYPE somename')
+ self.assert_compile(postgresql.DropEnumType(e2),
+ 'DROP TYPE someschema.somename')
t1 = Table('sometable', MetaData(), Column('somecolumn', e1))
- self.assert_compile(
- schema.CreateTable(t1),
- "CREATE TABLE sometable ("
- "somecolumn somename"
- ")"
- )
- t1 = Table('sometable', MetaData(),
- Column('somecolumn', Enum('x', 'y', 'z', native_enum=False))
- )
- self.assert_compile(
- schema.CreateTable(t1),
- "CREATE TABLE sometable ("
- "somecolumn VARCHAR(1), "
- "CHECK (somecolumn IN ('x', 'y', 'z'))"
- ")"
- )
-
+ self.assert_compile(schema.CreateTable(t1),
+ 'CREATE TABLE sometable (somecolumn '
+ 'somename)')
+ t1 = Table('sometable', MetaData(), Column('somecolumn',
+ Enum('x', 'y', 'z', native_enum=False)))
+ self.assert_compile(schema.CreateTable(t1),
+ "CREATE TABLE sometable (somecolumn "
+ "VARCHAR(1), CHECK (somecolumn IN ('x', "
+ "'y', 'z')))")
- @testing.fails_on('postgresql+zxjdbc',
- 'zxjdbc fails on ENUM: column "XXX" is of type XXX '
- 'but expression is of type character varying')
- @testing.fails_on('postgresql+pg8000',
- 'zxjdbc fails on ENUM: column "XXX" is of type XXX '
- 'but expression is of type text')
+ @testing.fails_on('postgresql+zxjdbc',
+ 'zxjdbc fails on ENUM: column "XXX" is of type '
+ 'XXX but expression is of type character varying')
+ @testing.fails_on('postgresql+pg8000',
+ 'pg8000 fails on ENUM: column "XXX" is of type '
+ 'XXX but expression is of type text')
def test_create_table(self):
metadata = MetaData(testing.db)
- t1 = Table('table', metadata,
- Column('id', Integer, primary_key=True),
- Column('value', Enum('one', 'two', 'three', name='onetwothreetype'))
- )
+ t1 = Table('table', metadata, Column('id', Integer,
+ primary_key=True), Column('value', Enum('one', 'two'
+ , 'three', name='onetwothreetype')))
t1.create()
- t1.create(checkfirst=True) # check the create
+ t1.create(checkfirst=True) # check the create
try:
t1.insert().execute(value='two')
t1.insert().execute(value='three')
t1.insert().execute(value='three')
- eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
- [(1, 'two'), (2, 'three'), (3, 'three')]
- )
+ eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
+ [(1, 'two'), (2, 'three'), (3, 'three')])
finally:
metadata.drop_all()
metadata.drop_all()
metadata = MetaData(testing.db)
etype = Enum('four', 'five', 'six', metadata=metadata)
assert_raises(exc.ArgumentError, etype.create)
- assert_raises(exc.ArgumentError, etype.compile, dialect=postgresql.dialect())
-
- @testing.fails_on('postgresql+zxjdbc',
- 'zxjdbc fails on ENUM: column "XXX" is of type XXX '
- 'but expression is of type character varying')
- @testing.fails_on('postgresql+pg8000',
- 'zxjdbc fails on ENUM: column "XXX" is of type XXX '
- 'but expression is of type text')
+ assert_raises(exc.ArgumentError, etype.compile,
+ dialect=postgresql.dialect())
+
+ @testing.fails_on('postgresql+zxjdbc',
+ 'zxjdbc fails on ENUM: column "XXX" is of type '
+ 'XXX but expression is of type character varying')
+ @testing.fails_on('postgresql+pg8000',
+ 'pg8000 fails on ENUM: column "XXX" is of type '
+ 'XXX but expression is of type text')
def test_unicode_labels(self):
metadata = MetaData(testing.db)
t1 = Table('table', metadata,
Column('id', Integer, primary_key=True),
- Column('value', Enum(u'réveillé', u'drôle', u'S’il', name='onetwothreetype'))
+ Column('value',
+ Enum(u'réveillé', u'drôle', u'S’il',
+ name='onetwothreetype'))
)
+
metadata.create_all()
try:
t1.insert().execute(value=u'drôle')
eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
[(1, u'drôle'), (2, u'réveillé'), (3, u'S’il')]
)
-
m2 = MetaData(testing.db)
t2 = Table('table', m2, autoload=True)
assert t2.c.value.type.enums == (u'réveillé', u'drôle', u'S’il')
-
finally:
metadata.drop_all()
-
+
def test_non_native_type(self):
metadata = MetaData()
- t1 = Table('foo', metadata,
- Column('bar', Enum('one', 'two', 'three', name='myenum', native_enum=False))
- )
-
+ t1 = Table('foo', metadata, Column('bar', Enum('one', 'two',
+ 'three', name='myenum', native_enum=False)))
+
def go():
t1.create(testing.db)
-
+
try:
- self.assert_sql(testing.db, go, [], with_sequences=[
- (
- "CREATE TABLE foo (\tbar VARCHAR(5), \t"
- "CONSTRAINT myenum CHECK (bar IN ('one', 'two', 'three')))",
- {}
- )]
- )
+ self.assert_sql(testing.db, go, [],
+ with_sequences=[("CREATE TABLE foo (\tbar "
+ "VARCHAR(5), \tCONSTRAINT myenum CHECK "
+ "(bar IN ('one', 'two', 'three')))", {})])
finally:
metadata.drop_all(testing.db)
-
+
def test_non_native_dialect(self):
engine = engines.testing_engine()
engine.connect()
engine.dialect.supports_native_enum = False
-
metadata = MetaData()
- t1 = Table('foo', metadata,
- Column('bar', Enum('one', 'two', 'three', name='myenum'))
- )
-
+ t1 = Table('foo', metadata, Column('bar', Enum('one', 'two',
+ 'three', name='myenum')))
+
def go():
t1.create(engine)
-
+
try:
- self.assert_sql(engine, go, [], with_sequences=[
- (
- "CREATE TABLE foo (\tbar VARCHAR(5), \t"
- "CONSTRAINT myenum CHECK (bar IN ('one', 'two', 'three')))",
- {}
- )]
- )
+ self.assert_sql(engine, go, [],
+ with_sequences=[("CREATE TABLE foo (\tbar "
+ "VARCHAR(5), \tCONSTRAINT myenum CHECK "
+ "(bar IN ('one', 'two', 'three')))", {})])
finally:
metadata.drop_all(engine)
-
+
def test_standalone_enum(self):
metadata = MetaData(testing.db)
- etype = Enum('four', 'five', 'six', name='fourfivesixtype', metadata=metadata)
+ etype = Enum('four', 'five', 'six', name='fourfivesixtype',
+ metadata=metadata)
etype.create()
try:
- assert testing.db.dialect.has_type(testing.db, 'fourfivesixtype')
+ assert testing.db.dialect.has_type(testing.db,
+ 'fourfivesixtype')
finally:
etype.drop()
- assert not testing.db.dialect.has_type(testing.db, 'fourfivesixtype')
-
+ assert not testing.db.dialect.has_type(testing.db,
+ 'fourfivesixtype')
metadata.create_all()
try:
- assert testing.db.dialect.has_type(testing.db, 'fourfivesixtype')
+ assert testing.db.dialect.has_type(testing.db,
+ 'fourfivesixtype')
finally:
metadata.drop_all()
- assert not testing.db.dialect.has_type(testing.db, 'fourfivesixtype')
-
+ assert not testing.db.dialect.has_type(testing.db,
+ 'fourfivesixtype')
+
def test_reflection(self):
metadata = MetaData(testing.db)
- etype = Enum('four', 'five', 'six', name='fourfivesixtype', metadata=metadata)
- t1 = Table('table', metadata,
- Column('id', Integer, primary_key=True),
- Column('value', Enum('one', 'two', 'three', name='onetwothreetype')),
- Column('value2', etype)
- )
+ etype = Enum('four', 'five', 'six', name='fourfivesixtype',
+ metadata=metadata)
+ t1 = Table('table', metadata, Column('id', Integer,
+ primary_key=True), Column('value', Enum('one', 'two'
+ , 'three', name='onetwothreetype')), Column('value2'
+ , etype))
metadata.create_all()
try:
m2 = MetaData(testing.db)
def test_schema_reflection(self):
metadata = MetaData(testing.db)
- etype = Enum('four', 'five', 'six',
- name='fourfivesixtype',
- schema='test_schema',
- metadata=metadata)
- t1 = Table('table', metadata,
- Column('id', Integer, primary_key=True),
- Column('value', Enum('one', 'two', 'three',
- name='onetwothreetype', schema='test_schema')),
- Column('value2', etype)
- )
+ etype = Enum(
+ 'four',
+ 'five',
+ 'six',
+ name='fourfivesixtype',
+ schema='test_schema',
+ metadata=metadata,
+ )
+ t1 = Table('table', metadata, Column('id', Integer,
+ primary_key=True), Column('value', Enum('one', 'two'
+ , 'three', name='onetwothreetype',
+ schema='test_schema')), Column('value2', etype))
metadata.create_all()
try:
m2 = MetaData(testing.db)
metadata.drop_all()
class InsertTest(TestBase, AssertsExecutionResults):
+
__only_on__ = 'postgresql'
@classmethod
def setup_class(cls):
global metadata
- cls.engine= testing.db
+ cls.engine = testing.db
metadata = MetaData(testing.db)
def teardown(self):
self.engine.dispose()
def test_compiled_insert(self):
- table = Table('testtable', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(30)))
-
+ table = Table('testtable', metadata, Column('id', Integer,
+ primary_key=True), Column('data', String(30)))
metadata.create_all()
-
- ins = table.insert(inline=True, values={'data':bindparam('x')}).compile()
- ins.execute({'x':"five"}, {'x':"seven"})
- assert table.select().execute().fetchall() == [(1, 'five'), (2, 'seven')]
+ ins = table.insert(inline=True, values={'data': bindparam('x'
+ )}).compile()
+ ins.execute({'x': 'five'}, {'x': 'seven'})
+ assert table.select().execute().fetchall() == [(1, 'five'), (2,
+ 'seven')]
def test_foreignkey_missing_insert(self):
- t1 = Table('t1', metadata,
- Column('id', Integer, primary_key=True)
- )
- t2 = Table('t2', metadata,
- Column('id', Integer, ForeignKey('t1.id'), primary_key=True)
- )
+ t1 = Table('t1', metadata, Column('id', Integer,
+ primary_key=True))
+ t2 = Table('t2', metadata, Column('id', Integer,
+ ForeignKey('t1.id'), primary_key=True))
metadata.create_all()
-
- # want to ensure that
- # "null value in column "id" violates not-null constraint" is raised (IntegrityError on psycoopg2,
- # but ProgrammingError on pg8000),
- # and not "ProgrammingError: (ProgrammingError) relationship "t2_id_seq" does not exist".
- # the latter corresponds to autoincrement behavior, which is not the case
- # here due to the foreign key.
- for eng in [
- engines.testing_engine(options={'implicit_returning':False}),
- engines.testing_engine(options={'implicit_returning':True}),
- ]:
- assert_raises_message(exc.DBAPIError, "violates not-null constraint", eng.execute, t2.insert())
-
+
+ # want to ensure that "null value in column "id" violates
+ # not-null constraint" is raised (IntegrityError on psycopg2,
+ # but ProgrammingError on pg8000), and not "ProgrammingError:
+ # (ProgrammingError) relation "t2_id_seq" does not exist".
+ # the latter corresponds to autoincrement behavior, which is not
+ # the case here due to the foreign key. (see the sketch after
+ # this test)
+
+ for eng in [engines.testing_engine(options={'implicit_returning'
+ : False}),
+ engines.testing_engine(options={'implicit_returning'
+ : True})]:
+ assert_raises_message(exc.DBAPIError,
+ 'violates not-null constraint',
+ eng.execute, t2.insert())
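(The distinction drawn in the comment above, sketched standalone; assumes the same t2 and the psycopg2 driver:)

    # id is a foreign-key primary key, so no SERIAL/sequence backs it;
    # inserting without an id must fail as a NOT NULL violation, never
    # as a missing "t2_id_seq" relation (which would mean autoincrement
    # was wrongly attempted).
    eng = engines.testing_engine(options={'implicit_returning': False})
    try:
        eng.execute(t2.insert())
    except exc.DBAPIError, e:
        assert 'violates not-null constraint' in str(e)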
def test_sequence_insert(self):
- table = Table('testtable', metadata,
- Column('id', Integer, Sequence('my_seq'), primary_key=True),
- Column('data', String(30)))
+ table = Table('testtable', metadata, Column('id', Integer,
+ Sequence('my_seq'), primary_key=True),
+ Column('data', String(30)))
metadata.create_all()
- self._assert_data_with_sequence(table, "my_seq")
+ self._assert_data_with_sequence(table, 'my_seq')
def test_sequence_returning_insert(self):
- table = Table('testtable', metadata,
- Column('id', Integer, Sequence('my_seq'), primary_key=True),
- Column('data', String(30)))
+ table = Table('testtable', metadata, Column('id', Integer,
+ Sequence('my_seq'), primary_key=True),
+ Column('data', String(30)))
metadata.create_all()
- self._assert_data_with_sequence_returning(table, "my_seq")
+ self._assert_data_with_sequence_returning(table, 'my_seq')
def test_opt_sequence_insert(self):
- table = Table('testtable', metadata,
- Column('id', Integer, Sequence('my_seq', optional=True), primary_key=True),
- Column('data', String(30)))
+ table = Table('testtable', metadata, Column('id', Integer,
+ Sequence('my_seq', optional=True),
+ primary_key=True), Column('data', String(30)))
metadata.create_all()
self._assert_data_autoincrement(table)
def test_opt_sequence_returning_insert(self):
- table = Table('testtable', metadata,
- Column('id', Integer, Sequence('my_seq', optional=True), primary_key=True),
- Column('data', String(30)))
+ table = Table('testtable', metadata, Column('id', Integer,
+ Sequence('my_seq', optional=True),
+ primary_key=True), Column('data', String(30)))
metadata.create_all()
self._assert_data_autoincrement_returning(table)
def test_autoincrement_insert(self):
- table = Table('testtable', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(30)))
+ table = Table('testtable', metadata, Column('id', Integer,
+ primary_key=True), Column('data', String(30)))
metadata.create_all()
self._assert_data_autoincrement(table)
def test_autoincrement_returning_insert(self):
- table = Table('testtable', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(30)))
+ table = Table('testtable', metadata, Column('id', Integer,
+ primary_key=True), Column('data', String(30)))
metadata.create_all()
self._assert_data_autoincrement_returning(table)
def test_noautoincrement_insert(self):
- table = Table('testtable', metadata,
- Column('id', Integer, primary_key=True, autoincrement=False),
- Column('data', String(30)))
+ table = Table('testtable', metadata, Column('id', Integer,
+ primary_key=True, autoincrement=False),
+ Column('data', String(30)))
metadata.create_all()
self._assert_data_noautoincrement(table)
def _assert_data_autoincrement(self, table):
- self.engine = engines.testing_engine(options={'implicit_returning':False})
+ self.engine = \
+ engines.testing_engine(options={'implicit_returning'
+ : False})
metadata.bind = self.engine
def go():
+
# execute with explicit id
- r = table.insert().execute({'id':30, 'data':'d1'})
+
+ r = table.insert().execute({'id': 30, 'data': 'd1'})
assert r.inserted_primary_key == [30]
# execute with prefetch id
- r = table.insert().execute({'data':'d2'})
+
+ r = table.insert().execute({'data': 'd2'})
assert r.inserted_primary_key == [1]
# executemany with explicit ids
- table.insert().execute({'id':31, 'data':'d3'}, {'id':32, 'data':'d4'})
+
+ table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
+ 'data': 'd4'})
# executemany, uses SERIAL
- table.insert().execute({'data':'d5'}, {'data':'d6'})
+
+ table.insert().execute({'data': 'd5'}, {'data': 'd6'})
# single execute, explicit id, inline
- table.insert(inline=True).execute({'id':33, 'data':'d7'})
+
+ table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
# single execute, inline, uses SERIAL
- table.insert(inline=True).execute({'data':'d8'})
- # note that the test framework doesnt capture the "preexecute" of a seqeuence
- # or default. we just see it in the bind params.
+ table.insert(inline=True).execute({'data': 'd8'})
- self.assert_sql(self.engine, go, [], with_sequences=[
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- {'id':30, 'data':'d1'}
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- {'id':1, 'data':'d2'}
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- [{'id':31, 'data':'d3'}, {'id':32, 'data':'d4'}]
- ),
- (
- "INSERT INTO testtable (data) VALUES (:data)",
- [{'data':'d5'}, {'data':'d6'}]
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- [{'id':33, 'data':'d7'}]
- ),
- (
- "INSERT INTO testtable (data) VALUES (:data)",
- [{'data':'d8'}]
- ),
- ])
+ # note that the test framework doesn't capture the "preexecute"
+ # of a sequence or default. we just see it in the bind params.
+ self.assert_sql(self.engine, go, [], with_sequences=[
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ {'id': 30, 'data': 'd1'}),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ {'id': 1, 'data': 'd2'}),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+ : 'd5'}, {'data': 'd6'}]),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ [{'id': 33, 'data': 'd7'}]),
+ ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+ : 'd8'}]),
+ ])
assert table.select().execute().fetchall() == [
(30, 'd1'),
(1, 'd2'),
(3, 'd6'),
(33, 'd7'),
(4, 'd8'),
- ]
+ ]
table.delete().execute()
- # test the same series of events using a reflected
- # version of the table
+ # test the same series of events using a reflected version of
+ # the table
+
m2 = MetaData(self.engine)
table = Table(table.name, m2, autoload=True)
def go():
- table.insert().execute({'id':30, 'data':'d1'})
- r = table.insert().execute({'data':'d2'})
+ table.insert().execute({'id': 30, 'data': 'd1'})
+ r = table.insert().execute({'data': 'd2'})
assert r.inserted_primary_key == [5]
- table.insert().execute({'id':31, 'data':'d3'}, {'id':32, 'data':'d4'})
- table.insert().execute({'data':'d5'}, {'data':'d6'})
- table.insert(inline=True).execute({'id':33, 'data':'d7'})
- table.insert(inline=True).execute({'data':'d8'})
+ table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
+ 'data': 'd4'})
+ table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+ table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+ table.insert(inline=True).execute({'data': 'd8'})
self.assert_sql(self.engine, go, [], with_sequences=[
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- {'id':30, 'data':'d1'}
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- {'id':5, 'data':'d2'}
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- [{'id':31, 'data':'d3'}, {'id':32, 'data':'d4'}]
- ),
- (
- "INSERT INTO testtable (data) VALUES (:data)",
- [{'data':'d5'}, {'data':'d6'}]
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- [{'id':33, 'data':'d7'}]
- ),
- (
- "INSERT INTO testtable (data) VALUES (:data)",
- [{'data':'d8'}]
- ),
- ])
-
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ {'id': 30, 'data': 'd1'}),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ {'id': 5, 'data': 'd2'}),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+ : 'd5'}, {'data': 'd6'}]),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ [{'id': 33, 'data': 'd7'}]),
+ ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+ : 'd8'}]),
+ ])
assert table.select().execute().fetchall() == [
(30, 'd1'),
(5, 'd2'),
(7, 'd6'),
(33, 'd7'),
(8, 'd8'),
- ]
+ ]
table.delete().execute()
def _assert_data_autoincrement_returning(self, table):
- self.engine = engines.testing_engine(options={'implicit_returning':True})
+ self.engine = \
+ engines.testing_engine(options={'implicit_returning': True})
metadata.bind = self.engine
def go():
+
# execute with explicit id
- r = table.insert().execute({'id':30, 'data':'d1'})
+
+ r = table.insert().execute({'id': 30, 'data': 'd1'})
assert r.inserted_primary_key == [30]
# execute with prefetch id
- r = table.insert().execute({'data':'d2'})
+
+ r = table.insert().execute({'data': 'd2'})
assert r.inserted_primary_key == [1]
# executemany with explicit ids
- table.insert().execute({'id':31, 'data':'d3'}, {'id':32, 'data':'d4'})
+
+ table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
+ 'data': 'd4'})
# executemany, uses SERIAL
- table.insert().execute({'data':'d5'}, {'data':'d6'})
+
+ table.insert().execute({'data': 'd5'}, {'data': 'd6'})
# single execute, explicit id, inline
- table.insert(inline=True).execute({'id':33, 'data':'d7'})
+
+ table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
# single execute, inline, uses SERIAL
- table.insert(inline=True).execute({'data':'d8'})
-
- self.assert_sql(self.engine, go, [], with_sequences=[
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- {'id':30, 'data':'d1'}
- ),
- (
- "INSERT INTO testtable (data) VALUES (:data) RETURNING testtable.id",
- {'data': 'd2'}
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- [{'id':31, 'data':'d3'}, {'id':32, 'data':'d4'}]
- ),
- (
- "INSERT INTO testtable (data) VALUES (:data)",
- [{'data':'d5'}, {'data':'d6'}]
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- [{'id':33, 'data':'d7'}]
- ),
- (
- "INSERT INTO testtable (data) VALUES (:data)",
- [{'data':'d8'}]
- ),
- ])
+ table.insert(inline=True).execute({'data': 'd8'})
+
+ self.assert_sql(self.engine, go, [], with_sequences=[
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ {'id': 30, 'data': 'd1'}),
+ ('INSERT INTO testtable (data) VALUES (:data) RETURNING '
+ 'testtable.id', {'data': 'd2'}),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+ : 'd5'}, {'data': 'd6'}]),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ [{'id': 33, 'data': 'd7'}]),
+ ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+ : 'd8'}]),
+ ])
assert table.select().execute().fetchall() == [
(30, 'd1'),
(1, 'd2'),
(3, 'd6'),
(33, 'd7'),
(4, 'd8'),
- ]
+ ]
table.delete().execute()
- # test the same series of events using a reflected
- # version of the table
+ # test the same series of events using a reflected version of
+ # the table
+
m2 = MetaData(self.engine)
table = Table(table.name, m2, autoload=True)
def go():
- table.insert().execute({'id':30, 'data':'d1'})
- r = table.insert().execute({'data':'d2'})
+ table.insert().execute({'id': 30, 'data': 'd1'})
+ r = table.insert().execute({'data': 'd2'})
assert r.inserted_primary_key == [5]
- table.insert().execute({'id':31, 'data':'d3'}, {'id':32, 'data':'d4'})
- table.insert().execute({'data':'d5'}, {'data':'d6'})
- table.insert(inline=True).execute({'id':33, 'data':'d7'})
- table.insert(inline=True).execute({'data':'d8'})
+ table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
+ 'data': 'd4'})
+ table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+ table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+ table.insert(inline=True).execute({'data': 'd8'})
self.assert_sql(self.engine, go, [], with_sequences=[
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- {'id':30, 'data':'d1'}
- ),
- (
- "INSERT INTO testtable (data) VALUES (:data) RETURNING testtable.id",
- {'data':'d2'}
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- [{'id':31, 'data':'d3'}, {'id':32, 'data':'d4'}]
- ),
- (
- "INSERT INTO testtable (data) VALUES (:data)",
- [{'data':'d5'}, {'data':'d6'}]
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- [{'id':33, 'data':'d7'}]
- ),
- (
- "INSERT INTO testtable (data) VALUES (:data)",
- [{'data':'d8'}]
- ),
- ])
-
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ {'id': 30, 'data': 'd1'}),
+ ('INSERT INTO testtable (data) VALUES (:data) RETURNING '
+ 'testtable.id', {'data': 'd2'}),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+ : 'd5'}, {'data': 'd6'}]),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ [{'id': 33, 'data': 'd7'}]),
+ ('INSERT INTO testtable (data) VALUES (:data)', [{'data'
+ : 'd8'}]),
+ ])
assert table.select().execute().fetchall() == [
(30, 'd1'),
(5, 'd2'),
(7, 'd6'),
(33, 'd7'),
(8, 'd8'),
- ]
+ ]
table.delete().execute()
def _assert_data_with_sequence(self, table, seqname):
- self.engine = engines.testing_engine(options={'implicit_returning':False})
+ self.engine = \
+ engines.testing_engine(options={'implicit_returning'
+ : False})
metadata.bind = self.engine
def go():
- table.insert().execute({'id':30, 'data':'d1'})
- table.insert().execute({'data':'d2'})
- table.insert().execute({'id':31, 'data':'d3'}, {'id':32, 'data':'d4'})
- table.insert().execute({'data':'d5'}, {'data':'d6'})
- table.insert(inline=True).execute({'id':33, 'data':'d7'})
- table.insert(inline=True).execute({'data':'d8'})
+ table.insert().execute({'id': 30, 'data': 'd1'})
+ table.insert().execute({'data': 'd2'})
+ table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
+ 'data': 'd4'})
+ table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+ table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+ table.insert(inline=True).execute({'data': 'd8'})
self.assert_sql(self.engine, go, [], with_sequences=[
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- {'id':30, 'data':'d1'}
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- {'id':1, 'data':'d2'}
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- [{'id':31, 'data':'d3'}, {'id':32, 'data':'d4'}]
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (nextval('%s'), :data)" % seqname,
- [{'data':'d5'}, {'data':'d6'}]
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- [{'id':33, 'data':'d7'}]
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (nextval('%s'), :data)" % seqname,
- [{'data':'d8'}]
- ),
- ])
-
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ {'id': 30, 'data': 'd1'}),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ {'id': 1, 'data': 'd2'}),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ ("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+ ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ [{'id': 33, 'data': 'd7'}]),
+ ("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+ ":data)" % seqname, [{'data': 'd8'}]),
+ ])
assert table.select().execute().fetchall() == [
(30, 'd1'),
(1, 'd2'),
(3, 'd6'),
(33, 'd7'),
(4, 'd8'),
- ]
+ ]
# can't test reflection here since the Sequence must be
# explicitly specified
def _assert_data_with_sequence_returning(self, table, seqname):
- self.engine = engines.testing_engine(options={'implicit_returning':True})
+ self.engine = \
+ engines.testing_engine(options={'implicit_returning': True})
metadata.bind = self.engine
def go():
- table.insert().execute({'id':30, 'data':'d1'})
- table.insert().execute({'data':'d2'})
- table.insert().execute({'id':31, 'data':'d3'}, {'id':32, 'data':'d4'})
- table.insert().execute({'data':'d5'}, {'data':'d6'})
- table.insert(inline=True).execute({'id':33, 'data':'d7'})
- table.insert(inline=True).execute({'data':'d8'})
+ table.insert().execute({'id': 30, 'data': 'd1'})
+ table.insert().execute({'data': 'd2'})
+ table.insert().execute({'id': 31, 'data': 'd3'}, {'id': 32,
+ 'data': 'd4'})
+ table.insert().execute({'data': 'd5'}, {'data': 'd6'})
+ table.insert(inline=True).execute({'id': 33, 'data': 'd7'})
+ table.insert(inline=True).execute({'data': 'd8'})
self.assert_sql(self.engine, go, [], with_sequences=[
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- {'id':30, 'data':'d1'}
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (nextval('my_seq'), :data) RETURNING testtable.id",
- {'data':'d2'}
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- [{'id':31, 'data':'d3'}, {'id':32, 'data':'d4'}]
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (nextval('%s'), :data)" % seqname,
- [{'data':'d5'}, {'data':'d6'}]
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (:id, :data)",
- [{'id':33, 'data':'d7'}]
- ),
- (
- "INSERT INTO testtable (id, data) VALUES (nextval('%s'), :data)" % seqname,
- [{'data':'d8'}]
- ),
- ])
-
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ {'id': 30, 'data': 'd1'}),
+ ("INSERT INTO testtable (id, data) VALUES "
+ "(nextval('my_seq'), :data) RETURNING testtable.id",
+ {'data': 'd2'}),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ [{'id': 31, 'data': 'd3'}, {'id': 32, 'data': 'd4'}]),
+ ("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+ ":data)" % seqname, [{'data': 'd5'}, {'data': 'd6'}]),
+ ('INSERT INTO testtable (id, data) VALUES (:id, :data)',
+ [{'id': 33, 'data': 'd7'}]),
+ ("INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
+ ":data)" % seqname, [{'data': 'd8'}]),
+ ])
assert table.select().execute().fetchall() == [
(30, 'd1'),
(1, 'd2'),
(3, 'd6'),
(33, 'd7'),
(4, 'd8'),
- ]
+ ]
# can't test reflection here since the Sequence must be
# explicitly specified
def _assert_data_noautoincrement(self, table):
- self.engine = engines.testing_engine(options={'implicit_returning':False})
+ self.engine = \
+ engines.testing_engine(options={'implicit_returning'
+ : False})
metadata.bind = self.engine
-
- table.insert().execute({'id':30, 'data':'d1'})
-
+ table.insert().execute({'id': 30, 'data': 'd1'})
if self.engine.driver == 'pg8000':
exception_cls = exc.ProgrammingError
elif self.engine.driver == 'pypostgresql':
exception_cls = Exception
else:
exception_cls = exc.IntegrityError
-
- assert_raises_message(exception_cls, "violates not-null constraint", table.insert().execute, {'data':'d2'})
- assert_raises_message(exception_cls, "violates not-null constraint", table.insert().execute, {'data':'d2'}, {'data':'d3'})
-
- assert_raises_message(exception_cls, "violates not-null constraint", table.insert().execute, {'data':'d2'})
-
- assert_raises_message(exception_cls, "violates not-null constraint", table.insert().execute, {'data':'d2'}, {'data':'d3'})
-
- table.insert().execute({'id':31, 'data':'d2'}, {'id':32, 'data':'d3'})
- table.insert(inline=True).execute({'id':33, 'data':'d4'})
-
- assert table.select().execute().fetchall() == [
- (30, 'd1'),
- (31, 'd2'),
- (32, 'd3'),
- (33, 'd4'),
- ]
+ assert_raises_message(exception_cls,
+ 'violates not-null constraint',
+ table.insert().execute, {'data': 'd2'})
+ assert_raises_message(exception_cls,
+ 'violates not-null constraint',
+ table.insert().execute, {'data': 'd2'},
+ {'data': 'd3'})
+ assert_raises_message(exception_cls,
+ 'violates not-null constraint',
+ table.insert().execute, {'data': 'd2'})
+ assert_raises_message(exception_cls,
+ 'violates not-null constraint',
+ table.insert().execute, {'data': 'd2'},
+ {'data': 'd3'})
+ table.insert().execute({'id': 31, 'data': 'd2'}, {'id': 32,
+ 'data': 'd3'})
+ table.insert(inline=True).execute({'id': 33, 'data': 'd4'})
+ assert table.select().execute().fetchall() == [(30, 'd1'), (31,
+ 'd2'), (32, 'd3'), (33, 'd4')]
table.delete().execute()
- # test the same series of events using a reflected
- # version of the table
+ # test the same series of events using a reflected version of
+ # the table
+
m2 = MetaData(self.engine)
table = Table(table.name, m2, autoload=True)
- table.insert().execute({'id':30, 'data':'d1'})
-
- assert_raises_message(exception_cls, "violates not-null constraint", table.insert().execute, {'data':'d2'})
- assert_raises_message(exception_cls, "violates not-null constraint", table.insert().execute, {'data':'d2'}, {'data':'d3'})
-
- table.insert().execute({'id':31, 'data':'d2'}, {'id':32, 'data':'d3'})
- table.insert(inline=True).execute({'id':33, 'data':'d4'})
-
- assert table.select().execute().fetchall() == [
- (30, 'd1'),
- (31, 'd2'),
- (32, 'd3'),
- (33, 'd4'),
- ]
+ table.insert().execute({'id': 30, 'data': 'd1'})
+ assert_raises_message(exception_cls,
+ 'violates not-null constraint',
+ table.insert().execute, {'data': 'd2'})
+ assert_raises_message(exception_cls,
+ 'violates not-null constraint',
+ table.insert().execute, {'data': 'd2'},
+ {'data': 'd3'})
+ table.insert().execute({'id': 31, 'data': 'd2'}, {'id': 32,
+ 'data': 'd3'})
+ table.insert(inline=True).execute({'id': 33, 'data': 'd4'})
+ assert table.select().execute().fetchall() == [(30, 'd1'), (31,
+ 'd2'), (32, 'd3'), (33, 'd4')]
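(In brief, the autoincrement=False contract exercised above; a hypothetical sketch:)

    t = Table('testtable', MetaData(),
              Column('id', Integer, primary_key=True, autoincrement=False),
              Column('data', String(30)))
    # DDL on PostgreSQL renders id as plain "INTEGER NOT NULL" -- no SERIAL,
    # no sequence -- so every INSERT must supply id explicitly or violate
    # the NOT NULL constraint.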
class DomainReflectionTest(TestBase, AssertsExecutionResults):
- "Test PostgreSQL domains"
+
+ """Test PostgreSQL domains"""
__only_on__ = 'postgresql'
@classmethod
def setup_class(cls):
con = testing.db.connect()
- for ddl in ('CREATE DOMAIN testdomain INTEGER NOT NULL DEFAULT 42',
- 'CREATE DOMAIN test_schema.testdomain INTEGER DEFAULT 0'):
+ for ddl in \
+ 'CREATE DOMAIN testdomain INTEGER NOT NULL DEFAULT 42', \
+ 'CREATE DOMAIN test_schema.testdomain INTEGER DEFAULT 0':
try:
con.execute(ddl)
except exc.SQLError, e:
- if not "already exists" in str(e):
+ if not 'already exists' in str(e):
raise e
- con.execute('CREATE TABLE testtable (question integer, answer testdomain)')
- con.execute('CREATE TABLE test_schema.testtable(question integer, answer test_schema.testdomain, anything integer)')
- con.execute('CREATE TABLE crosschema (question integer, answer test_schema.testdomain)')
+ con.execute('CREATE TABLE testtable (question integer, answer '
+ 'testdomain)')
+ con.execute('CREATE TABLE test_schema.testtable(question '
+ 'integer, answer test_schema.testdomain, anything '
+ 'integer)')
+ con.execute('CREATE TABLE crosschema (question integer, answer '
+ 'test_schema.testdomain)')
@classmethod
def teardown_class(cls):
def test_table_is_reflected(self):
metadata = MetaData(testing.db)
table = Table('testtable', metadata, autoload=True)
- eq_(set(table.columns.keys()), set(['question', 'answer']), "Columns of reflected table didn't equal expected columns")
+ eq_(set(table.columns.keys()), set(['question', 'answer']),
+ "Columns of reflected table didn't equal expected columns")
assert isinstance(table.c.answer.type, Integer)
def test_domain_is_reflected(self):
metadata = MetaData(testing.db)
table = Table('testtable', metadata, autoload=True)
- eq_(str(table.columns.answer.server_default.arg), '42', "Reflected default value didn't equal expected value")
- assert not table.columns.answer.nullable, "Expected reflected column to not be nullable."
+ eq_(str(table.columns.answer.server_default.arg), '42',
+ "Reflected default value didn't equal expected value")
+ assert not table.columns.answer.nullable, \
+ 'Expected reflected column to not be nullable.'
def test_table_is_reflected_test_schema(self):
metadata = MetaData(testing.db)
- table = Table('testtable', metadata, autoload=True, schema='test_schema')
- eq_(set(table.columns.keys()), set(['question', 'answer', 'anything']), "Columns of reflected table didn't equal expected columns")
+ table = Table('testtable', metadata, autoload=True,
+ schema='test_schema')
+ eq_(set(table.columns.keys()), set(['question', 'answer',
+ 'anything']),
+ "Columns of reflected table didn't equal expected columns")
assert isinstance(table.c.anything.type, Integer)
def test_schema_domain_is_reflected(self):
metadata = MetaData(testing.db)
- table = Table('testtable', metadata, autoload=True, schema='test_schema')
- eq_(str(table.columns.answer.server_default.arg), '0', "Reflected default value didn't equal expected value")
- assert table.columns.answer.nullable, "Expected reflected column to be nullable."
+ table = Table('testtable', metadata, autoload=True,
+ schema='test_schema')
+ eq_(str(table.columns.answer.server_default.arg), '0',
+ "Reflected default value didn't equal expected value")
+ assert table.columns.answer.nullable, \
+ 'Expected reflected column to be nullable.'
def test_crosschema_domain_is_reflected(self):
metadata = MetaData(testing.db)
table = Table('crosschema', metadata, autoload=True)
- eq_(str(table.columns.answer.server_default.arg), '0', "Reflected default value didn't equal expected value")
- assert table.columns.answer.nullable, "Expected reflected column to be nullable."
+ eq_(str(table.columns.answer.server_default.arg), '0',
+ "Reflected default value didn't equal expected value")
+ assert table.columns.answer.nullable, \
+ 'Expected reflected column to be nullable.'
def test_unknown_types(self):
from sqlalchemy.databases import postgresql
-
ischema_names = postgresql.PGDialect.ischema_names
postgresql.PGDialect.ischema_names = {}
try:
m2 = MetaData(testing.db)
- assert_raises(exc.SAWarning, Table, "testtable", m2, autoload=True)
+ assert_raises(exc.SAWarning, Table, 'testtable', m2,
+ autoload=True)
@testing.emits_warning('Did not recognize type')
def warns():
m3 = MetaData(testing.db)
- t3 = Table("testtable", m3, autoload=True)
+ t3 = Table('testtable', m3, autoload=True)
assert t3.c.answer.type.__class__ == sa.types.NullType
-
finally:
postgresql.PGDialect.ischema_names = ischema_names
class MiscTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
+
__only_on__ = 'postgresql'
def test_date_reflection(self):
m1 = MetaData(testing.db)
- t1 = Table('pgdate', m1,
- Column('date1', DateTime(timezone=True)),
- Column('date2', DateTime(timezone=False))
- )
+ t1 = Table('pgdate', m1, Column('date1',
+ DateTime(timezone=True)), Column('date2',
+ DateTime(timezone=False)))
m1.create_all()
try:
m2 = MetaData(testing.db)
assert t2.c.date2.type.timezone is False
finally:
m1.drop_all()
-
- @testing.fails_on('+zxjdbc', 'The JDBC driver handles the version parsing')
+
+ @testing.fails_on('+zxjdbc',
+ 'The JDBC driver handles the version parsing')
def test_version_parsing(self):
+
+
class MockConn(object):
+
def __init__(self, res):
self.res = res
-
+
def execute(self, str):
return self
-
+
def scalar(self):
return self.res
-
- for string, version in [
- ("PostgreSQL 8.3.8 on i686-redhat-linux-gnu, compiled by GCC gcc (GCC) 4.1.2 20070925 (Red Hat 4.1.2-33)", (8, 3, 8)),
- ("PostgreSQL 8.5devel on x86_64-unknown-linux-gnu, compiled by GCC gcc (GCC) 4.4.2, 64-bit", (8, 5)),
- ]:
-
- eq_(testing.db.dialect._get_server_version_info(MockConn(string)), version)
-
+
+
+ for string, version in \
+ [('PostgreSQL 8.3.8 on i686-redhat-linux-gnu, compiled by '
+ 'GCC gcc (GCC) 4.1.2 20070925 (Red Hat 4.1.2-33)', (8, 3,
+ 8)),
+ ('PostgreSQL 8.5devel on x86_64-unknown-linux-gnu, '
+ 'compiled by GCC gcc (GCC) 4.4.2, 64-bit', (8, 5))]:
+ eq_(testing.db.dialect._get_server_version_info(MockConn(string)),
+ version)
+
@testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
def test_notice_logging(self):
log = logging.getLogger('sqlalchemy.dialects.postgresql')
conn = testing.db.connect()
trans = conn.begin()
try:
- conn.execute("create table foo (id serial primary key)")
+ conn.execute('create table foo (id serial primary key)')
finally:
trans.rollback()
finally:
log.removeHandler(buf)
log.setLevel(lev)
- msgs = " ".join(b.msg for b in buf.buffer)
- assert "will create implicit sequence" in msgs
- assert "will create implicit index" in msgs
-
-
+ msgs = ' '.join(b.msg for b in buf.buffer)
+ assert 'will create implicit sequence' in msgs
+ assert 'will create implicit index' in msgs
def test_pg_weirdchar_reflection(self):
meta1 = MetaData(testing.db)
- subject = Table("subject", meta1,
- Column("id$", Integer, primary_key=True),
- )
-
- referer = Table("referer", meta1,
- Column("id", Integer, primary_key=True),
- Column("ref", Integer, ForeignKey('subject.id$')),
- )
+ subject = Table('subject', meta1, Column('id$', Integer,
+ primary_key=True))
+ referer = Table('referer', meta1, Column('id', Integer,
+ primary_key=True), Column('ref', Integer,
+ ForeignKey('subject.id$')))
meta1.create_all()
try:
meta2 = MetaData(testing.db)
- subject = Table("subject", meta2, autoload=True)
- referer = Table("referer", meta2, autoload=True)
+ subject = Table('subject', meta2, autoload=True)
+ referer = Table('referer', meta2, autoload=True)
print str(subject.join(referer).onclause)
- self.assert_((subject.c['id$']==referer.c.ref).compare(subject.join(referer).onclause))
+ self.assert_((subject.c['id$']
+ == referer.c.ref).compare(
+ subject.join(referer).onclause))
finally:
meta1.drop_all()
- @testing.fails_on('+zxjdbc', "Can't infer the SQL type to use "
- "for an instance of "
- "org.python.core.PyObjectDerived.")
+ @testing.fails_on('+zxjdbc',
+ "Can't infer the SQL type to use for an instance "
+ "of org.python.core.PyObjectDerived.")
@testing.fails_on('+pg8000', "Can't determine correct type.")
def test_extract(self):
- fivedaysago = datetime.datetime.now() - datetime.timedelta(days=5)
- for field, exp in (
- ('year', fivedaysago.year),
- ('month', fivedaysago.month),
- ('day', fivedaysago.day),
- ):
- r = testing.db.execute(
- select([extract(field, func.now() + datetime.timedelta(days =-5))])
- ).scalar()
+ fivedaysago = datetime.datetime.now() \
+ - datetime.timedelta(days=5)
+ for field, exp in ('year', fivedaysago.year), ('month',
+ fivedaysago.month), ('day', fivedaysago.day):
+ r = testing.db.execute(select([extract(field, func.now()
+ + datetime.timedelta(days=-5))])).scalar()
eq_(r, exp)
-
def test_checksfor_sequence(self):
meta1 = MetaData(testing.db)
- t = Table('mytable', meta1,
- Column('col1', Integer, Sequence('fooseq')))
+ t = Table('mytable', meta1, Column('col1', Integer,
+ Sequence('fooseq')))
try:
- testing.db.execute("CREATE SEQUENCE fooseq")
+ testing.db.execute('CREATE SEQUENCE fooseq')
t.create(checkfirst=True)
finally:
t.drop(checkfirst=True)
def test_renamed_sequence_reflection(self):
m1 = MetaData(testing.db)
- t = Table('t', m1,
- Column('id', Integer, primary_key=True)
- )
+ t = Table('t', m1, Column('id', Integer, primary_key=True))
m1.create_all()
try:
m2 = MetaData(testing.db)
t2 = Table('t', m2, autoload=True, implicit_returning=False)
- eq_(t2.c.id.server_default.arg.text, "nextval('t_id_seq'::regclass)")
-
+ eq_(t2.c.id.server_default.arg.text,
+ "nextval('t_id_seq'::regclass)")
r = t2.insert().execute()
eq_(r.inserted_primary_key, [1])
-
- testing.db.connect().\
- execution_options(autocommit=True).\
- execute("alter table t_id_seq rename to foobar_id_seq")
-
+ testing.db.connect().execution_options(autocommit=True).\
+ execute('alter table t_id_seq rename to foobar_id_seq'
+ )
m3 = MetaData(testing.db)
t3 = Table('t', m3, autoload=True, implicit_returning=False)
- eq_(t3.c.id.server_default.arg.text, "nextval('foobar_id_seq'::regclass)")
-
+ eq_(t3.c.id.server_default.arg.text,
+ "nextval('foobar_id_seq'::regclass)")
r = t3.insert().execute()
eq_(r.inserted_primary_key, [2])
-
finally:
m1.drop_all()
-
-
+
def test_distinct_on(self):
- t = Table('mytable', MetaData(testing.db),
- Column('id', Integer, primary_key=True),
- Column('a', String(8)))
- eq_(
- str(t.select(distinct=t.c.a)),
- 'SELECT DISTINCT ON (mytable.a) mytable.id, mytable.a \n'
- 'FROM mytable')
- eq_(
- str(t.select(distinct=['id','a'])),
- 'SELECT DISTINCT ON (id, a) mytable.id, mytable.a \n'
- 'FROM mytable')
- eq_(
- str(t.select(distinct=[t.c.id, t.c.a])),
- 'SELECT DISTINCT ON (mytable.id, mytable.a) mytable.id, mytable.a \n'
- 'FROM mytable')
+ t = Table('mytable', MetaData(testing.db), Column('id',
+ Integer, primary_key=True), Column('a', String(8)))
+ eq_(str(t.select(distinct=t.c.a)),
+ 'SELECT DISTINCT ON (mytable.a) mytable.id, mytable.a '
+ '\nFROM mytable')
+ eq_(str(t.select(distinct=['id', 'a'])),
+ 'SELECT DISTINCT ON (id, a) mytable.id, mytable.a \nFROM '
+ 'mytable')
+ eq_(str(t.select(distinct=[t.c.id, t.c.a])),
+ 'SELECT DISTINCT ON (mytable.id, mytable.a) mytable.id, '
+ 'mytable.a \nFROM mytable')
def test_schema_reflection(self):
- """note: this test requires that the 'test_schema' schema be separate and accessible by the test user"""
+ """note: this test requires that the 'test_schema' schema be
+ separate and accessible by the test user"""
meta1 = MetaData(testing.db)
- users = Table('users', meta1,
- Column('user_id', Integer, primary_key = True),
- Column('user_name', String(30), nullable = False),
- schema="test_schema"
- )
-
- addresses = Table('email_addresses', meta1,
- Column('address_id', Integer, primary_key = True),
- Column('remote_user_id', Integer, ForeignKey(users.c.user_id)),
+ users = Table('users', meta1, Column('user_id', Integer,
+ primary_key=True), Column('user_name',
+ String(30), nullable=False), schema='test_schema')
+ addresses = Table(
+ 'email_addresses',
+ meta1,
+ Column('address_id', Integer, primary_key=True),
+ Column('remote_user_id', Integer,
+ ForeignKey(users.c.user_id)),
Column('email_address', String(20)),
- schema="test_schema"
- )
+ schema='test_schema',
+ )
meta1.create_all()
try:
meta2 = MetaData(testing.db)
- addresses = Table('email_addresses', meta2, autoload=True, schema="test_schema")
- users = Table('users', meta2, mustexist=True, schema="test_schema")
-
+ addresses = Table('email_addresses', meta2, autoload=True,
+ schema='test_schema')
+ users = Table('users', meta2, mustexist=True,
+ schema='test_schema')
print users
print addresses
j = join(users, addresses)
print str(j.onclause)
- self.assert_((users.c.user_id==addresses.c.remote_user_id).compare(j.onclause))
+ self.assert_((users.c.user_id
+ == addresses.c.remote_user_id).compare(j.onclause))
finally:
meta1.drop_all()
def test_schema_reflection_2(self):
meta1 = MetaData(testing.db)
- subject = Table("subject", meta1,
- Column("id", Integer, primary_key=True),
- )
-
- referer = Table("referer", meta1,
- Column("id", Integer, primary_key=True),
- Column("ref", Integer, ForeignKey('subject.id')),
- schema="test_schema")
+ subject = Table('subject', meta1, Column('id', Integer,
+ primary_key=True))
+ referer = Table('referer', meta1, Column('id', Integer,
+ primary_key=True), Column('ref', Integer,
+ ForeignKey('subject.id')), schema='test_schema')
meta1.create_all()
try:
meta2 = MetaData(testing.db)
- subject = Table("subject", meta2, autoload=True)
- referer = Table("referer", meta2, schema="test_schema", autoload=True)
+ subject = Table('subject', meta2, autoload=True)
+ referer = Table('referer', meta2, schema='test_schema',
+ autoload=True)
print str(subject.join(referer).onclause)
- self.assert_((subject.c.id==referer.c.ref).compare(subject.join(referer).onclause))
+ self.assert_((subject.c.id
+ == referer.c.ref).compare(
+ subject.join(referer).onclause))
finally:
meta1.drop_all()
def test_schema_reflection_3(self):
meta1 = MetaData(testing.db)
- subject = Table("subject", meta1,
- Column("id", Integer, primary_key=True),
- schema='test_schema_2'
- )
-
- referer = Table("referer", meta1,
- Column("id", Integer, primary_key=True),
- Column("ref", Integer, ForeignKey('test_schema_2.subject.id')),
- schema="test_schema")
-
+ subject = Table('subject', meta1, Column('id', Integer,
+ primary_key=True), schema='test_schema_2')
+ referer = Table('referer', meta1, Column('id', Integer,
+ primary_key=True), Column('ref', Integer,
+ ForeignKey('test_schema_2.subject.id')),
+ schema='test_schema')
meta1.create_all()
try:
meta2 = MetaData(testing.db)
- subject = Table("subject", meta2, autoload=True, schema="test_schema_2")
- referer = Table("referer", meta2, schema="test_schema", autoload=True)
+ subject = Table('subject', meta2, autoload=True,
+ schema='test_schema_2')
+ referer = Table('referer', meta2, schema='test_schema',
+ autoload=True)
print str(subject.join(referer).onclause)
- self.assert_((subject.c.id==referer.c.ref).compare(subject.join(referer).onclause))
+ self.assert_((subject.c.id
+ == referer.c.ref).compare(
+ subject.join(referer).onclause))
finally:
meta1.drop_all()
def test_schema_roundtrips(self):
meta = MetaData(testing.db)
- users = Table('users', meta,
- Column('id', Integer, primary_key=True),
- Column('name', String(50)), schema='test_schema')
+ users = Table('users', meta, Column('id', Integer,
+ primary_key=True), Column('name', String(50)),
+ schema='test_schema')
users.create()
try:
users.insert().execute(id=1, name='name1')
users.insert().execute(id=2, name='name2')
users.insert().execute(id=3, name='name3')
users.insert().execute(id=4, name='name4')
-
- eq_(users.select().where(users.c.name=='name2').execute().fetchall(), [(2, 'name2')])
- eq_(users.select(use_labels=True).where(users.c.name=='name2').execute().fetchall(), [(2, 'name2')])
-
- users.delete().where(users.c.id==3).execute()
- eq_(users.select().where(users.c.name=='name3').execute().fetchall(), [])
-
- users.update().where(users.c.name=='name4').execute(name='newname')
- eq_(users.select(use_labels=True).where(users.c.id==4).execute().fetchall(), [(4, 'newname')])
-
+ eq_(users.select().where(users.c.name == 'name2'
+ ).execute().fetchall(), [(2, 'name2')])
+ eq_(users.select(use_labels=True).where(users.c.name
+ == 'name2').execute().fetchall(), [(2, 'name2')])
+ users.delete().where(users.c.id == 3).execute()
+ eq_(users.select().where(users.c.name == 'name3'
+ ).execute().fetchall(), [])
+ users.update().where(users.c.name == 'name4'
+ ).execute(name='newname')
+ eq_(users.select(use_labels=True).where(users.c.id
+ == 4).execute().fetchall(), [(4, 'newname')])
finally:
users.drop()
def test_preexecute_passivedefault(self):
- """test that when we get a primary key column back
- from reflecting a table which has a default value on it, we pre-execute
- that DefaultClause upon insert."""
+ """test that when we get a primary key column back from
+ reflecting a table which has a default value on it, we
+ pre-execute that DefaultClause upon insert."""
try:
meta = MetaData(testing.db)
user_password VARCHAR NOT NULL
);
""")
-
- t = Table("speedy_users", meta, autoload=True)
- r = t.insert().execute(user_name='user', user_password='lala')
+ t = Table('speedy_users', meta, autoload=True)
+ r = t.insert().execute(user_name='user',
+ user_password='lala')
assert r.inserted_primary_key == [1]
l = t.select().execute().fetchall()
assert l == [(1, 'user', 'lala')]
finally:
- testing.db.execute("drop table speedy_users")
+ testing.db.execute('drop table speedy_users')
@testing.emits_warning()
def test_index_reflection(self):
- """ Reflecting partial & expression-based indexes should warn """
+ """ Reflecting partial & expression-based indexes should warn
+ """
+
import warnings
+
def capture_warnings(*args, **kw):
capture_warnings._orig_showwarning(*args, **kw)
capture_warnings.warnings.append(args)
+
capture_warnings._orig_showwarning = warnings.warn
capture_warnings.warnings = []
-
m1 = MetaData(testing.db)
- t1 = Table('party', m1,
- Column('id', String(10), nullable=False),
- Column('name', String(20), index=True),
- Column('aname', String(20))
- )
+ t1 = Table('party', m1, Column('id', String(10),
+ nullable=False), Column('name', String(20),
+ index=True), Column('aname', String(20)))
m1.create_all()
-
testing.db.execute("""
create index idx1 on party ((id || name))
- """)
+ """)
testing.db.execute("""
create unique index idx2 on party (id) where name = 'test'
""")
-
testing.db.execute("""
create index idx3 on party using btree
(lower(name::text), lower(aname::text))
""")
-
try:
m2 = MetaData(testing.db)
-
warnings.warn = capture_warnings
t2 = Table('party', m2, autoload=True)
-
wrn = capture_warnings.warnings
- assert str(wrn[0][0]) == (
- "Skipped unsupported reflection of expression-based index idx1")
- assert str(wrn[1][0]) == (
- "Predicate of partial index idx2 ignored during reflection")
+ assert str(wrn[0][0]) \
+ == 'Skipped unsupported reflection of '\
+ 'expression-based index idx1'
+ assert str(wrn[1][0]) \
+ == 'Predicate of partial index idx2 ignored during '\
+ 'reflection'
assert len(t2.indexes) == 2
+
# Make sure indexes are in the order we expect them in
+
tmp = [(idx.name, idx) for idx in t2.indexes]
tmp.sort()
-
r1, r2 = [idx[1] for idx in tmp]
-
assert r1.name == 'idx2'
assert r1.unique == True
assert r2.unique == False
warnings.warn = capture_warnings._orig_showwarning
m1.drop_all()
- @testing.fails_on('postgresql+pypostgresql', 'pypostgresql bombs on multiple calls')
+ @testing.fails_on('postgresql+pypostgresql',
+ 'pypostgresql bombs on multiple calls')
def test_set_isolation_level(self):
"""Test setting the isolation level with create_engine"""
+
eng = create_engine(testing.db.url)
- eq_(
- eng.execute("show transaction isolation level").scalar(),
+ eq_(eng.execute('show transaction isolation level').scalar(),
'read committed')
- eng = create_engine(testing.db.url, isolation_level="SERIALIZABLE")
- eq_(
- eng.execute("show transaction isolation level").scalar(),
+ eng = create_engine(testing.db.url,
+ isolation_level='SERIALIZABLE')
+ eq_(eng.execute('show transaction isolation level').scalar(),
'serializable')
- eng = create_engine(testing.db.url, isolation_level="FOO")
-
+ eng = create_engine(testing.db.url, isolation_level='FOO')
if testing.db.driver == 'zxjdbc':
exception_cls = eng.dialect.dbapi.Error
else:
exception_cls = eng.dialect.dbapi.ProgrammingError
- assert_raises(exception_cls, eng.execute, "show transaction isolation level")
-
- @testing.fails_on('+zxjdbc',
- "psycopg2/pg8000 specific assertion")
- @testing.fails_on('pypostgresql',
- "psycopg2/pg8000 specific assertion")
+ assert_raises(exception_cls, eng.execute,
+ 'show transaction isolation level')
+
+ @testing.fails_on('+zxjdbc', 'psycopg2/pg8000 specific assertion')
+ @testing.fails_on('pypostgresql',
+ 'psycopg2/pg8000 specific assertion')
def test_numeric_raise(self):
- stmt = text("select cast('hi' as char) as hi", typemap={'hi':Numeric})
- assert_raises(
- exc.InvalidRequestError,
- testing.db.execute, stmt
- )
+        stmt = text("select cast('hi' as char) as hi",
+                    typemap={'hi': Numeric})
+ assert_raises(exc.InvalidRequestError, testing.db.execute, stmt)
class TimezoneTest(TestBase):
- """Test timezone-aware datetimes.
- psycopg will return a datetime with a tzinfo attached to it, if postgresql
- returns it. python then will not let you compare a datetime with a tzinfo
- to a datetime that doesnt have one. this test illustrates two ways to
- have datetime types with and without timezone info.
- """
+    """Test timezone-aware datetimes.
+
+    psycopg will return a datetime with a tzinfo attached to it, if
+    PostgreSQL returns one. Python then will not let you compare a
+    datetime that has a tzinfo to a datetime that doesn't have one.
+    This test illustrates two ways to have datetime types with and
+    without timezone info.
+    """
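+    # Editor's sketch, not part of the original suite: the comparison
+    # problem the docstring describes, using only the stdlib; _UTC is
+    # a hypothetical minimal tzinfo. Uncommented, the last line raises
+    # TypeError on both Python 2 and Python 3.
+    #
+    #     import datetime
+    #
+    #     class _UTC(datetime.tzinfo):
+    #         def utcoffset(self, dt):
+    #             return datetime.timedelta(0)
+    #         def dst(self, dt):
+    #             return datetime.timedelta(0)
+    #
+    #     aware = datetime.datetime(2010, 5, 10, tzinfo=_UTC())
+    #     naive = datetime.datetime(2010, 5, 10)
+    #     aware < naive  # TypeError: can't compare offset-naive
+    #                    # and offset-aware datetimes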
__only_on__ = 'postgresql'
global tztable, notztable, metadata
metadata = MetaData(testing.db)
- # current_timestamp() in postgresql is assumed to return TIMESTAMP WITH TIMEZONE
- tztable = Table('tztable', metadata,
- Column("id", Integer, primary_key=True),
- Column("date", DateTime(timezone=True), onupdate=func.current_timestamp()),
- Column("name", String(20)),
- )
- notztable = Table('notztable', metadata,
- Column("id", Integer, primary_key=True),
- Column("date", DateTime(timezone=False), onupdate=cast(func.current_timestamp(), DateTime(timezone=False))),
- Column("name", String(20)),
- )
+ # current_timestamp() in postgresql is assumed to return
+ # TIMESTAMP WITH TIMEZONE
+
+        tztable = Table('tztable', metadata,
+                        Column('id', Integer, primary_key=True),
+                        Column('date', DateTime(timezone=True),
+                               onupdate=func.current_timestamp()),
+                        Column('name', String(20)))
+        notztable = Table('notztable', metadata,
+                          Column('id', Integer, primary_key=True),
+                          Column('date', DateTime(timezone=False),
+                                 onupdate=cast(func.current_timestamp(),
+                                 DateTime(timezone=False))),
+                          Column('name', String(20)))
metadata.create_all()
@classmethod
metadata.drop_all()
def test_with_timezone(self):
+
# get a date with a tzinfo
- somedate = testing.db.connect().scalar(func.current_timestamp().select())
+
+ somedate = \
+ testing.db.connect().scalar(func.current_timestamp().select())
assert somedate.tzinfo
-
tztable.insert().execute(id=1, name='row1', date=somedate)
-
- row = select([tztable.c.date], tztable.c.id==1).execute().first()
+        row = select([tztable.c.date],
+                     tztable.c.id == 1).execute().first()
eq_(row[0], somedate)
- eq_(somedate.tzinfo.utcoffset(somedate), row[0].tzinfo.utcoffset(row[0]))
-
- result = tztable.update(tztable.c.id==1).\
- returning(tztable.c.date).execute(name='newname')
+ eq_(somedate.tzinfo.utcoffset(somedate),
+ row[0].tzinfo.utcoffset(row[0]))
+        result = tztable.update(tztable.c.id == 1).\
+            returning(tztable.c.date).execute(name='newname')
row = result.first()
assert row[0] >= somedate
def test_without_timezone(self):
+
# get a date without a tzinfo
- somedate = datetime.datetime(2005, 10, 20, 11, 52, 0)
+
+        somedate = datetime.datetime(2005, 10, 20, 11, 52, 0)
assert not somedate.tzinfo
-
notztable.insert().execute(id=1, name='row1', date=somedate)
-
- row = select([notztable.c.date], notztable.c.id==1).execute().first()
+        row = select([notztable.c.date],
+                     notztable.c.id == 1).execute().first()
eq_(row[0], somedate)
eq_(row[0].tzinfo, None)
-
- result = notztable.update(notztable.c.id==1).\
- returning(notztable.c.date).execute(name='newname')
+        result = notztable.update(notztable.c.id == 1).\
+            returning(notztable.c.date).execute(name='newname')
row = result.first()
assert row[0] >= somedate
class TimePrecisionTest(TestBase, AssertsCompiledSQL):
+
__dialect__ = postgresql.dialect()
-
+
def test_compile(self):
- for (type_, expected) in [
- (postgresql.TIME(), "TIME WITHOUT TIME ZONE"),
- (postgresql.TIME(precision=5), "TIME(5) WITHOUT TIME ZONE"),
- (postgresql.TIME(timezone=True, precision=5), "TIME(5) WITH TIME ZONE"),
- (postgresql.TIMESTAMP(), "TIMESTAMP WITHOUT TIME ZONE"),
- (postgresql.TIMESTAMP(precision=5), "TIMESTAMP(5) WITHOUT TIME ZONE"),
- (postgresql.TIMESTAMP(timezone=True, precision=5), "TIMESTAMP(5) WITH TIME ZONE"),
- ]:
+        for type_, expected in [
+            (postgresql.TIME(), 'TIME WITHOUT TIME ZONE'),
+            (postgresql.TIME(precision=5),
+             'TIME(5) WITHOUT TIME ZONE'),
+            (postgresql.TIME(timezone=True, precision=5),
+             'TIME(5) WITH TIME ZONE'),
+            (postgresql.TIMESTAMP(), 'TIMESTAMP WITHOUT TIME ZONE'),
+            (postgresql.TIMESTAMP(precision=5),
+             'TIMESTAMP(5) WITHOUT TIME ZONE'),
+            (postgresql.TIMESTAMP(timezone=True, precision=5),
+             'TIMESTAMP(5) WITH TIME ZONE'),
+            ]:
self.assert_compile(type_, expected)
-
+
@testing.only_on('postgresql', 'DB specific feature')
def test_reflection(self):
m1 = MetaData(testing.db)
- t1 = Table('t1', m1,
+ t1 = Table(
+ 't1',
+ m1,
Column('c1', postgresql.TIME()),
Column('c2', postgresql.TIME(precision=5)),
- Column('c3', postgresql.TIME(timezone=True, precision=5)),
- Column('c4', postgresql.TIMESTAMP()),
- Column('c5', postgresql.TIMESTAMP(precision=5)),
- Column('c6', postgresql.TIMESTAMP(timezone=True, precision=5)),
-
- )
+ Column('c3', postgresql.TIME(timezone=True, precision=5)),
+ Column('c4', postgresql.TIMESTAMP()),
+ Column('c5', postgresql.TIMESTAMP(precision=5)),
+ Column('c6', postgresql.TIMESTAMP(timezone=True,
+ precision=5)),
+ )
t1.create()
try:
m2 = MetaData(testing.db)
finally:
t1.drop()
-
-
class ArrayTest(TestBase, AssertsExecutionResults):
+
__only_on__ = 'postgresql'
@classmethod
def setup_class(cls):
global metadata, arrtable
metadata = MetaData(testing.db)
-
- arrtable = Table('arrtable', metadata,
- Column('id', Integer, primary_key=True),
- Column('intarr', postgresql.PGArray(Integer)),
- Column('strarr', postgresql.PGArray(Unicode()), nullable=False)
- )
+        arrtable = Table('arrtable', metadata,
+                         Column('id', Integer, primary_key=True),
+                         Column('intarr', postgresql.PGArray(Integer)),
+                         Column('strarr', postgresql.PGArray(Unicode()),
+                                nullable=False))
metadata.create_all()
def teardown(self):
arrtable.delete().execute()
-
+
@classmethod
def teardown_class(cls):
metadata.drop_all()
assert isinstance(tbl.c.intarr.type.item_type, Integer)
assert isinstance(tbl.c.strarr.type.item_type, String)
- @testing.fails_on('postgresql+zxjdbc', 'zxjdbc has no support for PG arrays')
+ @testing.fails_on('postgresql+zxjdbc',
+ 'zxjdbc has no support for PG arrays')
def test_insert_array(self):
- arrtable.insert().execute(intarr=[1,2,3], strarr=[u'abc', u'def'])
+ arrtable.insert().execute(intarr=[1, 2, 3], strarr=[u'abc',
+ u'def'])
results = arrtable.select().execute().fetchall()
eq_(len(results), 1)
- eq_(results[0]['intarr'], [1,2,3])
- eq_(results[0]['strarr'], ['abc','def'])
+ eq_(results[0]['intarr'], [1, 2, 3])
+ eq_(results[0]['strarr'], ['abc', 'def'])
- @testing.fails_on('postgresql+pg8000', 'pg8000 has poor support for PG arrays')
- @testing.fails_on('postgresql+zxjdbc', 'zxjdbc has no support for PG arrays')
+ @testing.fails_on('postgresql+pg8000',
+ 'pg8000 has poor support for PG arrays')
+ @testing.fails_on('postgresql+zxjdbc',
+ 'zxjdbc has no support for PG arrays')
def test_array_where(self):
- arrtable.insert().execute(intarr=[1,2,3], strarr=[u'abc', u'def'])
- arrtable.insert().execute(intarr=[4,5,6], strarr=u'ABC')
- results = arrtable.select().where(arrtable.c.intarr == [1,2,3]).execute().fetchall()
+ arrtable.insert().execute(intarr=[1, 2, 3], strarr=[u'abc',
+ u'def'])
+ arrtable.insert().execute(intarr=[4, 5, 6], strarr=u'ABC')
+        results = arrtable.select().where(arrtable.c.intarr
+                                          == [1, 2, 3]).execute().fetchall()
eq_(len(results), 1)
- eq_(results[0]['intarr'], [1,2,3])
-
- @testing.fails_on('postgresql+pg8000', 'pg8000 has poor support for PG arrays')
- @testing.fails_on('postgresql+pypostgresql', 'pypostgresql fails in coercing an array')
- @testing.fails_on('postgresql+zxjdbc', 'zxjdbc has no support for PG arrays')
+ eq_(results[0]['intarr'], [1, 2, 3])
+
+ @testing.fails_on('postgresql+pg8000',
+ 'pg8000 has poor support for PG arrays')
+ @testing.fails_on('postgresql+pypostgresql',
+ 'pypostgresql fails in coercing an array')
+ @testing.fails_on('postgresql+zxjdbc',
+ 'zxjdbc has no support for PG arrays')
def test_array_concat(self):
- arrtable.insert().execute(intarr=[1,2,3], strarr=[u'abc', u'def'])
- results = select([arrtable.c.intarr + [4,5,6]]).execute().fetchall()
+ arrtable.insert().execute(intarr=[1, 2, 3], strarr=[u'abc',
+ u'def'])
+        results = select([arrtable.c.intarr
+                          + [4, 5, 6]]).execute().fetchall()
eq_(len(results), 1)
- eq_(results[0][0], [1,2,3,4,5,6])
+        eq_(results[0][0], [1, 2, 3, 4, 5, 6])
- @testing.fails_on('postgresql+pg8000', 'pg8000 has poor support for PG arrays')
- @testing.fails_on('postgresql+zxjdbc', 'zxjdbc has no support for PG arrays')
+ @testing.fails_on('postgresql+pg8000',
+ 'pg8000 has poor support for PG arrays')
+ @testing.fails_on('postgresql+zxjdbc',
+ 'zxjdbc has no support for PG arrays')
def test_array_subtype_resultprocessor(self):
- arrtable.insert().execute(intarr=[4,5,6], strarr=[[u'm\xe4\xe4'], [u'm\xf6\xf6']])
- arrtable.insert().execute(intarr=[1,2,3], strarr=[u'm\xe4\xe4', u'm\xf6\xf6'])
- results = arrtable.select(order_by=[arrtable.c.intarr]).execute().fetchall()
+        arrtable.insert().execute(intarr=[4, 5, 6],
+                                  strarr=[[u'm\xe4\xe4'],
+                                          [u'm\xf6\xf6']])
+        arrtable.insert().execute(intarr=[1, 2, 3],
+                                  strarr=[u'm\xe4\xe4', u'm\xf6\xf6'])
+ results = \
+ arrtable.select(order_by=[arrtable.c.intarr]).execute().fetchall()
eq_(len(results), 2)
eq_(results[0]['strarr'], [u'm\xe4\xe4', u'm\xf6\xf6'])
eq_(results[1]['strarr'], [[u'm\xe4\xe4'], [u'm\xf6\xf6']])
- @testing.fails_on('postgresql+pg8000', 'pg8000 has poor support for PG arrays')
- @testing.fails_on('postgresql+zxjdbc', 'zxjdbc has no support for PG arrays')
+ @testing.fails_on('postgresql+pg8000',
+ 'pg8000 has poor support for PG arrays')
+ @testing.fails_on('postgresql+zxjdbc',
+ 'zxjdbc has no support for PG arrays')
def test_array_mutability(self):
- class Foo(object): pass
- footable = Table('foo', metadata,
- Column('id', Integer, primary_key=True),
- Column('intarr', postgresql.PGArray(Integer), nullable=True)
- )
+
+ class Foo(object):
+ pass
+
+        footable = Table('foo', metadata,
+                         Column('id', Integer, primary_key=True),
+                         Column('intarr', postgresql.PGArray(Integer),
+                                nullable=True))
mapper(Foo, footable)
metadata.create_all()
sess = create_session()
-
foo = Foo()
foo.id = 1
- foo.intarr = [1,2,3]
+ foo.intarr = [1, 2, 3]
sess.add(foo)
sess.flush()
sess.expunge_all()
foo = sess.query(Foo).get(1)
- eq_(foo.intarr, [1,2,3])
-
+ eq_(foo.intarr, [1, 2, 3])
foo.intarr.append(4)
sess.flush()
sess.expunge_all()
foo = sess.query(Foo).get(1)
- eq_(foo.intarr, [1,2,3,4])
-
+ eq_(foo.intarr, [1, 2, 3, 4])
foo.intarr = []
sess.flush()
sess.expunge_all()
eq_(foo.intarr, [])
-
foo.intarr = None
sess.flush()
sess.expunge_all()
eq_(foo.intarr, None)
# Errors in r4217:
+
foo = Foo()
foo.id = 2
sess.add(foo)
eq_(result[0], datetime.datetime(2007, 12, 25, 0, 0))
class ServerSideCursorsTest(TestBase, AssertsExecutionResults):
+
__only_on__ = 'postgresql+psycopg2'
@classmethod
def setup_class(cls):
global ss_engine
- ss_engine = engines.testing_engine(options={'server_side_cursors':True})
+        ss_engine = engines.testing_engine(
+            options={'server_side_cursors': True})
@classmethod
def teardown_class(cls):
ss_engine.dispose()
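+    # Editor's note, not from the original suite: psycopg2 implements
+    # server-side cursors as "named" cursors, created by passing a
+    # name to connection.cursor(); the tests below use
+    # result.cursor.name as the telltale. A minimal standalone sketch
+    # (the DSN is hypothetical):
+    #
+    #     import psycopg2
+    #     conn = psycopg2.connect('dbname=test')
+    #     cur = conn.cursor('my_named_cursor')  # server-side cursor
+    #     cur.execute('select 1')
+    #     assert cur.name == 'my_named_cursor'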
def test_uses_ss(self):
- result = ss_engine.execute("select 1")
+ result = ss_engine.execute('select 1')
assert result.cursor.name
-
- result = ss_engine.execute(text("select 1"))
+ result = ss_engine.execute(text('select 1'))
assert result.cursor.name
-
result = ss_engine.execute(select([1]))
assert result.cursor.name
def test_uses_ss_when_explicitly_enabled(self):
- engine = engines.testing_engine(options={'server_side_cursors':False})
- result = engine.execute(text("select 1"))
+        engine = engines.testing_engine(
+            options={'server_side_cursors': False})
+ result = engine.execute(text('select 1'))
+
# It should be off globally ...
- assert not result.cursor.name
+ assert not result.cursor.name
s = select([1]).execution_options(stream_results=True)
result = engine.execute(s)
+
# ... but enabled for this one.
+
assert result.cursor.name
# and this one
- result = engine.connect().execution_options(stream_results=True).execute("select 1")
+
+        result = \
+            engine.connect().execution_options(stream_results=True).\
+            execute('select 1')
assert result.cursor.name
-
+
# not this one
- result = engine.connect().execution_options(stream_results=False).execute(s)
+
+ result = \
+ engine.connect().execution_options(stream_results=False).\
+ execute(s)
assert not result.cursor.name
-
+
def test_ss_explicitly_disabled(self):
s = select([1]).execution_options(stream_results=False)
result = ss_engine.execute(s)
assert not result.cursor.name
def test_aliases_and_ss(self):
- engine = engines.testing_engine(options={'server_side_cursors':False})
+        engine = engines.testing_engine(
+            options={'server_side_cursors': False})
s1 = select([1]).execution_options(stream_results=True).alias()
result = engine.execute(s1)
assert result.cursor.name
- # s1's options shouldn't affect s2 when s2 is used as a from_obj.
+ # s1's options shouldn't affect s2 when s2 is used as a
+ # from_obj.
+
s2 = select([1], from_obj=s1)
result = engine.execute(s2)
assert not result.cursor.name
s1 = select([1], for_update=True)
result = ss_engine.execute(s1)
assert result.cursor.name
-
result = ss_engine.execute('SELECT 1 FOR UPDATE')
assert result.cursor.name
def test_orm_queries_with_ss(self):
metadata = MetaData(testing.db)
- class Foo(object): pass
- footable = Table('foobar', metadata,
- Column('id', Integer, primary_key=True),
- )
+
+        class Foo(object):
+            pass
+
+        footable = Table('foobar', metadata,
+                         Column('id', Integer, primary_key=True))
mapper(Foo, footable)
metadata.create_all()
try:
sess = create_session()
-
- engine = engines.testing_engine(options={'server_side_cursors':False})
+            engine = engines.testing_engine(
+                options={'server_side_cursors': False})
result = engine.execute(sess.query(Foo).statement)
assert not result.cursor.name, result.cursor.name
result.close()
-
q = sess.query(Foo).execution_options(stream_results=True)
result = engine.execute(q.statement)
assert result.cursor.name
result.close()
-
- result = sess.query(Foo).execution_options(stream_results=True).subquery().execute()
+ result = \
+ sess.query(Foo).execution_options(stream_results=True).\
+ subquery().execute()
assert result.cursor.name
result.close()
finally:
metadata.drop_all()
-
+
def test_text_with_ss(self):
- engine = engines.testing_engine(options={'server_side_cursors':False})
+        engine = engines.testing_engine(
+            options={'server_side_cursors': False})
s = text('select 42')
result = engine.execute(s)
assert not result.cursor.name
result = engine.execute(s)
assert result.cursor.name
-
def test_roundtrip(self):
test_table = Table('test_table', MetaData(ss_engine),
- Column('id', Integer, primary_key=True),
- Column('data', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)))
test_table.create(checkfirst=True)
try:
test_table.insert().execute(data='data1')
-
nextid = ss_engine.execute(Sequence('test_table_id_seq'))
test_table.insert().execute(id=nextid, data='data2')
-
- eq_(test_table.select().execute().fetchall(), [(1, 'data1'), (2, 'data2')])
-
- test_table.update().where(test_table.c.id==2).values(data=test_table.c.data + ' updated').execute()
- eq_(test_table.select().execute().fetchall(), [(1, 'data1'), (2, 'data2 updated')])
+            eq_(test_table.select().execute().fetchall(),
+                [(1, 'data1'), (2, 'data2')])
+            test_table.update().where(test_table.c.id == 2).\
+                values(data=test_table.c.data + ' updated').execute()
+            eq_(test_table.select().execute().fetchall(),
+                [(1, 'data1'), (2, 'data2 updated')])
test_table.delete().execute()
eq_(test_table.count().scalar(), 0)
finally:
assert t.c.precision_interval.type.precision == 3
class MatchTest(TestBase, AssertsCompiledSQL):
+
__only_on__ = 'postgresql'
- __excluded_on__ = (('postgresql', '<', (8, 3, 0)),)
+    __excluded_on__ = (('postgresql', '<', (8, 3, 0)),)
@classmethod
def setup_class(cls):
global metadata, cattable, matchtable
metadata = MetaData(testing.db)
-
- cattable = Table('cattable', metadata,
- Column('id', Integer, primary_key=True),
- Column('description', String(50)),
- )
- matchtable = Table('matchtable', metadata,
- Column('id', Integer, primary_key=True),
- Column('title', String(200)),
- Column('category_id', Integer, ForeignKey('cattable.id')),
- )
+        cattable = Table('cattable', metadata,
+                         Column('id', Integer, primary_key=True),
+                         Column('description', String(50)))
+        matchtable = Table('matchtable', metadata,
+                           Column('id', Integer, primary_key=True),
+                           Column('title', String(200)),
+                           Column('category_id', Integer,
+                                  ForeignKey('cattable.id')))
metadata.create_all()
-
- cattable.insert().execute([
- {'id': 1, 'description': 'Python'},
- {'id': 2, 'description': 'Ruby'},
- ])
- matchtable.insert().execute([
- {'id': 1, 'title': 'Agile Web Development with Rails', 'category_id': 2},
- {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
- {'id': 3, 'title': "Programming Matz's Ruby", 'category_id': 2},
- {'id': 4, 'title': 'The Definitive Guide to Django', 'category_id': 1},
- {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1}
- ])
+        cattable.insert().execute([{'id': 1, 'description': 'Python'},
+                                   {'id': 2, 'description': 'Ruby'}])
+        matchtable.insert().execute([
+            {'id': 1, 'title': 'Agile Web Development with Rails',
+             'category_id': 2},
+            {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
+            {'id': 3, 'title': "Programming Matz's Ruby",
+             'category_id': 2},
+            {'id': 4, 'title': 'The Definitive Guide to Django',
+             'category_id': 1},
+            {'id': 5, 'title': 'Python in a Nutshell',
+             'category_id': 1},
+            ])
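+        # Editor's note, derived from the compile assertions below
+        # (no new behavior): on PostgreSQL, title.match('python')
+        # renders as the full-text operator, roughly:
+        #
+        #     matchtable.title @@ to_tsquery('python')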
@classmethod
def teardown_class(cls):
@testing.fails_on('postgresql+pg8000', 'uses positional')
@testing.fails_on('postgresql+zxjdbc', 'uses qmark')
def test_expression_pyformat(self):
- self.assert_compile(matchtable.c.title.match('somstr'), "matchtable.title @@ to_tsquery(%(title_1)s)")
+        self.assert_compile(matchtable.c.title.match('somstr'),
+                            'matchtable.title @@ '
+                            'to_tsquery(%(title_1)s)')
@testing.fails_on('postgresql+psycopg2', 'uses pyformat')
@testing.fails_on('postgresql+pypostgresql', 'uses pyformat')
@testing.fails_on('postgresql+zxjdbc', 'uses qmark')
def test_expression_positional(self):
- self.assert_compile(matchtable.c.title.match('somstr'), "matchtable.title @@ to_tsquery(%s)")
+ self.assert_compile(matchtable.c.title.match('somstr'),
+ 'matchtable.title @@ to_tsquery(%s)')
def test_simple_match(self):
- results = matchtable.select().where(matchtable.c.title.match('python')).order_by(matchtable.c.id).execute().fetchall()
+        results = matchtable.select().\
+            where(matchtable.c.title.match('python')).\
+            order_by(matchtable.c.id).execute().fetchall()
eq_([2, 5], [r.id for r in results])
def test_simple_match_with_apostrophe(self):
- results = matchtable.select().where(matchtable.c.title.match("Matz's")).execute().fetchall()
+        results = matchtable.select().\
+            where(matchtable.c.title.match("Matz's")).\
+            execute().fetchall()
eq_([3], [r.id for r in results])
def test_simple_derivative_match(self):
- results = matchtable.select().where(matchtable.c.title.match('nutshells')).execute().fetchall()
+        results = matchtable.select().\
+            where(matchtable.c.title.match('nutshells')).\
+            execute().fetchall()
eq_([5], [r.id for r in results])
def test_or_match(self):
- results1 = matchtable.select().where(or_(matchtable.c.title.match('nutshells'),
- matchtable.c.title.match('rubies'))
- ).order_by(matchtable.c.id).execute().fetchall()
+        results1 = matchtable.select().\
+            where(or_(matchtable.c.title.match('nutshells'),
+                      matchtable.c.title.match('rubies'))).\
+            order_by(matchtable.c.id).execute().fetchall()
eq_([3, 5], [r.id for r in results1])
- results2 = matchtable.select().where(matchtable.c.title.match('nutshells | rubies'),
- ).order_by(matchtable.c.id).execute().fetchall()
+        results2 = matchtable.select().\
+            where(matchtable.c.title.match('nutshells | rubies')).\
+            order_by(matchtable.c.id).execute().fetchall()
eq_([3, 5], [r.id for r in results2])
-
def test_and_match(self):
- results1 = matchtable.select().where(and_(matchtable.c.title.match('python'),
- matchtable.c.title.match('nutshells'))
- ).execute().fetchall()
+        results1 = matchtable.select().\
+            where(and_(matchtable.c.title.match('python'),
+                       matchtable.c.title.match('nutshells'))).\
+            execute().fetchall()
eq_([5], [r.id for r in results1])
- results2 = matchtable.select().where(matchtable.c.title.match('python & nutshells'),
- ).execute().fetchall()
+        results2 = matchtable.select().\
+            where(matchtable.c.title.match('python & nutshells')).\
+            execute().fetchall()
eq_([5], [r.id for r in results2])
def test_match_across_joins(self):
- results = matchtable.select().where(and_(cattable.c.id==matchtable.c.category_id,
- or_(cattable.c.description.match('Ruby'),
- matchtable.c.title.match('nutshells')))
- ).order_by(matchtable.c.id).execute().fetchall()
+        results = matchtable.select().\
+            where(and_(cattable.c.id == matchtable.c.category_id,
+                       or_(cattable.c.description.match('Ruby'),
+                           matchtable.c.title.match('nutshells')))).\
+            order_by(matchtable.c.id).execute().fetchall()
eq_([1, 3, 5], [r.id for r in results])
-
-
"""SQLite-specific tests."""
-from sqlalchemy.test.testing import eq_, assert_raises, assert_raises_message
+from sqlalchemy.test.testing import eq_, assert_raises, \
+ assert_raises_message
import datetime
from sqlalchemy import *
from sqlalchemy import exc, sql, schema
-from sqlalchemy.dialects.sqlite import base as sqlite, pysqlite as pysqlite_dialect
+from sqlalchemy.dialects.sqlite import base as sqlite, \
+ pysqlite as pysqlite_dialect
from sqlalchemy.test import *
class TestTypes(TestBase, AssertsExecutionResults):
+
__only_on__ = 'sqlite'
def test_boolean(self):
"""Test that the boolean only treats 1 as True
-
+
"""
meta = MetaData(testing.db)
- t = Table('bool_table', meta,
- Column('id', Integer, primary_key=True),
- Column('boo', Boolean(create_constraint=False)))
-
+ t = Table('bool_table', meta, Column('id', Integer,
+ primary_key=True), Column('boo',
+ Boolean(create_constraint=False)))
try:
meta.create_all()
- testing.db.execute("INSERT INTO bool_table (id, boo) VALUES (1, 'false');")
- testing.db.execute("INSERT INTO bool_table (id, boo) VALUES (2, 'true');")
- testing.db.execute("INSERT INTO bool_table (id, boo) VALUES (3, '1');")
- testing.db.execute("INSERT INTO bool_table (id, boo) VALUES (4, '0');")
- testing.db.execute("INSERT INTO bool_table (id, boo) VALUES (5, 1);")
- testing.db.execute("INSERT INTO bool_table (id, boo) VALUES (6, 0);")
- eq_(
- t.select(t.c.boo).order_by(t.c.id).execute().fetchall(),
- [(3, True,), (5, True,)]
- )
+ testing.db.execute("INSERT INTO bool_table (id, boo) "
+ "VALUES (1, 'false');")
+ testing.db.execute("INSERT INTO bool_table (id, boo) "
+ "VALUES (2, 'true');")
+ testing.db.execute("INSERT INTO bool_table (id, boo) "
+ "VALUES (3, '1');")
+ testing.db.execute("INSERT INTO bool_table (id, boo) "
+ "VALUES (4, '0');")
+ testing.db.execute('INSERT INTO bool_table (id, boo) '
+ 'VALUES (5, 1);')
+ testing.db.execute('INSERT INTO bool_table (id, boo) '
+ 'VALUES (6, 0);')
+ eq_(t.select(t.c.boo).order_by(t.c.id).execute().fetchall(),
+ [(3, True), (5, True)])
finally:
meta.drop_all()
def test_string_dates_raise(self):
- assert_raises(TypeError,
- testing.db.execute,
- select([1]).where(bindparam("date", type_=Date)),
- date=str(datetime.date(2007, 10, 30)))
-
+ assert_raises(TypeError, testing.db.execute,
+ select([1]).where(bindparam('date', type_=Date)),
+ date=str(datetime.date(2007, 10, 30)))
+
def test_time_microseconds(self):
- dt = datetime.datetime(2008, 6, 27, 12, 0, 0, 125) # 125 usec
+        dt = datetime.datetime(2008, 6, 27, 12, 0, 0, 125)  # 125 usec
eq_(str(dt), '2008-06-27 12:00:00.000125')
sldt = sqlite.DATETIME()
bp = sldt.bind_processor(None)
eq_(bp(dt), '2008-06-27 12:00:00.000125')
-
rp = sldt.result_processor(None, None)
eq_(rp(bp(dt)), dt)
-
+
def test_native_datetime(self):
dbapi = testing.db.dialect.dbapi
-
- connect_args={'detect_types': dbapi.PARSE_DECLTYPES|dbapi.PARSE_COLNAMES}
- engine = engines.testing_engine(options={'connect_args':connect_args, 'native_datetime':True})
-
- t = Table('datetest', MetaData(),
- Column('id', Integer, primary_key=True),
- Column('d1', Date),
- Column('d2', TIMESTAMP)
- )
+        connect_args = {'detect_types': dbapi.PARSE_DECLTYPES
+                        | dbapi.PARSE_COLNAMES}
+        engine = engines.testing_engine(
+            options={'connect_args': connect_args,
+                     'native_datetime': True})
+        t = Table('datetest', MetaData(),
+                  Column('id', Integer, primary_key=True),
+                  Column('d1', Date), Column('d2', TIMESTAMP))
t.create(engine)
try:
- engine.execute(t.insert(), {'d1':datetime.date(2010, 5, 10), 'd2':datetime.datetime(2010, 5, 10, 12, 15, 25)})
+            engine.execute(t.insert(),
+                           {'d1': datetime.date(2010, 5, 10),
+                            'd2': datetime.datetime(2010, 5, 10,
+                                                    12, 15, 25)})
row = engine.execute(t.select()).first()
- eq_(row, (1, datetime.date(2010, 5, 10), datetime.datetime(2010, 5, 10, 12, 15, 25)))
-
+            eq_(row, (1, datetime.date(2010, 5, 10),
+                      datetime.datetime(2010, 5, 10, 12, 15, 25)))
r = engine.execute(func.current_date()).scalar()
assert isinstance(r, basestring)
-
finally:
t.drop(engine)
engine.dispose()
-
def test_no_convert_unicode(self):
"""test no utf-8 encoding occurs"""
-
+
dialect = sqlite.dialect()
for t in (
- String(convert_unicode=True),
- CHAR(convert_unicode=True),
- Unicode(),
- UnicodeText(),
- String(convert_unicode=True),
- CHAR(convert_unicode=True),
- Unicode(),
- UnicodeText()
+ String(convert_unicode=True),
+ CHAR(convert_unicode=True),
+ Unicode(),
+ UnicodeText(),
+ String(convert_unicode=True),
+ CHAR(convert_unicode=True),
+ Unicode(),
+ UnicodeText(),
):
-
bindproc = t.dialect_impl(dialect).bind_processor(dialect)
- assert not bindproc or isinstance(bindproc(u"some string"), unicode)
+ assert not bindproc or isinstance(bindproc(u'some string'),
+ unicode)
def test_type_reflection(self):
+
# (ask_for, roundtripped_as_if_different)
- specs = [( String(), String(), ),
- ( String(1), String(1), ),
- ( String(3), String(3), ),
- ( Text(), Text(), ),
- ( Unicode(), String(), ),
- ( Unicode(1), String(1), ),
- ( Unicode(3), String(3), ),
- ( UnicodeText(), Text(), ),
- ( CHAR(1), ),
- ( CHAR(3), CHAR(3), ),
- ( NUMERIC, NUMERIC(), ),
- ( NUMERIC(10,2), NUMERIC(10,2), ),
- ( Numeric, NUMERIC(), ),
- ( Numeric(10, 2), NUMERIC(10, 2), ),
- ( DECIMAL, DECIMAL(), ),
- ( DECIMAL(10, 2), DECIMAL(10, 2), ),
- ( Float, Float(), ),
- ( NUMERIC(), ),
- ( TIMESTAMP, TIMESTAMP(), ),
- ( DATETIME, DATETIME(), ),
- ( DateTime, DateTime(), ),
- ( DateTime(), ),
- ( DATE, DATE(), ),
- ( Date, Date(), ),
- ( TIME, TIME(), ),
- ( Time, Time(), ),
- ( BOOLEAN, BOOLEAN(), ),
- ( Boolean, Boolean(), ),
- ]
- columns = [Column('c%i' % (i + 1), t[0]) for i, t in enumerate(specs)]
+ specs = [
+ (String(), String()),
+ (String(1), String(1)),
+ (String(3), String(3)),
+ (Text(), Text()),
+ (Unicode(), String()),
+ (Unicode(1), String(1)),
+ (Unicode(3), String(3)),
+ (UnicodeText(), Text()),
+            (CHAR(1),),
+ (CHAR(3), CHAR(3)),
+ (NUMERIC, NUMERIC()),
+ (NUMERIC(10, 2), NUMERIC(10, 2)),
+ (Numeric, NUMERIC()),
+ (Numeric(10, 2), NUMERIC(10, 2)),
+ (DECIMAL, DECIMAL()),
+ (DECIMAL(10, 2), DECIMAL(10, 2)),
+ (Float, Float()),
+            (NUMERIC(),),
+ (TIMESTAMP, TIMESTAMP()),
+ (DATETIME, DATETIME()),
+ (DateTime, DateTime()),
+            (DateTime(),),
+ (DATE, DATE()),
+ (Date, Date()),
+ (TIME, TIME()),
+ (Time, Time()),
+ (BOOLEAN, BOOLEAN()),
+ (Boolean, Boolean()),
+ ]
+ columns = [Column('c%i' % (i + 1), t[0]) for (i, t) in
+ enumerate(specs)]
db = testing.db
m = MetaData(db)
t_table = Table('types', m, *columns)
try:
db.execute('CREATE VIEW types_v AS SELECT * from types')
rv = Table('types_v', m2, autoload=True)
-
expected = [len(c) > 1 and c[1] or c[0] for c in specs]
for table in rt, rv:
for i, reflected in enumerate(table.c):
- assert isinstance(reflected.type, type(expected[i])), "%d: %r" % (i, type(expected[i]))
+                assert isinstance(reflected.type, type(expected[i])), \
+                    '%d: %r' % (i, type(expected[i]))
finally:
db.execute('DROP VIEW types_v')
finally:
class TestDefaults(TestBase, AssertsExecutionResults):
+
__only_on__ = 'sqlite'
- @testing.exclude('sqlite', '<', (3, 3, 8),
- "sqlite3 changesets 3353 and 3440 modified behavior of default displayed in pragma table_info()")
+ @testing.exclude('sqlite', '<', (3, 3, 8),
+ 'sqlite3 changesets 3353 and 3440 modified '
+ 'behavior of default displayed in pragma '
+ 'table_info()')
def test_default_reflection(self):
+
# (ask_for, roundtripped_as_if_different)
- specs = [( String(3), '"foo"' ),
- ( NUMERIC(10,2), '100.50' ),
- ( Integer, '5' ),
- ( Boolean, 'False' ),
- ]
- columns = [Column('c%i' % (i + 1), t[0], server_default=text(t[1])) for i, t in enumerate(specs)]
+ specs = [(String(3), '"foo"'), (NUMERIC(10, 2), '100.50'),
+ (Integer, '5'), (Boolean, 'False')]
+        columns = [Column('c%i' % (i + 1), t[0],
+                          server_default=text(t[1]))
+                   for (i, t) in enumerate(specs)]
db = testing.db
m = MetaData(db)
t_table = Table('t_defaults', m, *columns)
-
try:
m.create_all()
-
m2 = MetaData(db)
rt = Table('t_defaults', m2, autoload=True)
expected = [c[1] for c in specs]
finally:
m.drop_all()
- @testing.exclude('sqlite', '<', (3, 3, 8),
- "sqlite3 changesets 3353 and 3440 modified behavior of default displayed in pragma table_info()")
+ @testing.exclude('sqlite', '<', (3, 3, 8),
+ 'sqlite3 changesets 3353 and 3440 modified '
+ 'behavior of default displayed in pragma '
+ 'table_info()')
def test_default_reflection_2(self):
db = testing.db
m = MetaData(db)
-
- expected = ["my_default", '0']
- table = """CREATE TABLE r_defaults (
+ expected = ['my_default', '0']
+ table = \
+ """CREATE TABLE r_defaults (
data VARCHAR(40) DEFAULT 'my_default',
val INTEGER NOT NULL DEFAULT 0
)"""
-
try:
db.execute(table)
-
rt = Table('r_defaults', m, autoload=True)
for i, reflected in enumerate(rt.c):
eq_(str(reflected.server_default.arg), expected[i])
finally:
- db.execute("DROP TABLE r_defaults")
+ db.execute('DROP TABLE r_defaults')
class DialectTest(TestBase, AssertsExecutionResults):
+
__only_on__ = 'sqlite'
def test_extra_reserved_words(self):
"""Tests reserved words in identifiers.
-
+
'true', 'false', and 'column' are undocumented reserved words
- when used as column identifiers (as of 3.5.1). Covering them here
- to ensure they remain in place if the dialect's reserved_words set
- is updated in the future.
- """
+        when used as column identifiers (as of 3.5.1). Covering them
+        here to ensure they remain in place if the dialect's
+        reserved_words set is updated in the future.
+        """
meta = MetaData(testing.db)
- t = Table('reserved', meta,
- Column('safe', Integer),
- Column('true', Integer),
- Column('false', Integer),
- Column('column', Integer))
-
+ t = Table(
+ 'reserved',
+ meta,
+ Column('safe', Integer),
+ Column('true', Integer),
+ Column('false', Integer),
+ Column('column', Integer),
+ )
try:
meta.create_all()
t.insert().execute(safe=1)
"""Tests autoload of tables created with quoted column names."""
# This is quirky in sqlite.
+
testing.db.execute("""CREATE TABLE "django_content_type" (
"id" integer NOT NULL PRIMARY KEY,
"django_stuff" text NULL
CREATE TABLE "django_admin_log" (
"id" integer NOT NULL PRIMARY KEY,
"action_time" datetime NOT NULL,
- "content_type_id" integer NULL REFERENCES "django_content_type" ("id"),
+ "content_type_id" integer NULL
+ REFERENCES "django_content_type" ("id"),
"object_id" text NULL,
"change_message" text NOT NULL
)
""")
try:
meta = MetaData(testing.db)
- table1 = Table("django_admin_log", meta, autoload=True)
- table2 = Table("django_content_type", meta, autoload=True)
+ table1 = Table('django_admin_log', meta, autoload=True)
+ table2 = Table('django_content_type', meta, autoload=True)
j = table1.join(table2)
- assert j.onclause.compare(table1.c.content_type_id==table2.c.id)
+ assert j.onclause.compare(table1.c.content_type_id
+ == table2.c.id)
finally:
- testing.db.execute("drop table django_admin_log")
- testing.db.execute("drop table django_content_type")
-
+ testing.db.execute('drop table django_admin_log')
+ testing.db.execute('drop table django_content_type')
def test_attached_as_schema(self):
cx = testing.db.connect()
cx.execute('ATTACH DATABASE ":memory:" AS test_schema')
dialect = cx.dialect
assert dialect.get_table_names(cx, 'test_schema') == []
-
meta = MetaData(cx)
Table('created', meta, Column('id', Integer),
schema='test_schema')
alt_master = Table('sqlite_master', meta, autoload=True,
schema='test_schema')
meta.create_all(cx)
-
- eq_(dialect.get_table_names(cx, 'test_schema'),
- ['created'])
+ eq_(dialect.get_table_names(cx, 'test_schema'), ['created'])
assert len(alt_master.c) > 0
-
meta.clear()
reflected = Table('created', meta, autoload=True,
schema='test_schema')
assert len(reflected.c) == 1
-
cx.execute(reflected.insert(), dict(id=1))
r = cx.execute(reflected.select()).fetchall()
- assert list(r) == [(1,)]
-
+        assert list(r) == [(1,)]
cx.execute(reflected.update(), dict(id=2))
r = cx.execute(reflected.select()).fetchall()
- assert list(r) == [(2,)]
-
- cx.execute(reflected.delete(reflected.c.id==2))
+        assert list(r) == [(2,)]
+ cx.execute(reflected.delete(reflected.c.id == 2))
r = cx.execute(reflected.select()).fetchall()
assert list(r) == []
# note that sqlite_master is cleared, above
- meta.drop_all()
+ meta.drop_all()
assert dialect.get_table_names(cx, 'test_schema') == []
finally:
cx.execute('DETACH DATABASE test_schema')
cx = testing.db.connect()
try:
cx.execute('CREATE TEMPORARY TABLE tempy (id INT)')
-
assert 'tempy' in cx.dialect.get_table_names(cx, None)
-
meta = MetaData(cx)
tempy = Table('tempy', meta, autoload=True)
assert len(tempy.c) == 1
except exc.DBAPIError:
pass
raise
-
-
+
def test_dont_reflect_autoindex(self):
meta = MetaData(testing.db)
t = Table('foo', meta, Column('bar', String, primary_key=True))
meta.create_all()
-
from sqlalchemy.engine.reflection import Inspector
try:
inspector = Inspector(testing.db)
eq_(inspector.get_indexes('foo'), [])
- eq_(
- inspector.get_indexes('foo', include_auto_indexes=True),
- [{'unique': 1, 'name': u'sqlite_autoindex_foo_1', 'column_names': [u'bar']}]
- )
+            eq_(inspector.get_indexes('foo', include_auto_indexes=True),
+                [{'unique': 1, 'name': u'sqlite_autoindex_foo_1',
+                  'column_names': [u'bar']}])
finally:
meta.drop_all()
-
-
+
def test_set_isolation_level(self):
"""Test setting the read uncommitted/serializable levels"""
- eng = create_engine(testing.db.url)
- eq_(eng.execute("PRAGMA read_uncommitted").scalar(), 0)
-
- eng = create_engine(testing.db.url, isolation_level="READ UNCOMMITTED")
- eq_(eng.execute("PRAGMA read_uncommitted").scalar(), 1)
-
- eng = create_engine(testing.db.url, isolation_level="SERIALIZABLE")
- eq_(eng.execute("PRAGMA read_uncommitted").scalar(), 0)
+ eng = create_engine(testing.db.url)
+ eq_(eng.execute('PRAGMA read_uncommitted').scalar(), 0)
+ eng = create_engine(testing.db.url,
+ isolation_level='READ UNCOMMITTED')
+ eq_(eng.execute('PRAGMA read_uncommitted').scalar(), 1)
+ eng = create_engine(testing.db.url,
+ isolation_level='SERIALIZABLE')
+ eq_(eng.execute('PRAGMA read_uncommitted').scalar(), 0)
assert_raises(exc.ArgumentError, create_engine, testing.db.url,
- isolation_level="FOO")
-
+ isolation_level='FOO')
def test_create_index_with_schema(self):
"""Test creation of index with explicit schema"""
meta = MetaData(testing.db)
- t = Table('foo', meta, Column('bar', String, index=True), schema='main')
-
+ t = Table('foo', meta, Column('bar', String, index=True),
+ schema='main')
try:
meta.create_all()
finally:
class SQLTest(TestBase, AssertsCompiledSQL):
+
"""Tests SQLite-dialect specific compilation."""
__dialect__ = sqlite.dialect()
-
def test_extract(self):
t = sql.table('t', sql.column('col1'))
-
mapping = {
'month': '%m',
'day': '%d',
'dow': '%w',
'week': '%W',
}
-
for field, subst in mapping.items():
- self.assert_compile(
- select([extract(field, t.c.col1)]),
- "SELECT CAST(STRFTIME('%s', t.col1) AS INTEGER) AS anon_1 "
- "FROM t" % subst)
+ self.assert_compile(select([extract(field, t.c.col1)]),
+ "SELECT CAST(STRFTIME('%s', t.col1) AS "
+ "INTEGER) AS anon_1 FROM t" % subst)
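+
+    # Editor's sketch, not from the original suite: the STRFTIME
+    # mapping above can be checked directly against SQLite, e.g. for
+    # 'month':
+    #
+    #     import sqlite3
+    #     con = sqlite3.connect(':memory:')
+    #     row = con.execute("SELECT CAST(STRFTIME('%m', '2008-06-27')"
+    #                       " AS INTEGER)").fetchone()
+    #     assert row == (6,)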
class InsertTest(TestBase, AssertsExecutionResults):
+
"""Tests inserts and autoincrement."""
__only_on__ = 'sqlite'
- # empty insert (i.e. INSERT INTO table DEFAULT VALUES)
- # fails on 3.3.7 and before
+ # empty insert (i.e. INSERT INTO table DEFAULT VALUES) fails on
+ # 3.3.7 and before
+
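+    # Editor's note (an assumption about the compiled SQL, not taken
+    # from the original suite): the no-values insert exercised below
+    # renders roughly as
+    #
+    #     INSERT INTO a DEFAULT VALUES
+    #
+    # which SQLite accepts only as of 3.3.8, hence the version
+    # excludes on the tests that follow.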
def _test_empty_insert(self, table, expect=1):
try:
table.create()
- for wanted in (expect, expect * 2):
-
+ for wanted in expect, expect * 2:
table.insert().execute()
-
rows = table.select().execute().fetchall()
eq_(len(rows), wanted)
finally:
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_pk1(self):
- self._test_empty_insert(
- Table('a', MetaData(testing.db),
- Column('id', Integer, primary_key=True)))
+        self._test_empty_insert(Table('a', MetaData(testing.db),
+                                Column('id', Integer, primary_key=True)))
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_pk2(self):
- assert_raises(
- exc.DBAPIError,
- self._test_empty_insert,
- Table('b', MetaData(testing.db),
- Column('x', Integer, primary_key=True),
- Column('y', Integer, primary_key=True)))
+        assert_raises(exc.DBAPIError, self._test_empty_insert,
+                      Table('b', MetaData(testing.db),
+                            Column('x', Integer, primary_key=True),
+                            Column('y', Integer, primary_key=True)))
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_pk3(self):
- assert_raises(
- exc.DBAPIError,
- self._test_empty_insert,
- Table('c', MetaData(testing.db),
- Column('x', Integer, primary_key=True),
- Column('y', Integer, DefaultClause('123'),
- primary_key=True)))
+        assert_raises(exc.DBAPIError, self._test_empty_insert,
+                      Table('c', MetaData(testing.db),
+                            Column('x', Integer, primary_key=True),
+                            Column('y', Integer, DefaultClause('123'),
+                                   primary_key=True)))
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_pk4(self):
- self._test_empty_insert(
- Table('d', MetaData(testing.db),
- Column('x', Integer, primary_key=True),
- Column('y', Integer, DefaultClause('123'))))
+        self._test_empty_insert(Table('d', MetaData(testing.db),
+                                Column('x', Integer, primary_key=True),
+                                Column('y', Integer,
+                                       DefaultClause('123'))))
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_nopk1(self):
- self._test_empty_insert(
- Table('e', MetaData(testing.db),
- Column('id', Integer)))
+ self._test_empty_insert(Table('e', MetaData(testing.db),
+ Column('id', Integer)))
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_nopk2(self):
- self._test_empty_insert(
- Table('f', MetaData(testing.db),
- Column('x', Integer),
- Column('y', Integer)))
+        self._test_empty_insert(Table('f', MetaData(testing.db),
+                                Column('x', Integer),
+                                Column('y', Integer)))
def test_inserts_with_spaces(self):
- tbl = Table('tbl', MetaData('sqlite:///'),
- Column('with space', Integer),
- Column('without', Integer))
+        tbl = Table('tbl', MetaData('sqlite:///'),
+                    Column('with space', Integer),
+                    Column('without', Integer))
tbl.create()
try:
- tbl.insert().execute({'without':123})
+ tbl.insert().execute({'without': 123})
assert list(tbl.select().execute()) == [(None, 123)]
-
- tbl.insert().execute({'with space':456})
- assert list(tbl.select().execute()) == [(None, 123), (456, None)]
-
+ tbl.insert().execute({'with space': 456})
+            assert list(tbl.select().execute()) == [(None, 123),
+                                                    (456, None)]
finally:
tbl.drop()
+
def full_text_search_missing():
-    """Test if full text search is not implemented and return False if
-    it is and True otherwise."""
+    """Return True if full text search is not implemented on the
+    test database, False if it is."""
try:
- testing.db.execute("CREATE VIRTUAL TABLE t using FTS3;")
- testing.db.execute("DROP TABLE t;")
+ testing.db.execute('CREATE VIRTUAL TABLE t using FTS3;')
+ testing.db.execute('DROP TABLE t;')
return False
except:
return True
+
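+# Editor's sketch, not from the original suite: the probe above in
+# isolation against an in-memory database; requires an SQLite build
+# with FTS3 compiled in:
+#
+#     import sqlite3
+#     con = sqlite3.connect(':memory:')
+#     con.execute('CREATE VIRTUAL TABLE t USING FTS3 (body TEXT)')
+#     con.execute("INSERT INTO t (body) VALUES ('hello world')")
+#     assert con.execute(
+#         "SELECT rowid FROM t WHERE body MATCH 'hello'").fetchall()
+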
class MatchTest(TestBase, AssertsCompiledSQL):
+
__only_on__ = 'sqlite'
- __skip_if__ = (full_text_search_missing, )
+    __skip_if__ = (full_text_search_missing,)
@classmethod
def setup_class(cls):
global metadata, cattable, matchtable
metadata = MetaData(testing.db)
-
testing.db.execute("""
CREATE VIRTUAL TABLE cattable using FTS3 (
id INTEGER NOT NULL,
)
""")
cattable = Table('cattable', metadata, autoload=True)
-
testing.db.execute("""
CREATE VIRTUAL TABLE matchtable using FTS3 (
id INTEGER NOT NULL,
""")
matchtable = Table('matchtable', metadata, autoload=True)
metadata.create_all()
-
- cattable.insert().execute([
- {'id': 1, 'description': 'Python'},
- {'id': 2, 'description': 'Ruby'},
- ])
- matchtable.insert().execute([
- {'id': 1, 'title': 'Agile Web Development with Rails', 'category_id': 2},
- {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
- {'id': 3, 'title': "Programming Matz's Ruby", 'category_id': 2},
- {'id': 4, 'title': 'The Definitive Guide to Django', 'category_id': 1},
- {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1}
- ])
+        cattable.insert().execute([{'id': 1, 'description': 'Python'},
+                                   {'id': 2, 'description': 'Ruby'}])
+        matchtable.insert().execute([
+            {'id': 1, 'title': 'Agile Web Development with Rails',
+             'category_id': 2},
+            {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
+            {'id': 3, 'title': "Programming Matz's Ruby",
+             'category_id': 2},
+            {'id': 4, 'title': 'The Definitive Guide to Django',
+             'category_id': 1},
+            {'id': 5, 'title': 'Python in a Nutshell',
+             'category_id': 1},
+            ])
@classmethod
def teardown_class(cls):
metadata.drop_all()
def test_expression(self):
- self.assert_compile(matchtable.c.title.match('somstr'), "matchtable.title MATCH ?")
+ self.assert_compile(matchtable.c.title.match('somstr'),
+ 'matchtable.title MATCH ?')
def test_simple_match(self):
- results = matchtable.select().where(matchtable.c.title.match('python')).order_by(matchtable.c.id).execute().fetchall()
+        results = matchtable.select().\
+            where(matchtable.c.title.match('python')).\
+            order_by(matchtable.c.id).execute().fetchall()
eq_([2, 5], [r.id for r in results])
def test_simple_prefix_match(self):
- results = matchtable.select().where(matchtable.c.title.match('nut*')).execute().fetchall()
+        results = matchtable.select().\
+            where(matchtable.c.title.match('nut*')).\
+            execute().fetchall()
eq_([5], [r.id for r in results])
def test_or_match(self):
- results2 = matchtable.select().where(matchtable.c.title.match('nutshell OR ruby'),
- ).order_by(matchtable.c.id).execute().fetchall()
+        results2 = matchtable.select().\
+            where(matchtable.c.title.match('nutshell OR ruby')).\
+            order_by(matchtable.c.id).execute().fetchall()
eq_([3, 5], [r.id for r in results2])
-
def test_and_match(self):
- results2 = matchtable.select().where(matchtable.c.title.match('python nutshell'),
- ).execute().fetchall()
+        results2 = matchtable.select().\
+            where(matchtable.c.title.match('python nutshell')).\
+            execute().fetchall()
eq_([5], [r.id for r in results2])
def test_match_across_joins(self):
- results = matchtable.select().where(and_(cattable.c.id==matchtable.c.category_id,
- cattable.c.description.match('Ruby'))
- ).order_by(matchtable.c.id).execute().fetchall()
+        results = matchtable.select().\
+            where(and_(cattable.c.id == matchtable.c.category_id,
+                       cattable.c.description.match('Ruby'))).\
+            order_by(matchtable.c.id).execute().fetchall()
eq_([1, 3], [r.id for r in results])
+
class TestAutoIncrement(TestBase, AssertsCompiledSQL):
def test_sqlite_autoincrement(self):
- table = Table('autoinctable', MetaData(),
- Column('id', Integer, primary_key=True),
- Column('x', Integer, default=None),
- sqlite_autoincrement=True)
- self.assert_compile(
- schema.CreateTable(table),
- "CREATE TABLE autoinctable (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, x INTEGER)",
- dialect=sqlite.dialect()
- )
+        table = Table('autoinctable', MetaData(),
+                      Column('id', Integer, primary_key=True),
+                      Column('x', Integer, default=None),
+                      sqlite_autoincrement=True)
+        self.assert_compile(schema.CreateTable(table),
+                            'CREATE TABLE autoinctable (id INTEGER '
+                            'NOT NULL PRIMARY KEY AUTOINCREMENT, '
+                            'x INTEGER)',
+                            dialect=sqlite.dialect())
def test_sqlite_autoincrement_constraint(self):
- table = Table('autoinctable', MetaData(),
- Column('id', Integer, primary_key=True),
- Column('x', Integer, default=None),
- UniqueConstraint('x'),
- sqlite_autoincrement=True)
- self.assert_compile(
- schema.CreateTable(table),
- "CREATE TABLE autoinctable (id INTEGER NOT NULL "
- "PRIMARY KEY AUTOINCREMENT, x INTEGER, UNIQUE (x))",
- dialect=sqlite.dialect()
- )
+ table = Table(
+ 'autoinctable',
+ MetaData(),
+ Column('id', Integer, primary_key=True),
+ Column('x', Integer, default=None),
+ UniqueConstraint('x'),
+ sqlite_autoincrement=True,
+ )
+ self.assert_compile(schema.CreateTable(table),
+ 'CREATE TABLE autoinctable (id INTEGER NOT '
+ 'NULL PRIMARY KEY AUTOINCREMENT, x '
+ 'INTEGER, UNIQUE (x))',
+ dialect=sqlite.dialect())
def test_sqlite_no_autoincrement(self):
- table = Table('noautoinctable', MetaData(),
- Column('id', Integer, primary_key=True),
- Column('x', Integer, default=None))
- self.assert_compile(
- schema.CreateTable(table),
- "CREATE TABLE noautoinctable (id INTEGER NOT NULL, x INTEGER, PRIMARY KEY (id))",
- dialect=sqlite.dialect()
- )
-
+        table = Table('noautoinctable', MetaData(),
+                      Column('id', Integer, primary_key=True),
+                      Column('x', Integer, default=None))
+ self.assert_compile(schema.CreateTable(table),
+ 'CREATE TABLE noautoinctable (id INTEGER '
+ 'NOT NULL, x INTEGER, PRIMARY KEY (id))',
+ dialect=sqlite.dialect())