- Importing testenv has no side effects - explicit functions provide behavior similar to the old immediate behavior of testbase
- testing.db has the configured db
- Fixed up the perf/* scripts
-import testbase
+import testenv; testenv.configure_for_tests()
import unittest
import orm.alltests as orm
if __name__ == '__main__':
- testbase.main(suite())
+ testenv.main(suite())
-import testbase
+import testenv; testenv.configure_for_tests()
import unittest
def suite():
if __name__ == '__main__':
- testbase.main(suite())
+ testenv.main(suite())
-import testbase
+import testenv; testenv.configure_for_tests()
import sqlalchemy.topological as topological
from sqlalchemy import util
from testlib import *
assert False, "Tuple not in dependency tree: " + str(tuple) + " " + str(node)
for c in node[2]:
assert_tuple(tuple, c)
-
+
for tuple in tuples:
assert_tuple(list(tuple), node)
assert_unique(c)
assert_unique(node)
assert len(collection) == 0
-
+
def testsort(self):
rootnode = 'root'
node2 = 'node2'
head1 = topological.sort_as_tree(tuples, [node1, node2, node3])
head2 = topological.sort_as_tree(tuples, [node3, node1, node2])
head3 = topological.sort_as_tree(tuples, [node3, node2, node1])
-
+
# TODO: figure out a "node == node2" function
#self.assert_(str(head1) == str(head2) == str(head3))
print "\n" + str(head1)
self.assert_sort(tuples, head)
def testsort5(self):
- # this one, depenending on the weather,
+        # this one, depending on the weather,
node1 = 'node1' #'00B94190'
node2 = 'node2' #'00B94990'
node3 = 'node3' #'00B9A9B0'
allitems = [node1, node2, node3, node4]
head = topological.sort_as_tree(tuples, allitems, with_cycles=True)
self.assert_sort(tuples, head)
-
+
def testcircular2(self):
# this condition was arising from ticket:362
# and was not treated properly by topological sort
]
head = topological.sort_as_tree(tuples, [], with_cycles=True)
self.assert_sort(tuples, head)
-
+
def testcircular3(self):
nodes = {}
tuples = [('Question', 'Issue'), ('ProviderService', 'Issue'), ('Provider', 'Question'), ('Question', 'Provider'), ('ProviderService', 'Question'), ('Provider', 'ProviderService'), ('Question', 'Answer'), ('Issue', 'Question')]
head = topological.sort_as_tree(tuples, [], with_cycles=True)
self.assert_sort(tuples, head)
-
+
def testbigsort(self):
tuples = []
for i in range(0,1500, 2):
tuples.append((i, i+1))
head = topological.sort_as_tree(tuples, [])
-
-
-
+
+
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
"""Tests exceptions and DB-API exception wrapping."""
-
-import testbase
+import testenv; testenv.configure_for_tests()
import sys, unittest
import exceptions as stdlib_exceptions
from sqlalchemy import exceptions as sa_exceptions
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import unittest
from sqlalchemy import util, sql, exceptions
from testlib import *
self.assert_(o.keys() == ['a', 'b', 'c', 'd', 'e', 'f'])
self.assert_(o.values() == [1, 2, 3, 4, 5, 6])
-class OrderedSetTest(PersistTest):
+class OrderedSetTest(PersistTest):
def test_mutators_against_iter(self):
# testing a set modified against an iterator
o = util.OrderedSet([3,2, 4, 5])
-
- self.assertEquals(o.difference(iter([3,4])), util.OrderedSet([2,5]))
- self.assertEquals(o.intersection(iter([3,4, 6])), util.OrderedSet([3, 4]))
+ self.assertEquals(o.difference(iter([3,4])),
+ util.OrderedSet([2,5]))
+ self.assertEquals(o.intersection(iter([3,4, 6])),
+ util.OrderedSet([3, 4]))
+ self.assertEquals(o.union(iter([3,4, 6])),
+ util.OrderedSet([2, 3, 4, 5, 6]))
- self.assertEquals(o.union(iter([3,4, 6])), util.OrderedSet([2, 3, 4, 5, 6]))
-
-
class ColumnCollectionTest(PersistTest):
def test_in(self):
cc = sql.ColumnCollection()
return self.value != other.value
else:
return True
-
class HashEqOverride(object):
def __init__(self, value=None):
self.value = value
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.databases import access
from testlib import *
+
class BasicTest(AssertMixin):
# A simple import of the database/ module should work on all systems.
def test_import(self):
# we got this far, right?
return True
+
+
+if __name__ == "__main__":
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import unittest
+
def suite():
modules_to_test = (
'dialect.access',
'dialect.firebird',
'dialect.informix',
+ 'dialect.maxdb',
'dialect.mssql',
'dialect.mysql',
'dialect.oracle',
'dialect.postgres',
'dialect.sqlite',
+ 'dialect.sybase',
)
alltests = unittest.TestSuite()
for name in modules_to_test:
return alltests
-
if __name__ == '__main__':
- testbase.main(suite())
+ testenv.main(suite())
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.databases import firebird
from sqlalchemy.exceptions import ProgrammingError
__only_on__ = 'firebird'
def setUpAll(self):
- con = testbase.db.connect()
+ con = testing.db.connect()
try:
con.execute('CREATE DOMAIN int_domain AS INTEGER DEFAULT 42 NOT NULL')
con.execute('CREATE DOMAIN str_domain AS VARCHAR(255)')
END''')
def tearDownAll(self):
- con = testbase.db.connect()
+ con = testing.db.connect()
con.execute('DROP TABLE testtable')
con.execute('DROP DOMAIN int_domain')
con.execute('DROP DOMAIN str_domain')
con.execute('DROP GENERATOR gen_testtable_id')
def test_table_is_reflected(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
table = Table('testtable', metadata, autoload=True)
self.assertEquals(set(table.columns.keys()),
set(['question', 'answer', 'remark', 'photo', 'd', 't', 'dt']),
class MiscFBTests(PersistTest):
__only_on__ = 'firebird'
-
+
def test_strlen(self):
# On FB the length() function is implemented by an external
# UDF, strlen(). Various SA tests fail because they pass a
# parameter to it, and that does not work (it always results
# the maximum string length the UDF was declared to accept).
# This test checks that at least it works ok in other cases.
-
- meta = MetaData(testbase.db)
+
+ meta = MetaData(testing.db)
t = Table('t1', meta,
Column('id', Integer, Sequence('t1idseq'), primary_key=True),
Column('name', String(10))
meta.drop_all()
def test_server_version_info(self):
- version = testbase.db.dialect.server_version_info(testbase.db.connect())
+ version = testing.db.dialect.server_version_info(testing.db.connect())
assert len(version) == 3, "Got strange version info: %s" % repr(version)
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.databases import informix
from testlib import *
+
class BasicTest(AssertMixin):
# A simple import of the database/ module should work on all systems.
def test_import(self):
# we got this far, right?
return True
+
+
+if __name__ == "__main__":
+ testenv.main()
"""MaxDB-specific tests."""
-import testbase
+import testenv; testenv.configure_for_tests()
import StringIO, sys
from sqlalchemy import *
from sqlalchemy import exceptions, sql
decimal assignment and selection behavior.
"""
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
try:
if isinstance(tabledef, basestring):
# run textual CREATE TABLE
- testbase.db.execute(tabledef)
+ testing.db.execute(tabledef)
else:
_t = tabledef.tometadata(meta)
_t.create()
tuple([2] + vals)])
finally:
try:
- testbase.db.execute("DROP TABLE dectest")
+ testing.db.execute("DROP TABLE dectest")
except exceptions.DatabaseError:
pass
def test_assorted_type_aliases(self):
"""Ensures that aliased types are reflected properly."""
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
try:
- testbase.db.execute("""
+ testing.db.execute("""
CREATE TABLE assorted (
c1 INT,
c2 BINARY(2),
self.assert_(isinstance(col.type, expected[i]))
finally:
try:
- testbase.db.execute("DROP TABLE assorted")
+ testing.db.execute("DROP TABLE assorted")
except exceptions.DatabaseError:
pass
__only_on__ = 'maxdb'
def test_dbapi_breaks_sequences(self):
- con = testbase.db.connect().connection
+ con = testing.db.connect().connection
cr = con.cursor()
cr.execute('CREATE SEQUENCE busto START WITH 1 INCREMENT BY 1')
cr.execute('DROP SEQUENCE busto')
def test_dbapi_breaks_mod_binds(self):
- con = testbase.db.connect().connection
+ con = testing.db.connect().connection
cr = con.cursor()
# OK
cr.execute('SELECT MOD(?, 2) FROM DUAL', [3])
def test_dbapi_breaks_close(self):
- dialect = testbase.db.dialect
- cargs, ckw = dialect.create_connect_args(testbase.db.url)
+ dialect = testing.db.dialect
+ cargs, ckw = dialect.create_connect_args(testing.db.url)
# There doesn't seem to be a way to test for this as it occurs in
# regular usage- the warning doesn't seem to go through 'warnings'.
self.assert_(True)
def test_modulo_operator(self):
- st = str(select([sql.column('col') % 5]).compile(testbase.db))
+ st = str(select([sql.column('col') % 5]).compile(testing.db))
self.assertEquals(st, 'SELECT mod(col, ?) FROM DUAL')
+
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import re
from sqlalchemy import *
from sqlalchemy.sql import table, column
from sqlalchemy.databases import mssql
from testlib import *
-# TODO: migrate all MS-SQL tests here
class CompileTest(SQLCompileTest):
__dialect__ = mssql.MSSQLDialect()
__only_on__ = 'mssql'
def testidentity(self):
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
table = Table(
'identity_test', meta,
Column('col1', Integer, Sequence('fred', 2, 3), primary_key=True)
)
table.create()
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
try:
table2 = Table('identity_test', meta2, autoload=True)
assert table2.c['col1'].sequence.start == 2
__only_on__ = 'mssql'
def test_fetchid_trigger(self):
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
t1 = Table('t1', meta,
Column('id', Integer, Sequence('fred', 100, 1), primary_key=True),
Column('descr', String(200)))
Column('id', Integer, Sequence('fred', 200, 1), primary_key=True),
Column('descr', String(200)))
meta.create_all()
- con = testbase.db.connect()
+ con = testing.db.connect()
con.execute("""create trigger paj on t1 for insert as
insert into t2 (descr) select descr from inserted""")
meta.drop_all()
def test_insertid_schema(self):
- meta = MetaData(testbase.db)
- con = testbase.db.connect()
+ meta = MetaData(testing.db)
+ con = testing.db.connect()
con.execute('create schema paj')
tbl = Table('test', meta, Column('id', Integer, primary_key=True), schema='paj')
tbl.create()
con.execute('drop schema paj')
def test_insertid_reserved(self):
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
table = Table(
'select', meta,
Column('col', Integer, primary_key=True)
)
table.create()
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
try:
table.insert().execute(col=7)
finally:
table.drop()
def test_select_limit_nooffset(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
users = Table('query_users', metadata,
Column('user_id', INT, primary_key = True),
def setUpAll(self):
global foo, metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
foo = Table('foo', metadata,
Column('id', Integer, Sequence('foo_id_seq'),
primary_key=True),
mapper(Foo, foo)
metadata.create_all()
- sess = create_session(bind=testbase.db)
+ sess = create_session(bind=testing.db)
for i in range(100):
sess.save(Foo(bar=i, range=i%10))
sess.flush()
clear_mappers()
def test_slice_mssql(self):
- sess = create_session(bind=testbase.db)
+ sess = create_session(bind=testing.db)
query = sess.query(Foo)
orig = query.all()
assert list(query[:10]) == orig[:10]
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import sets
from sqlalchemy import *
from sqlalchemy import sql, exceptions
__only_on__ = 'mysql'
def test_basic(self):
- meta1 = MetaData(testbase.db)
+ meta1 = MetaData(testing.db)
table = Table(
'mysql_types', meta1,
Column('id', Integer, primary_key=True),
try:
table.drop(checkfirst=True)
table.create()
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
t2 = Table('mysql_types', meta2, autoload=True)
assert isinstance(t2.c.num1.type, mysql.MSInteger)
assert t2.c.num1.type.unsigned
'SMALLINT(4) UNSIGNED ZEROFILL'),
]
- table_args = ['test_mysql_numeric', MetaData(testbase.db)]
+ table_args = ['test_mysql_numeric', MetaData(testing.db)]
for index, spec in enumerate(columns):
type_, args, kw, res = spec
table_args.append(Column('c%s' % index, type_(*args, **kw)))
numeric_table = Table(*table_args)
- gen = testbase.db.dialect.schemagenerator(testbase.db.dialect, testbase.db, None, None)
+ gen = testing.db.dialect.schemagenerator(testing.db.dialect, testing.db, None, None)
for col in numeric_table.c:
index = int(col.name[1:])
'''ENUM('foo','bar') UNICODE''')
]
- table_args = ['test_mysql_charset', MetaData(testbase.db)]
+ table_args = ['test_mysql_charset', MetaData(testing.db)]
for index, spec in enumerate(columns):
type_, args, kw, res = spec
table_args.append(Column('c%s' % index, type_(*args, **kw)))
charset_table = Table(*table_args)
- gen = testbase.db.dialect.schemagenerator(testbase.db.dialect, testbase.db, None, None)
+ gen = testing.db.dialect.schemagenerator(testing.db.dialect, testing.db, None, None)
for col in charset_table.c:
index = int(col.name[1:])
def test_bit_50(self):
"""Exercise BIT types on 5.0+ (not valid for all engine types)"""
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
bit_table = Table('mysql_bits', meta,
Column('b1', mysql.MSBit),
Column('b2', mysql.MSBit()),
try:
meta.create_all()
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
reflected = Table('mysql_bits', meta2, autoload=True)
for table in bit_table, reflected:
def test_boolean(self):
"""Test BOOL/TINYINT(1) compatability and reflection."""
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
bool_table = Table('mysql_bool', meta,
Column('b1', BOOLEAN),
Column('b2', mysql.MSBoolean),
roundtrip([True, True, True, True], [True, True, 1, 1])
roundtrip([False, False, 0, 0], [False, False, 0, 0])
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
# replace with reflected
table = Table('mysql_bool', meta2, autoload=True)
self.assert_eq(colspec(table.c.b3), 'b3 BOOL')
def test_timestamp(self):
"""Exercise funky TIMESTAMP default syntax."""
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
try:
columns = [
self.assert_eq(colspec(t.c.t), "t %s" % expected)
self.assert_(repr(t.c.t))
t.create()
- r = Table('mysql_ts%s' % idx, MetaData(testbase.db),
+ r = Table('mysql_ts%s' % idx, MetaData(testing.db),
autoload=True)
if len(spec) > 1:
self.assert_(r.c.t is not None)
def test_year(self):
"""Exercise YEAR."""
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
year_table = Table('mysql_year', meta,
Column('y1', mysql.MSYear),
Column('y2', mysql.MSYear),
self.assert_(repr(col))
try:
year_table.create()
- reflected = Table('mysql_year', MetaData(testbase.db),
+ reflected = Table('mysql_year', MetaData(testing.db),
autoload=True)
for table in year_table, reflected:
def test_set(self):
"""Exercise the SET type."""
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
set_table = Table('mysql_set', meta,
Column('s1', mysql.MSSet("'dq'", "'sq'")),
Column('s2', mysql.MSSet("'a'")),
self.assert_(repr(col))
try:
set_table.create()
- reflected = Table('mysql_set', MetaData(testbase.db),
+ reflected = Table('mysql_set', MetaData(testing.db),
autoload=True)
for table in set_table, reflected:
def test_enum(self):
"""Exercise the ENUM type."""
- db = testbase.db
- enum_table = Table('mysql_enum', MetaData(testbase.db),
+ db = testing.db
+ enum_table = Table('mysql_enum', MetaData(testing.db),
Column('e1', mysql.MSEnum("'a'", "'b'")),
Column('e2', mysql.MSEnum("'a'", "'b'"),
nullable=False),
# This is known to fail with MySQLDB 1.2.2 beta versions
# which return these as sets.Set(['a']), sets.Set(['b'])
# (even on Pythons with __builtin__.set)
- if testbase.db.dialect.dbapi.version_info < (1, 2, 2, 'beta', 3) and \
- testbase.db.dialect.dbapi.version_info >= (1, 2, 2):
+ if testing.db.dialect.dbapi.version_info < (1, 2, 2, 'beta', 3) and \
+ testing.db.dialect.dbapi.version_info >= (1, 2, 2):
# these mysqldb seem to always uses 'sets', even on later pythons
import sets
def convert(value):
# MySQL 3.23 can't handle an ENUM of ''....
- enum_table = Table('mysql_enum', MetaData(testbase.db),
+ enum_table = Table('mysql_enum', MetaData(testing.db),
Column('e1', mysql.MSEnum("'a'")),
Column('e2', mysql.MSEnum("''")),
Column('e3', mysql.MSEnum("'a'", "''")),
self.assert_(repr(col))
try:
enum_table.create()
- reflected = Table('mysql_enum', MetaData(testbase.db),
+ reflected = Table('mysql_enum', MetaData(testing.db),
autoload=True)
for t in enum_table, reflected:
assert t.c.e1.type.enums == ["a"]
def test_default_reflection(self):
"""Test reflection of column defaults."""
- def_table = Table('mysql_def', MetaData(testbase.db),
+ def_table = Table('mysql_def', MetaData(testing.db),
Column('c1', String(10), PassiveDefault('')),
Column('c2', String(10), PassiveDefault('0')),
Column('c3', String(10), PassiveDefault('abc')))
try:
def_table.create()
- reflected = Table('mysql_def', MetaData(testbase.db),
+ reflected = Table('mysql_def', MetaData(testing.db),
autoload=True)
for t in def_table, reflected:
assert t.c.c1.default.arg == ''
columns = [Column('c%i' % (i + 1), t[0]) for i, t in enumerate(specs)]
- db = testbase.db
+ db = testing.db
m = MetaData(db)
t_table = Table('mysql_types', m, *columns)
try:
m.drop_all()
def test_autoincrement(self):
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
try:
Table('ai_1', meta,
Column('int_y', Integer, primary_key=True),
table_names = ['ai_1', 'ai_2', 'ai_3', 'ai_4',
'ai_5', 'ai_6', 'ai_7', 'ai_8']
- mr = MetaData(testbase.db)
+ mr = MetaData(testing.db)
mr.reflect(only=table_names)
for tbl in [mr.tables[name] for name in table_names]:
def colspec(c):
- return testbase.db.dialect.schemagenerator(testbase.db.dialect,
- testbase.db, None, None).get_column_specification(c)
+ return testing.db.dialect.schemagenerator(testing.db.dialect,
+ testing.db, None, None).get_column_specification(c)
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.sql import table, column
from sqlalchemy.databases import oracle
__only_on__ = 'oracle'
def setUpAll(self):
- testbase.db.execute("""
+ testing.db.execute("""
create or replace procedure foo(x_in IN number, x_out OUT number, y_out OUT number) IS
retval number;
begin
""")
def test_out_params(self):
- result = testbase.db.execute(text("begin foo(:x, :y, :z); end;", bindparams=[bindparam('x', Numeric), outparam('y', Numeric), outparam('z', Numeric)]), x=5)
+ result = testing.db.execute(text("begin foo(:x, :y, :z); end;", bindparams=[bindparam('x', Numeric), outparam('y', Numeric), outparam('z', Numeric)]), x=5)
assert result.out_parameters == {'y':10, 'z':75}, result.out_parameters
print result.out_parameters
def tearDownAll(self):
- testbase.db.execute("DROP PROCEDURE foo")
+ testing.db.execute("DROP PROCEDURE foo")
class CompileTest(SQLCompileTest):
def test_reflect_raw(self):
types_table = Table(
- 'all_types', MetaData(testbase.db),
+ 'all_types', MetaData(testing.db),
Column('owner', String(30), primary_key=True),
Column('type_name', String(30), primary_key=True),
autoload=True,
[[row[k] for k in row.keys()] for row in types_table.select().execute().fetchall()]
def test_longstring(self):
- metadata = MetaData(testbase.db)
- testbase.db.execute("""
+ metadata = MetaData(testing.db)
+ testing.db.execute("""
CREATE TABLE Z_TEST
(
ID NUMERIC(22) PRIMARY KEY,
t.insert().execute(id=1.0, add_user='foobar')
assert t.select().execute().fetchall() == [(1, 'foobar')]
finally:
- testbase.db.execute("DROP TABLE Z_TEST")
+ testing.db.execute("DROP TABLE Z_TEST")
class SequenceTest(SQLCompileTest):
def test_basic(self):
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import datetime
from sqlalchemy import *
from sqlalchemy import exceptions
from testlib import *
from sqlalchemy.sql import table, column
+
class SequenceTest(SQLCompileTest):
def test_basic(self):
seq = Sequence("my_seq_no_schema")
@testing.exclude('postgres', '<', (8, 2))
def test_update_returning(self):
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
table = Table('tables', meta,
Column('id', Integer, primary_key=True),
Column('persons', Integer),
@testing.exclude('postgres', '<', (8, 2))
def test_insert_returning(self):
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
table = Table('tables', meta,
Column('id', Integer, primary_key=True),
Column('persons', Integer),
result3 = table.insert(postgres_returning=[(table.c.id*2).label('double_id')]).execute({'persons': 4, 'full': False})
self.assertEqual([dict(row) for row in result3], [{'double_id':8}])
- result4 = testbase.db.execute('insert into tables (id, persons, "full") values (5, 10, true) returning persons')
+ result4 = testing.db.execute('insert into tables (id, persons, "full") values (5, 10, true) returning persons')
self.assertEqual([dict(row) for row in result4], [{'persons': 10}])
finally:
table.drop()
def setUpAll(self):
global metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
def tearDown(self):
metadata.drop_all()
# note that the test framework doesnt capture the "preexecute" of a seqeuence
# or default. we just see it in the bind params.
- self.assert_sql(testbase.db, go, [], with_sequences=[
+ self.assert_sql(testing.db, go, [], with_sequences=[
(
"INSERT INTO testtable (id, data) VALUES (:id, :data)",
{'id':30, 'data':'d1'}
# test the same series of events using a reflected
# version of the table
- m2 = MetaData(testbase.db)
+ m2 = MetaData(testing.db)
table = Table(table.name, m2, autoload=True)
def go():
table.insert(inline=True).execute({'id':33, 'data':'d7'})
table.insert(inline=True).execute({'data':'d8'})
- self.assert_sql(testbase.db, go, [], with_sequences=[
+ self.assert_sql(testing.db, go, [], with_sequences=[
(
"INSERT INTO testtable (id, data) VALUES (:id, :data)",
{'id':30, 'data':'d1'}
table.insert(inline=True).execute({'id':33, 'data':'d7'})
table.insert(inline=True).execute({'data':'d8'})
- self.assert_sql(testbase.db, go, [], with_sequences=[
+ self.assert_sql(testing.db, go, [], with_sequences=[
(
"INSERT INTO testtable (id, data) VALUES (:id, :data)",
{'id':30, 'data':'d1'}
# test the same series of events using a reflected
# version of the table
- m2 = MetaData(testbase.db)
+ m2 = MetaData(testing.db)
table = Table(table.name, m2, autoload=True)
table.insert().execute({'id':30, 'data':'d1'})
try:
__only_on__ = 'postgres'
def setUpAll(self):
- con = testbase.db.connect()
+ con = testing.db.connect()
try:
con.execute('CREATE DOMAIN testdomain INTEGER NOT NULL DEFAULT 42')
con.execute('CREATE DOMAIN alt_schema.testdomain INTEGER DEFAULT 0')
con.execute('CREATE TABLE crosschema (question integer, answer alt_schema.testdomain)')
def tearDownAll(self):
- con = testbase.db.connect()
+ con = testing.db.connect()
con.execute('DROP TABLE testtable')
con.execute('DROP TABLE alt_schema.testtable')
con.execute('DROP TABLE crosschema')
con.execute('DROP DOMAIN alt_schema.testdomain')
def test_table_is_reflected(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
table = Table('testtable', metadata, autoload=True)
self.assertEquals(set(table.columns.keys()), set(['question', 'answer']), "Columns of reflected table didn't equal expected columns")
self.assertEquals(table.c.answer.type.__class__, postgres.PGInteger)
def test_domain_is_reflected(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
table = Table('testtable', metadata, autoload=True)
self.assertEquals(str(table.columns.answer.default.arg), '42', "Reflected default value didn't equal expected value")
self.assertFalse(table.columns.answer.nullable, "Expected reflected column to not be nullable.")
def test_table_is_reflected_alt_schema(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
table = Table('testtable', metadata, autoload=True, schema='alt_schema')
self.assertEquals(set(table.columns.keys()), set(['question', 'answer', 'anything']), "Columns of reflected table didn't equal expected columns")
self.assertEquals(table.c.anything.type.__class__, postgres.PGInteger)
def test_schema_domain_is_reflected(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
table = Table('testtable', metadata, autoload=True, schema='alt_schema')
self.assertEquals(str(table.columns.answer.default.arg), '0', "Reflected default value didn't equal expected value")
self.assertTrue(table.columns.answer.nullable, "Expected reflected column to be nullable.")
def test_crosschema_domain_is_reflected(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
table = Table('crosschema', metadata, autoload=True)
self.assertEquals(str(table.columns.answer.default.arg), '0', "Reflected default value didn't equal expected value")
self.assertTrue(table.columns.answer.nullable, "Expected reflected column to be nullable.")
__only_on__ = 'postgres'
def test_date_reflection(self):
- m1 = MetaData(testbase.db)
+ m1 = MetaData(testing.db)
t1 = Table('pgdate', m1,
Column('date1', DateTime(timezone=True)),
Column('date2', DateTime(timezone=False))
)
m1.create_all()
try:
- m2 = MetaData(testbase.db)
+ m2 = MetaData(testing.db)
t2 = Table('pgdate', m2, autoload=True)
assert t2.c.date1.type.timezone is True
assert t2.c.date2.type.timezone is False
m1.drop_all()
def test_pg_weirdchar_reflection(self):
- meta1 = MetaData(testbase.db)
+ meta1 = MetaData(testing.db)
subject = Table("subject", meta1,
Column("id$", Integer, primary_key=True),
)
)
meta1.create_all()
try:
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
subject = Table("subject", meta2, autoload=True)
referer = Table("referer", meta2, autoload=True)
print str(subject.join(referer).onclause)
meta1.drop_all()
def test_checksfor_sequence(self):
- meta1 = MetaData(testbase.db)
+ meta1 = MetaData(testing.db)
t = Table('mytable', meta1,
Column('col1', Integer, Sequence('fooseq')))
try:
- testbase.db.execute("CREATE SEQUENCE fooseq")
+ testing.db.execute("CREATE SEQUENCE fooseq")
t.create(checkfirst=True)
finally:
t.drop(checkfirst=True)
def test_distinct_on(self):
- t = Table('mytable', MetaData(testbase.db),
+ t = Table('mytable', MetaData(testing.db),
Column('id', Integer, primary_key=True),
Column('a', String(8)))
self.assertEquals(
def test_schema_reflection(self):
"""note: this test requires that the 'alt_schema' schema be separate and accessible by the test user"""
- meta1 = MetaData(testbase.db)
+ meta1 = MetaData(testing.db)
users = Table('users', meta1,
Column('user_id', Integer, primary_key = True),
Column('user_name', String(30), nullable = False),
)
meta1.create_all()
try:
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
addresses = Table('email_addresses', meta2, autoload=True, schema="alt_schema")
users = Table('users', meta2, mustexist=True, schema="alt_schema")
meta1.drop_all()
def test_schema_reflection_2(self):
- meta1 = MetaData(testbase.db)
+ meta1 = MetaData(testing.db)
subject = Table("subject", meta1,
Column("id", Integer, primary_key=True),
)
schema="alt_schema")
meta1.create_all()
try:
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
subject = Table("subject", meta2, autoload=True)
referer = Table("referer", meta2, schema="alt_schema", autoload=True)
print str(subject.join(referer).onclause)
meta1.drop_all()
def test_schema_reflection_3(self):
- meta1 = MetaData(testbase.db)
+ meta1 = MetaData(testing.db)
subject = Table("subject", meta1,
Column("id", Integer, primary_key=True),
schema='alt_schema_2'
meta1.create_all()
try:
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
subject = Table("subject", meta2, autoload=True, schema="alt_schema_2")
referer = Table("referer", meta2, schema="alt_schema", autoload=True)
print str(subject.join(referer).onclause)
that PassiveDefault upon insert."""
try:
- meta = MetaData(testbase.db)
- testbase.db.execute("""
+ meta = MetaData(testing.db)
+ testing.db.execute("""
CREATE TABLE speedy_users
(
speedy_user_id SERIAL PRIMARY KEY,
l = t.select().execute().fetchall()
assert l == [(1, 'user', 'lala')]
finally:
- testbase.db.execute("drop table speedy_users", None)
+ testing.db.execute("drop table speedy_users", None)
def test_create_partial_index(self):
tbl = Table('testtbl', MetaData(), Column('data',Integer))
def setUpAll(self):
global tztable, notztable, metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
# current_timestamp() in postgres is assumed to return TIMESTAMP WITH TIMEZONE
tztable = Table('tztable', metadata,
def test_with_timezone(self):
# get a date with a tzinfo
- somedate = testbase.db.connect().scalar(func.current_timestamp().select())
+ somedate = testing.db.connect().scalar(func.current_timestamp().select())
tztable.insert().execute(id=1, name='row1', date=somedate)
c = tztable.update(tztable.c.id==1).execute(name='newname')
print tztable.select(tztable.c.id==1).execute().fetchone()
def setUpAll(self):
global metadata, arrtable
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
arrtable = Table('arrtable', metadata,
Column('id', Integer, primary_key=True),
metadata.drop_all()
def test_reflect_array_column(self):
- metadata2 = MetaData(testbase.db)
+ metadata2 = MetaData(testing.db)
tbl = Table('arrtable', metadata2, autoload=True)
self.assertTrue(isinstance(tbl.c.intarr.type, postgres.PGArray))
self.assertTrue(isinstance(tbl.c.strarr.type, postgres.PGArray))
arrtable.delete().execute()
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
"""SQLite-specific tests."""
-import testbase
+import testenv; testenv.configure_for_tests()
import datetime
from sqlalchemy import *
from sqlalchemy import exceptions
__only_on__ = 'sqlite'
def test_date(self):
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
t = Table('testdate', meta,
Column('id', Integer, primary_key=True),
Column('adate', Date),
is updated in the future.
"""
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
t = Table('reserved', meta,
Column('safe', Integer),
Column('true', Integer),
"""Tests autoload of tables created with quoted column names."""
# This is quirky in sqlite.
- testbase.db.execute("""CREATE TABLE "django_content_type" (
+ testing.db.execute("""CREATE TABLE "django_content_type" (
"id" integer NOT NULL PRIMARY KEY,
"django_stuff" text NULL
)
""")
- testbase.db.execute("""
+ testing.db.execute("""
CREATE TABLE "django_admin_log" (
"id" integer NOT NULL PRIMARY KEY,
"action_time" datetime NOT NULL,
)
""")
try:
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
table1 = Table("django_admin_log", meta, autoload=True)
table2 = Table("django_content_type", meta, autoload=True)
j = table1.join(table2)
assert j.onclause == table1.c.content_type_id==table2.c.id
finally:
- testbase.db.execute("drop table django_admin_log")
- testbase.db.execute("drop table django_content_type")
+ testing.db.execute("drop table django_admin_log")
+ testing.db.execute("drop table django_content_type")
class InsertTest(AssertMixin):
@testing.exclude('sqlite', '<', (3, 4))
def test_empty_insert_pk1(self):
self._test_empty_insert(
- Table('a', MetaData(testbase.db),
+ Table('a', MetaData(testing.db),
Column('id', Integer, primary_key=True)))
@testing.exclude('sqlite', '<', (3, 4))
self.assertRaises(
exceptions.DBAPIError,
self._test_empty_insert,
- Table('b', MetaData(testbase.db),
+ Table('b', MetaData(testing.db),
Column('x', Integer, primary_key=True),
Column('y', Integer, primary_key=True)))
self.assertRaises(
exceptions.DBAPIError,
self._test_empty_insert,
- Table('c', MetaData(testbase.db),
+ Table('c', MetaData(testing.db),
Column('x', Integer, primary_key=True),
Column('y', Integer, PassiveDefault('123'),
primary_key=True)))
@testing.exclude('sqlite', '<', (3, 4))
def test_empty_insert_pk4(self):
self._test_empty_insert(
- Table('d', MetaData(testbase.db),
+ Table('d', MetaData(testing.db),
Column('x', Integer, primary_key=True),
Column('y', Integer, PassiveDefault('123'))))
@testing.exclude('sqlite', '<', (3, 4))
def test_empty_insert_nopk1(self):
self._test_empty_insert(
- Table('e', MetaData(testbase.db),
+ Table('e', MetaData(testing.db),
Column('id', Integer)))
@testing.exclude('sqlite', '<', (3, 4))
def test_empty_insert_nopk2(self):
self._test_empty_insert(
- Table('f', MetaData(testbase.db),
+ Table('f', MetaData(testing.db),
Column('x', Integer),
Column('y', Integer)))
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.databases import sybase
from testlib import *
+
class BasicTest(AssertMixin):
# A simple import of the database/ module should work on all systems.
def test_import(self):
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import unittest
def suite():
modules_to_test = (
# connectivity, execution
- 'engine.parseconnect',
- 'engine.pool',
+ 'engine.parseconnect',
+ 'engine.pool',
'engine.bind',
'engine.reconnect',
'engine.execute',
'engine.metadata',
'engine.transaction',
-
+
# schema/tables
- 'engine.reflection',
+ 'engine.reflection',
)
alltests = unittest.TestSuite()
return alltests
-
if __name__ == '__main__':
- testbase.main(suite())
+ testenv.main(suite())
"""tests the "bind" attribute/argument across schema, SQL, and ORM sessions,
including the deprecated versions of these arguments"""
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import engine, exceptions
from testlib import *
table = Table('test_table', metadata,
Column('foo', Integer))
for bind in (
- testbase.db,
- testbase.db.connect()
+ testing.db,
+ testing.db.connect()
):
for args in [
([], {'bind':bind}),
for meta in (MetaData,ThreadLocalMetaData):
for bind in (
- testbase.db,
- testbase.db.connect()
+ testing.db,
+ testing.db.connect()
):
metadata = meta()
table = Table('test_table', metadata,
def test_create_drop_constructor_bound(self):
for bind in (
- testbase.db,
- testbase.db.connect()
+ testing.db,
+ testing.db.connect()
):
try:
for args in (
Column('foo', Integer),
test_needs_acid=True,
)
- conn = testbase.db.connect()
+ conn = testing.db.connect()
metadata.create_all(bind=conn)
try:
trans = conn.begin()
metadata = MetaData()
table = Table('test_table', metadata,
Column('foo', Integer))
- metadata.create_all(bind=testbase.db)
+ metadata.create_all(bind=testing.db)
try:
for elem in [
table.select,
lambda **kwargs:text("select * from test_table", **kwargs)
]:
for bind in (
- testbase.db,
- testbase.db.connect()
+ testing.db,
+ testing.db.connect()
):
try:
e = elem(bind=bind)
finally:
if isinstance(bind, engine.Connection):
bind.close()
- metadata.drop_all(bind=testbase.db)
+ metadata.drop_all(bind=testing.db)
def test_session(self):
from sqlalchemy.orm import create_session, mapper
class Foo(object):
pass
mapper(Foo, table)
- metadata.create_all(bind=testbase.db)
+ metadata.create_all(bind=testing.db)
try:
- for bind in (testbase.db,
- testbase.db.connect()
+ for bind in (testing.db,
+ testing.db.connect()
):
try:
for args in ({'bind':bind},):
finally:
if isinstance(bind, engine.Connection):
bind.close()
- metadata.drop_all(bind=testbase.db)
+ metadata.drop_all(bind=testing.db)
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions
from testlib import *
class ExecuteTest(PersistTest):
def setUpAll(self):
global users, metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
users = Table('users', metadata,
Column('user_id', INT, primary_key = True),
Column('user_name', VARCHAR(20)),
metadata.create_all()
def tearDown(self):
- testbase.db.connect().execute(users.delete())
+ testing.db.connect().execute(users.delete())
def tearDownAll(self):
metadata.drop_all()
@testing.fails_on_everything_except('firebird', 'maxdb', 'sqlite')
def test_raw_qmark(self):
- for conn in (testbase.db, testbase.db.connect()):
+ for conn in (testing.db, testing.db.connect()):
conn.execute("insert into users (user_id, user_name) values (?, ?)", (1,"jack"))
conn.execute("insert into users (user_id, user_name) values (?, ?)", [2,"fred"])
conn.execute("insert into users (user_id, user_name) values (?, ?)", [3,"ed"], [4,"horse"])
@testing.fails_on_everything_except('mysql', 'postgres')
# some psycopg2 versions bomb this.
def test_raw_sprintf(self):
- for conn in (testbase.db, testbase.db.connect()):
+ for conn in (testing.db, testing.db.connect()):
conn.execute("insert into users (user_id, user_name) values (%s, %s)", [1,"jack"])
conn.execute("insert into users (user_id, user_name) values (%s, %s)", [2,"ed"], [3,"horse"])
conn.execute("insert into users (user_id, user_name) values (%s, %s)", 4, 'sally')
@testing.unsupported('mysql')
@testing.fails_on_everything_except('postgres')
def test_raw_python(self):
- for conn in (testbase.db, testbase.db.connect()):
+ for conn in (testing.db, testing.db.connect()):
conn.execute("insert into users (user_id, user_name) values (%(id)s, %(name)s)", {'id':1, 'name':'jack'})
conn.execute("insert into users (user_id, user_name) values (%(id)s, %(name)s)", {'id':2, 'name':'ed'}, {'id':3, 'name':'horse'})
conn.execute("insert into users (user_id, user_name) values (%(id)s, %(name)s)", id=4, name='sally')
@testing.fails_on_everything_except('sqlite')
def test_raw_named(self):
- for conn in (testbase.db, testbase.db.connect()):
+ for conn in (testing.db, testing.db.connect()):
conn.execute("insert into users (user_id, user_name) values (:id, :name)", {'id':1, 'name':'jack'})
conn.execute("insert into users (user_id, user_name) values (:id, :name)", {'id':2, 'name':'ed'}, {'id':3, 'name':'horse'})
conn.execute("insert into users (user_id, user_name) values (:id, :name)", id=4, name='sally')
conn.execute("delete from users")
def test_exception_wrapping(self):
- for conn in (testbase.db, testbase.db.connect()):
+ for conn in (testing.db, testing.db.connect()):
try:
conn.execute("osdjafioajwoejoasfjdoifjowejfoawejqoijwef")
assert False
assert True
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions
from testlib import *
metadata = MetaData()
t1 = Table('table1', metadata, Column('col1', Integer, primary_key=True),
Column('col2', String(20)))
- metadata.bind = testbase.db
+ metadata.bind = testing.db
metadata.create_all()
try:
assert t1.count().scalar() == 0
t1 = Table('table1', metadata, Column('col1', Integer, primary_key=True),
Column('col2', String(20)))
- metadata.bind = testbase.db
+ metadata.bind = testing.db
metadata.create_all()
try:
try:
assert str(e) == "Table 'table1' is already defined for this MetaData instance."
finally:
metadata.drop_all()
-
+
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import ConfigParser, StringIO
from sqlalchemy import *
from sqlalchemy import exceptions, pool, engine
import sqlalchemy.engine.url as url
from testlib import *
-
+
class ParseConnectTest(PersistTest):
def test_rfc1738(self):
for text in (
"""test that create_engine arguments of different types get propigated properly"""
def test_connect_query(self):
dbapi = MockDBAPI(foober='12', lala='18', fooz='somevalue')
-
+
# start the postgres dialect, but put our mock DBAPI as the module instead of psycopg
e = create_engine('postgres://scott:tiger@somehost/test?foober=12&lala=18&fooz=somevalue', module=dbapi)
c = e.connect()
assert e.pool._recycle == 50
assert e.url == url.make_url('postgres://scott:tiger@somehost/test?fooz=somevalue')
assert e.echo is True
-
+
def test_custom(self):
dbapi = MockDBAPI(foober=12, lala=18, hoho={'this':'dict'}, fooz='somevalue')
def connect():
return dbapi.connect(foober=12, lala=18, fooz='somevalue', hoho={'this':'dict'})
-
+
# start the postgres dialect, but put our mock DBAPI as the module instead of psycopg
e = create_engine('postgres://', creator=connect, module=dbapi)
c = e.connect()
-
+
def test_recycle(self):
dbapi = MockDBAPI(foober=12, lala=18, hoho={'this':'dict'}, fooz='somevalue')
e = create_engine('postgres://', pool_recycle=472, module=dbapi)
assert e.pool._recycle == 472
-
+
def test_badargs(self):
# good arg, use MockDBAPI to prevent oracle import errors
e = create_engine('oracle://', use_ansi=True, module=MockDBAPI())
-
+
try:
e = create_engine("foobar://", module=MockDBAPI())
assert False
except ImportError:
- assert True
-
+ assert True
+
# bad arg
try:
e = create_engine('postgres://', use_ansi=True, module=MockDBAPI())
assert False
except TypeError:
assert True
-
+
# bad arg
try:
e = create_engine('oracle://', lala=5, use_ansi=True, module=MockDBAPI())
assert False
except TypeError:
assert True
-
+
try:
e = create_engine('postgres://', lala=5, module=MockDBAPI())
assert False
except TypeError:
assert True
-
+
try:
e = create_engine('sqlite://', lala=5)
assert False
assert False
except TypeError:
assert True
-
+
e = create_engine('mysql://', module=MockDBAPI(), connect_args={'use_unicode':True}, convert_unicode=True)
-
+
e = create_engine('sqlite://', connect_args={'use_unicode':True}, convert_unicode=True)
try:
c = e.connect()
assert False
except exceptions.DBAPIError:
assert True
-
+
def test_urlattr(self):
"""test the url attribute on ``Engine``."""
-
+
e = create_engine('mysql://scott:tiger@localhost/test', module=MockDBAPI())
u = url.make_url('mysql://scott:tiger@localhost/test')
e2 = create_engine(u, module=MockDBAPI())
assert e.url.drivername == e2.url.drivername == 'mysql'
assert e.url.username == e2.url.username == 'scott'
assert e2.url is u
-
+
def test_poolargs(self):
"""test that connection pool args make it thru"""
e = create_engine('postgres://', creator=None, pool_recycle=50, echo_pool=None, module=MockDBAPI())
assert e.pool._recycle == 50
-
+
# these args work for QueuePool
e = create_engine('postgres://', max_overflow=8, pool_timeout=60, poolclass=pool.QueuePool, module=MockDBAPI())
def close(self):
pass
mock_dbapi = MockDBAPI()
-
+
if __name__ == "__main__":
- testbase.main()
-
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import threading, thread, time, gc
import sqlalchemy.pool as pool
import sqlalchemy.interfaces as interfaces
assert int(time.time() - now) == 2
def test_timeout_race(self):
- # test a race condition where the initial connecting threads all race to queue.Empty, then block on the mutex.
- # each thread consumes a connection as they go in. when the limit is reached, the remaining threads
- # go in, and get TimeoutError; even though they never got to wait for the timeout on queue.get().
- # the fix involves checking the timeout again within the mutex, and if so, unlocking and throwing them back to the start
- # of do_get()
+ # test a race condition where the initial connecting threads all race
+ # to queue.Empty, then block on the mutex. each thread consumes a
+ # connection as they go in. when the limit is reached, the remaining
+ # threads go in, and get TimeoutError; even though they never got to
+ # wait for the timeout on queue.get(). the fix involves checking the
+ # timeout again within the mutex, and if so, unlocking and throwing
+ # them back to the start of do_get()
p = pool.QueuePool(creator = lambda: mock_dbapi.connect('foo.db', delay=.05), pool_size = 2, max_overflow = 1, use_threadlocal = False, timeout=3)
timeouts = []
def checkout():
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import sys, weakref
from sqlalchemy import create_engine, exceptions, select
from testlib import *
for c in self.connections:
c.explode[0] = True
Error = MockDisconnect
-
+
class MockConnection(object):
def __init__(self, dbapi):
dbapi.connections[self] = True
return MockCursor(self)
def close(self):
pass
-
+
class MockCursor(object):
def __init__(self, parent):
self.explode = parent.explode
return
def close(self):
pass
-
+
class MockReconnectTest(PersistTest):
def setUp(self):
global db, dbapi
dbapi = MockDBAPI()
-
+
# create engine using our current dburi
db = create_engine('postgres://foo:bar@localhost/test', module=dbapi)
-
+
# monkeypatch disconnect checker
db.dialect.is_disconnect = lambda e: isinstance(e, MockDisconnect)
-
+
def test_reconnect(self):
"""test that an 'is_disconnect' condition will invalidate the connection, and additionally
dispose the previous connection pool and recreate."""
-
-
+
+
pid = id(db.pool)
-
+
# make a connection
conn = db.connect()
-
+
# connection works
conn.execute(select([1]))
-
+
# create a second connection within the pool, which we'll ensure also goes away
conn2 = db.connect()
conn2.close()
assert False
except exceptions.DBAPIError:
pass
-
+
# assert was invalidated
assert not conn.closed
assert conn.invalidated
-
+
# close shouldnt break
conn.close()
assert id(db.pool) != pid
-
+
# ensure all connections closed (pool was recycled)
assert len(dbapi.connections) == 0
-
+
conn =db.connect()
conn.execute(select([1]))
conn.close()
assert len(dbapi.connections) == 1
-
+
def test_invalidate_trans(self):
conn = db.connect()
trans = conn.begin()
assert False
except exceptions.DBAPIError:
pass
-
+
# assert was invalidated
assert len(dbapi.connections) == 0
assert not conn.closed
trans.rollback()
assert not trans.is_active
-
+
conn.execute(select([1]))
assert not conn.invalidated
-
+
assert len(dbapi.connections) == 1
-
+
def test_conn_reusable(self):
conn = db.connect()
-
+
conn.execute(select([1]))
assert len(dbapi.connections) == 1
-
+
dbapi.shutdown()
# raises error
# ensure all connections closed (pool was recycled)
assert len(dbapi.connections) == 0
-
+
# test reconnects
conn.execute(select([1]))
assert not conn.invalidated
assert len(dbapi.connections) == 1
-
+
class RealReconnectTest(PersistTest):
def setUp(self):
global engine
engine = engines.reconnecting_engine()
-
+
def tearDown(self):
engine.dispose()
-
+
def test_reconnect(self):
conn = engine.connect()
- self.assertEquals(conn.execute(select([1])).scalar(), 1)
+ self.assertEquals(conn.execute(select([1])).scalar(), 1)
assert not conn.closed
engine.test_shutdown()
assert conn.invalidated
assert conn.invalidated
- self.assertEquals(conn.execute(select([1])).scalar(), 1)
+ self.assertEquals(conn.execute(select([1])).scalar(), 1)
assert not conn.invalidated
# one more time
if not e.connection_invalidated:
raise
assert conn.invalidated
- self.assertEquals(conn.execute(select([1])).scalar(), 1)
+ self.assertEquals(conn.execute(select([1])).scalar(), 1)
assert not conn.invalidated
conn.close()
-
+
def test_close(self):
conn = engine.connect()
- self.assertEquals(conn.execute(select([1])).scalar(), 1)
+ self.assertEquals(conn.execute(select([1])).scalar(), 1)
assert not conn.closed
engine.test_shutdown()
conn.close()
conn = engine.connect()
- self.assertEquals(conn.execute(select([1])).scalar(), 1)
-
+ self.assertEquals(conn.execute(select([1])).scalar(), 1)
+
def test_with_transaction(self):
conn = engine.connect()
trans = conn.begin()
- self.assertEquals(conn.execute(select([1])).scalar(), 1)
+ self.assertEquals(conn.execute(select([1])).scalar(), 1)
assert not conn.closed
engine.test_shutdown()
assert str(e) == "Can't reconnect until invalid transaction is rolled back"
assert trans.is_active
-
+
trans.rollback()
assert not trans.is_active
assert conn.invalidated
- self.assertEquals(conn.execute(select([1])).scalar(), 1)
+ self.assertEquals(conn.execute(select([1])).scalar(), 1)
assert not conn.invalidated
-
-
+
+
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import pickle, StringIO, unicodedata
from sqlalchemy import *
from sqlalchemy import exceptions
use_function_defaults = testing.against('postgres', 'oracle', 'maxdb')
use_string_defaults = (use_function_defaults or
- testbase.db.engine.__module__.endswith('sqlite'))
+ testing.db.engine.__module__.endswith('sqlite'))
if use_function_defaults:
defval = func.current_date()
deftype2, deftype3 = Integer, Integer
defval2, defval3 = "15", "16"
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
users = Table('engine_users', meta,
Column('user_id', INT, primary_key = True),
users.drop()
def test_autoload_partial(self):
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
foo = Table('foo', meta,
Column('a', String(30)),
Column('b', String(30)),
)
meta.create_all()
try:
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
foo2 = Table('foo', meta2, autoload=True, include_columns=['b', 'f', 'e'])
# test that cols come back in original order
assert [c.name for c in foo2.c] == ['b', 'e', 'f']
"""test that you can override columns and create new foreign keys to other reflected tables
which have no foreign keys. this is common with MySQL MyISAM tables."""
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
users = Table('users', meta,
Column('id', Integer, primary_key=True),
Column('name', String(30)))
meta.create_all()
try:
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
a2 = Table('addresses', meta2,
Column('user_id', Integer, ForeignKey('users.id')),
autoload=True)
assert list(a2.c.user_id.foreign_keys)[0].parent is a2.c.user_id
assert u2.join(a2).onclause == u2.c.id==a2.c.user_id
- meta3 = MetaData(testbase.db)
+ meta3 = MetaData(testing.db)
u3 = Table('users', meta3, autoload=True)
a3 = Table('addresses', meta3,
Column('user_id', Integer, ForeignKey('users.id')),
assert u3.join(a3).onclause == u3.c.id==a3.c.user_id
- meta4 = MetaData(testbase.db)
+ meta4 = MetaData(testing.db)
u4 = Table('users', meta4,
Column('id', Integer, key='u_id', primary_key=True),
autoload=True)
def test_unknown_types(self):
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
t = Table("test", meta,
Column('foo', DateTime))
import sys
- dialect_module = sys.modules[testbase.db.dialect.__module__]
+ dialect_module = sys.modules[testing.db.dialect.__module__]
# we're relying on the presence of "ischema_names" in the
# dialect module, else we can't test this. we need to be able
dialect_module.ischema_names = {}
try:
try:
- m2 = MetaData(testbase.db)
+ m2 = MetaData(testing.db)
t2 = Table("test", m2, autoload=True)
assert False
except exceptions.SAWarning:
@testing.emits_warning('Did not recognize type')
def warns():
- m3 = MetaData(testbase.db)
+ m3 = MetaData(testing.db)
t3 = Table("test", m3, autoload=True)
assert t3.c.foo.type.__class__ == sqltypes.NullType
def test_override_fkandpkcol(self):
"""test that you can override columns which contain foreign keys to other reflected tables,
where the foreign key column is also a primary key column"""
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
users = Table('users', meta,
Column('id', Integer, primary_key=True),
Column('name', String(30)))
meta.create_all()
try:
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
a2 = Table('addresses', meta2,
Column('id', Integer, ForeignKey('users.id'), primary_key=True, ),
autoload=True)
#sess.save(add1)
#sess.flush()
- meta3 = MetaData(testbase.db)
+ meta3 = MetaData(testing.db)
u3 = Table('users', meta3, autoload=True)
a3 = Table('addresses', meta3,
Column('id', Integer, ForeignKey('users.id'), primary_key=True),
on columns which *do* already have that foreign key, and that the FK is not duped.
"""
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
users = Table('users', meta,
Column('id', Integer, primary_key=True),
Column('name', String(30)),
meta.create_all()
try:
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
a2 = Table('addresses', meta2,
Column('user_id',Integer, ForeignKey('users.id')),
autoload=True)
assert list(a2.c.user_id.foreign_keys)[0].parent is a2.c.user_id
assert u2.join(a2).onclause == u2.c.id==a2.c.user_id
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
u2 = Table('users', meta2,
Column('id', Integer, primary_key=True),
autoload=True)
def test_pks_not_uniques(self):
"""test that primary key reflection not tripped up by unique indexes"""
- testbase.db.execute("""
+ testing.db.execute("""
CREATE TABLE book (
id INTEGER NOT NULL,
title VARCHAR(100) NOT NULL,
PRIMARY KEY(id)
)""")
try:
- metadata = MetaData(bind=testbase.db)
+ metadata = MetaData(bind=testing.db)
book = Table('book', metadata, autoload=True)
assert book.c.id in book.primary_key
assert book.c.series not in book.primary_key
assert len(book.primary_key) == 1
finally:
- testbase.db.execute("drop table book")
+ testing.db.execute("drop table book")
def test_fk_error(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
slots_table = Table('slots', metadata,
Column('slot_id', Integer, primary_key=True),
Column('pkg_id', Integer, ForeignKey('pkgs.pkg_id')),
def test_composite_pks(self):
"""test reflection of a composite primary key"""
- testbase.db.execute("""
+ testing.db.execute("""
CREATE TABLE book (
id INTEGER NOT NULL,
isbn VARCHAR(50) NOT NULL,
PRIMARY KEY(id, isbn)
)""")
try:
- metadata = MetaData(bind=testbase.db)
+ metadata = MetaData(bind=testing.db)
book = Table('book', metadata, autoload=True)
assert book.c.id in book.primary_key
assert book.c.isbn in book.primary_key
assert book.c.series not in book.primary_key
assert len(book.primary_key) == 2
finally:
- testbase.db.execute("drop table book")
+ testing.db.execute("drop table book")
@testing.exclude('mysql', '<', (4, 1, 1))
def test_composite_fk(self):
"""test reflection of composite foreign keys"""
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
table = Table(
'multi', meta,
Column('multi_id', Integer, primary_key=True),
return (table_c, table2_c)
def test_pickle():
- meta.bind = testbase.db
+ meta.bind = testing.db
meta2 = pickle.loads(pickle.dumps(meta))
assert meta2.bind is None
meta3 = pickle.loads(pickle.dumps(meta2))
def test_pickle_via_reflect():
# this is the most common use case, pickling the results of a
# database reflection
- meta2 = MetaData(bind=testbase.db)
+ meta2 = MetaData(bind=testing.db)
t1 = Table('mytable', meta2, autoload=True)
t2 = Table('othertable', meta2, autoload=True)
meta3 = pickle.loads(pickle.dumps(meta2))
assert meta3.tables['mytable'] is not t1
return (meta3.tables['mytable'], meta3.tables['othertable'])
- meta.create_all(testbase.db)
+ meta.create_all(testing.db)
try:
for test, has_constraints in ((test_to_metadata, True), (test_pickle, True), (test_pickle_via_reflect, False)):
table_c, table2_c = test()
assert c.columns.contains_column(table_c.c.name)
assert not c.columns.contains_column(table.c.name)
finally:
- meta.drop_all(testbase.db)
+ meta.drop_all(testing.db)
def test_nonexistent(self):
self.assertRaises(exceptions.NoSuchTableError, Table,
'fake_table',
- MetaData(testbase.db), autoload=True)
+ MetaData(testing.db), autoload=True)
def testoverride(self):
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
table = Table(
'override_test', meta,
Column('col1', Integer, primary_key=True),
table.create()
# clear out table registry
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
try:
table = Table(
'override_test', meta2,
@testing.unsupported('oracle')
def testreserved(self):
# check a table that uses an SQL reserved name doesn't cause an error
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
table_a = Table('select', meta,
Column('not', Integer, primary_key=True),
Column('from', String(12), nullable=False),
index_c.drop()
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
try:
table_a2 = Table('select', meta2, autoload=True)
table_b2 = Table('false', meta2, autoload=True)
meta.drop_all()
def test_reflect_all(self):
- existing = testbase.db.table_names()
+ existing = testing.db.table_names()
names = ['rt_%s' % name for name in ('a','b','c','d','e')]
nameset = set(names)
self.assert_(name not in existing)
self.assert_('rt_f' not in existing)
- baseline = MetaData(testbase.db)
+ baseline = MetaData(testing.db)
for name in names:
Table(name, baseline, Column('id', Integer, primary_key=True))
baseline.create_all()
try:
- m1 = MetaData(testbase.db)
+ m1 = MetaData(testing.db)
self.assert_(not m1.tables)
m1.reflect()
self.assert_(nameset.issubset(set(m1.tables.keys())))
m2 = MetaData()
- m2.reflect(testbase.db, only=['rt_a', 'rt_b'])
+ m2.reflect(testing.db, only=['rt_a', 'rt_b'])
self.assert_(set(m2.tables.keys()) == set(['rt_a', 'rt_b']))
m3 = MetaData()
- c = testbase.db.connect()
+ c = testing.db.connect()
m3.reflect(bind=c, only=lambda name, meta: name == 'rt_c')
self.assert_(set(m3.tables.keys()) == set(['rt_c']))
- m4 = MetaData(testbase.db)
+ m4 = MetaData(testing.db)
try:
m4.reflect(only=['rt_a', 'rt_f'])
self.assert_(False)
except exceptions.InvalidRequestError, e:
self.assert_(e.args[0].endswith('(rt_f)'))
- m5 = MetaData(testbase.db)
+ m5 = MetaData(testing.db)
m5.reflect(only=[])
self.assert_(not m5.tables)
- m6 = MetaData(testbase.db)
+ m6 = MetaData(testing.db)
m6.reflect(only=lambda n, m: False)
self.assert_(not m6.tables)
- m7 = MetaData(testbase.db, reflect=True)
+ m7 = MetaData(testing.db, reflect=True)
self.assert_(nameset.issubset(set(m7.tables.keys())))
try:
if existing:
print "Other tables present in database, skipping some checks."
else:
- m9 = MetaData(testbase.db)
+ m9 = MetaData(testing.db)
m9.reflect()
self.assert_(not m9.tables)
def testcheckfirst(self):
try:
- assert not users.exists(testbase.db)
- users.create(bind=testbase.db)
- assert users.exists(testbase.db)
- users.create(bind=testbase.db, checkfirst=True)
- users.drop(bind=testbase.db)
- users.drop(bind=testbase.db, checkfirst=True)
- assert not users.exists(bind=testbase.db)
- users.create(bind=testbase.db, checkfirst=True)
- users.drop(bind=testbase.db)
+ assert not users.exists(testing.db)
+ users.create(bind=testing.db)
+ assert users.exists(testing.db)
+ users.create(bind=testing.db, checkfirst=True)
+ users.drop(bind=testing.db)
+ users.drop(bind=testing.db, checkfirst=True)
+ assert not users.exists(bind=testing.db)
+ users.create(bind=testing.db, checkfirst=True)
+ users.drop(bind=testing.db)
finally:
- metadata.drop_all(bind=testbase.db)
+ metadata.drop_all(bind=testing.db)
@testing.exclude('mysql', '<', (4, 1, 1))
def test_createdrop(self):
- metadata.create_all(bind=testbase.db)
- self.assertEqual( testbase.db.has_table('items'), True )
- self.assertEqual( testbase.db.has_table('email_addresses'), True )
- metadata.create_all(bind=testbase.db)
- self.assertEqual( testbase.db.has_table('items'), True )
-
- metadata.drop_all(bind=testbase.db)
- self.assertEqual( testbase.db.has_table('items'), False )
- self.assertEqual( testbase.db.has_table('email_addresses'), False )
- metadata.drop_all(bind=testbase.db)
- self.assertEqual( testbase.db.has_table('items'), False )
+ metadata.create_all(bind=testing.db)
+ self.assertEqual( testing.db.has_table('items'), True )
+ self.assertEqual( testing.db.has_table('email_addresses'), True )
+ metadata.create_all(bind=testing.db)
+ self.assertEqual( testing.db.has_table('items'), True )
+
+ metadata.drop_all(bind=testing.db)
+ self.assertEqual( testing.db.has_table('items'), False )
+ self.assertEqual( testing.db.has_table('email_addresses'), False )
+ metadata.drop_all(bind=testing.db)
+ self.assertEqual( testing.db.has_table('items'), False )
def test_tablenames(self):
from sqlalchemy.util import Set
- metadata.create_all(bind=testbase.db)
+ metadata.create_all(bind=testing.db)
# we only check to see if all the explicitly created tables are there, rather than
# assertEqual -- the test db could have "extra" tables if there is a misconfigured
# template. (*cough* tsearch2 w/ the pg windows installer.)
- self.assert_(not Set(metadata.tables) - Set(testbase.db.table_names()))
- metadata.drop_all(bind=testbase.db)
+ self.assert_(not Set(metadata.tables) - Set(testing.db.table_names()))
+ metadata.drop_all(bind=testing.db)
class UnicodeTest(PersistTest):
buf = StringIO.StringIO()
def foo(s, p=None):
buf.write(s)
- gen = create_engine(testbase.db.name + "://", strategy="mock", executor=foo)
+ gen = create_engine(testing.db.name + "://", strategy="mock", executor=foo)
gen = gen.dialect.schemagenerator(gen.dialect, gen)
gen.traverse(table1)
gen.traverse(table2)
buf = buf.getvalue()
print buf
- if testbase.db.dialect.preparer(testbase.db.dialect).omit_schema:
+ if testing.db.dialect.preparer(testing.db.dialect).omit_schema:
assert buf.index("CREATE TABLE table1") > -1
assert buf.index("CREATE TABLE table2") > -1
else:
# fixme: revisit these below.
@testing.fails_on('oracle', 'mssql', 'sybase', 'access')
def test_explicit_default_schema(self):
- engine = testbase.db
+ engine = testing.db
if testing.against('mysql'):
- schema = testbase.db.url.database
+ schema = testing.db.url.database
elif testing.against('postgres'):
schema = 'public'
else:
@testing.unsupported('sqlite', 'mysql', 'mssql', 'access', 'sybase')
def test_hassequence(self):
- metadata.create_all(bind=testbase.db)
- self.assertEqual(testbase.db.dialect.has_sequence(testbase.db, 'user_id_seq'), True)
- metadata.drop_all(bind=testbase.db)
- self.assertEqual(testbase.db.dialect.has_sequence(testbase.db, 'user_id_seq'), False)
+ metadata.create_all(bind=testing.db)
+ self.assertEqual(testing.db.dialect.has_sequence(testing.db, 'user_id_seq'), True)
+ metadata.drop_all(bind=testing.db)
+ self.assertEqual(testing.db.dialect.has_sequence(testing.db, 'user_id_seq'), False)
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import sys, time, threading
from sqlalchemy import *
Column('user_name', VARCHAR(20)),
test_needs_acid=True,
)
- users.create(testbase.db)
+ users.create(testing.db)
def tearDown(self):
- testbase.db.connect().execute(users.delete())
+ testing.db.connect().execute(users.delete())
def tearDownAll(self):
- users.drop(testbase.db)
+ users.drop(testing.db)
def testcommits(self):
- connection = testbase.db.connect()
+ connection = testing.db.connect()
transaction = connection.begin()
connection.execute(users.insert(), user_id=1, user_name='user1')
transaction.commit()
def testrollback(self):
"""test a basic rollback"""
- connection = testbase.db.connect()
+ connection = testing.db.connect()
transaction = connection.begin()
connection.execute(users.insert(), user_id=1, user_name='user1')
connection.execute(users.insert(), user_id=2, user_name='user2')
connection.close()
def testraise(self):
- connection = testbase.db.connect()
+ connection = testing.db.connect()
transaction = connection.begin()
try:
@testing.exclude('mysql', '<', (5, 0, 3))
def testnestedrollback(self):
- connection = testbase.db.connect()
+ connection = testing.db.connect()
try:
transaction = connection.begin()
@testing.exclude('mysql', '<', (5, 0, 3))
def testnesting(self):
- connection = testbase.db.connect()
+ connection = testing.db.connect()
transaction = connection.begin()
connection.execute(users.insert(), user_id=1, user_name='user1')
connection.execute(users.insert(), user_id=2, user_name='user2')
@testing.exclude('mysql', '<', (5, 0, 3))
def testclose(self):
- connection = testbase.db.connect()
+ connection = testing.db.connect()
transaction = connection.begin()
connection.execute(users.insert(), user_id=1, user_name='user1')
connection.execute(users.insert(), user_id=2, user_name='user2')
@testing.exclude('mysql', '<', (5, 0, 3))
def testclose2(self):
- connection = testbase.db.connect()
+ connection = testing.db.connect()
transaction = connection.begin()
connection.execute(users.insert(), user_id=1, user_name='user1')
connection.execute(users.insert(), user_id=2, user_name='user2')
@testing.unsupported('sqlite', 'mssql', 'firebird', 'sybase', 'access')
@testing.exclude('mysql', '<', (5, 0, 3))
def testnestedsubtransactionrollback(self):
- connection = testbase.db.connect()
+ connection = testing.db.connect()
transaction = connection.begin()
connection.execute(users.insert(), user_id=1, user_name='user1')
trans2 = connection.begin_nested()
@testing.unsupported('sqlite', 'mssql', 'firebird', 'sybase', 'access')
@testing.exclude('mysql', '<', (5, 0, 3))
def testnestedsubtransactioncommit(self):
- connection = testbase.db.connect()
+ connection = testing.db.connect()
transaction = connection.begin()
connection.execute(users.insert(), user_id=1, user_name='user1')
trans2 = connection.begin_nested()
@testing.unsupported('sqlite', 'mssql', 'firebird', 'sybase', 'access')
@testing.exclude('mysql', '<', (5, 0, 3))
def testrollbacktosubtransaction(self):
- connection = testbase.db.connect()
+ connection = testing.db.connect()
transaction = connection.begin()
connection.execute(users.insert(), user_id=1, user_name='user1')
trans2 = connection.begin_nested()
'oracle', 'maxdb')
@testing.exclude('mysql', '<', (5, 0, 3))
def testtwophasetransaction(self):
- connection = testbase.db.connect()
+ connection = testing.db.connect()
transaction = connection.begin_twophase()
connection.execute(users.insert(), user_id=1, user_name='user1')
'oracle', 'maxdb')
@testing.exclude('mysql', '<', (5, 0, 3))
def testmixedtwophasetransaction(self):
- connection = testbase.db.connect()
+ connection = testing.db.connect()
transaction = connection.begin_twophase()
connection.execute(users.insert(), user_id=1, user_name='user1')
# MySQL recovery doesn't currently seem to work correctly
# Prepared transactions disappear when connections are closed and even
# when they aren't it doesn't seem possible to use the recovery id.
- connection = testbase.db.connect()
+ connection = testing.db.connect()
transaction = connection.begin_twophase()
connection.execute(users.insert(), user_id=1, user_name='user1')
transaction.prepare()
connection.close()
- connection2 = testbase.db.connect()
+ connection2 = testing.db.connect()
self.assertEquals(
connection2.execute(select([users.c.user_id]).order_by(users.c.user_id)).fetchall(),
'oracle', 'maxdb')
@testing.exclude('mysql', '<', (5, 0, 3))
def testmultipletwophase(self):
- conn = testbase.db.connect()
+ conn = testing.db.connect()
xa = conn.begin_twophase()
conn.execute(users.insert(), user_id=1, user_name='user1')
metadata = MetaData()
def tearDownAll(self):
- metadata.drop_all(testbase.db)
+ metadata.drop_all(testing.db)
@testing.unsupported('sqlite')
def testrollback_deadlock(self):
"""test that returning connections to the pool clears any object locks."""
- conn1 = testbase.db.connect()
- conn2 = testbase.db.connect()
+ conn1 = testing.db.connect()
+ conn2 = testing.db.connect()
users = Table('deadlock_users', metadata,
Column('user_id', INT, primary_key = True),
Column('user_name', VARCHAR(20)),
class TLTransactionTest(PersistTest):
def setUpAll(self):
global users, metadata, tlengine
- tlengine = create_engine(testbase.db.url, strategy='threadlocal')
+ tlengine = create_engine(testing.db.url, strategy='threadlocal')
metadata = MetaData()
users = Table('query_users', metadata,
Column('user_id', INT, Sequence('query_users_id_seq', optional=True), primary_key=True),
Column('counter_value', INT),
test_needs_acid=True,
)
- counters.create(testbase.db)
+ counters.create(testing.db)
def tearDown(self):
- testbase.db.connect().execute(counters.delete())
+ testing.db.connect().execute(counters.delete())
def tearDownAll(self):
- counters.drop(testbase.db)
+ counters.drop(testing.db)
def increment(self, count, errors, update_style=True, delay=0.005):
- con = testbase.db.connect()
+ con = testing.db.connect()
sel = counters.select(for_update=update_style,
whereclause=counters.c.counter_id==1)
with each mutator trying to increment a value stored in user_name.
"""
- db = testbase.db
+ db = testing.db
db.execute(counters.insert(), counter_id=1, counter_value=0)
iterations, thread_count = 10, 5
def overlap(self, ids, errors, update_style):
sel = counters.select(for_update=update_style,
whereclause=counters.c.counter_id.in_(ids))
- con = testbase.db.connect()
+ con = testing.db.connect()
trans = con.begin()
try:
rows = con.execute(sel).fetchall()
con.close()
def _threaded_overlap(self, thread_count, groups, update_style=True, pool=5):
- db = testbase.db
+ db = testing.db
for cid in range(pool - 1):
db.execute(counters.insert(), counter_id=cid + 1, counter_value=0)
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from datetime import datetime
from sqlalchemy.ext.activemapper import ActiveMapper, column, one_to_many, one_to_one, many_to_many, objectstore
postal_code = column(String(128))
person_id = column(Integer, foreign_key=ForeignKey('person.id'))
- activemapper.metadata.bind = testbase.db
+ activemapper.metadata.bind = testing.db
activemapper.create_tables()
def tearDownAll(self):
objectstore.registry.set(s1)
objectstore.flush()
# Only dialects with a sane rowcount can detect the ConcurrentModificationError
- if testbase.db.dialect.supports_sane_rowcount:
+ if testing.db.dialect.supports_sane_rowcount:
assert False
except exceptions.ConcurrentModificationError:
pass
name = column(String(30))
foorel = many_to_many("foo", secondarytable, backref='bazrel')
- activemapper.metadata.bind = testbase.db
+ activemapper.metadata.bind = testing.db
activemapper.create_tables()
# Create a couple of activemapper objects
parent_id = column(Integer, foreign_key=ForeignKey('treenode.id'))
children = one_to_many('TreeNode', colname='id', backref='parent')
- activemapper.metadata.bind = testbase.db
+ activemapper.metadata.bind = testing.db
activemapper.create_tables()
def tearDownAll(self):
clear_mappers()
assert (t.parent is TreeNode.query.filter_by(name='node1').one())
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import unittest, doctest
def suite():
if __name__ == '__main__':
- testbase.main(suite())
+ testenv.main(suite())
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions
from sqlalchemy.orm import create_session, clear_mappers, relation, class_mapper
class AssignMapperTest(PersistTest):
def setUpAll(self):
global metadata, table, table2
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
table = Table('sometable', metadata,
Column('id', Integer, primary_key=True),
Column('data', String(30)))
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
def setUp(self):
collection_class = self.collection_class
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
parents_table = Table('Parent', metadata,
Column('id', Integer, primary_key=True),
self.assert_(len(p1._children) == 3)
self.assert_(len(p1.children) == 3)
- p1.children['e'] = 'changed-in-place'
+ p1.children['e'] = 'changed-in-place'
self.assert_(p1.children['e'] == 'changed-in-place')
inplace_id = p1._children['e'].id
p1 = self.roundtrip(p1)
class ScalarTest(PersistTest):
def test_scalar_proxy(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
parents_table = Table('Parent', metadata,
Column('id', Integer, primary_key=True),
class LazyLoadTest(PersistTest):
def setUp(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
parents_table = Table('Parent', metadata,
Column('id', Integer, primary_key=True),
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
-
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.orderinglist import *
from testlib import *
+
metadata = None
-# order in whole steps
+# order in whole steps
def step_numbering(step):
def f(index, collection):
return step * index
global metadata, slides_table, bullets_table, Slide, Bullet
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
slides_table = Table('test_Slides', metadata,
Column('id', Integer, primary_key=True),
Column('name', String(128)))
self.assert_(srt.bullets)
self.assert_(len(srt.bullets) == 4)
-
+
titles = ['s1/b1','s1/b2','s1/b100','s1/b4']
found = [b.text for b in srt.bullets]
self.assert_(s1.bullets[0].position == 1)
self.assert_(s1.bullets[1].position == 2)
self.assert_(s1.bullets[2].position == 3)
-
+
s1.bullets.append(Bullet('s1/b4'))
self.assert_(s1.bullets[0].position == 1)
self.assert_(s1.bullets[1].position == 2)
found = [b.text for b in srt.bullets]
self.assert_(titles == found)
-
+
def test_insert(self):
self._setup(ordering_list('position'))
self.assert_(s1.bullets[1].position == 1)
self.assert_(s1.bullets[2].position == 2)
self.assert_(s1.bullets[3].position == 3)
-
+
s1.bullets.insert(2, Bullet('insert_at_2'))
self.assert_(s1.bullets[0].position == 0)
self.assert_(s1.bullets[1].position == 1)
self.assert_(srt.bullets)
self.assert_(len(srt.bullets) == 6)
-
+
texts = ['1','2','insert_at_2','3','4','999']
found = [b.text for b in srt.bullets]
self.assert_(srt.bullets)
self.assert_(len(srt.bullets) == 3)
-
+
texts = ['1', '6', '3']
for i, text in enumerate(texts):
self.assert_(srt.bullets[i].position == i)
session.clear()
srt = session.query(Slide).get(id)
-
+
self.assert_(srt.bullets)
self.assert_(len(srt.bullets) == 3)
self.assert_(srt.bullets[1].text == 'new 2')
self.assert_(srt.bullets[2].text == '3')
-
+
def test_funky_ordering(self):
class Pos(object):
def __init__(self):
fibbed.insert(2, Pos())
fibbed.insert(4, Pos())
fibbed.insert(6, Pos())
-
+
for li, pos in (0,1), (1,2), (2,3), (3,5), (4,8), (5,13), (6,21), (7,34):
self.assert_(fibbed[li].position == pos)
for li, pos in (0,'A'), (1,'B'), (2,'C'), (3,'D'):
self.assert_(alpha[li].position == pos)
+
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import unittest
import inheritance.alltests as inheritance
'orm.assorted_eager',
'orm.naturalpks',
- 'orm.sessioncontext',
+ 'orm.sessioncontext',
'orm.unitofwork',
'orm.session',
'orm.cascade',
'orm.merge',
'orm.pickled',
'orm.memusage',
-
+
'orm.cycles',
'orm.entity',
if __name__ == '__main__':
- testbase.main(suite())
+ testenv.main(suite())
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
@testing.uses_deprecated('association option')
def setUpAll(self):
global items, item_keywords, keywords, metadata, Item, Keyword, KeywordAssociation
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
items = Table('items', metadata,
Column('item_id', Integer, primary_key=True),
Column('name', String(40)),
class AssociationTest2(PersistTest):
def setUpAll(self):
global table_originals, table_people, table_isauthor, metadata, Originals, People, IsAuthor
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
table_originals = Table('Originals', metadata,
Column('ID', Integer, primary_key=True),
Column('Title', String(200), nullable=False),
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
"""eager loading unittests derived from mailing list-reported problems and trac tickets."""
-import testbase
+import testenv; testenv.configure_for_tests()
import random, datetime
from sqlalchemy import *
from sqlalchemy.orm import *
class EagerTest(AssertMixin):
def setUpAll(self):
global dbmeta, owners, categories, tests, options, Owner, Category, Test, Option, false
- dbmeta = MetaData(testbase.db)
+ dbmeta = MetaData(testing.db)
# determine a literal value for "false" based on the dialect
# FIXME: this PassiveDefault setup is bogus.
- bp = Boolean().dialect_impl(testbase.db.dialect).bind_processor(testbase.db.dialect)
+ bp = Boolean().dialect_impl(testing.db.dialect).bind_processor(testing.db.dialect)
if bp:
false = str(bp(False))
elif testing.against('maxdb'):
class EagerTest2(AssertMixin):
def setUpAll(self):
global metadata, middle, left, right
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
middle = Table('middle', metadata,
Column('id', Integer, primary_key = True),
Column('data', String(50)),
'right': relation(Right, lazy=False, backref=backref('middle', lazy=False)),
}
)
- session = create_session(bind=testbase.db)
+ session = create_session(bind=testing.db)
p = Middle('test1')
p.left.append(Left('tag1'))
p.right.append(Right('tag2'))
)
def setUp(self):
- testbase.db.execute(project_t.insert(), {'id':1})
- testbase.db.execute(task_status_t.insert(), {'id':1})
- testbase.db.execute(task_type_t.insert(), {'id':1})
- testbase.db.execute(task_t.insert(), {'title':u'task 1', 'task_type_id':1, 'status_id':1, 'prj_id':1})
+ testing.db.execute(project_t.insert(), {'id':1})
+ testing.db.execute(task_status_t.insert(), {'id':1})
+ testing.db.execute(task_type_t.insert(), {'id':1})
+ testing.db.execute(task_t.insert(), {'title':u'task 1', 'task_type_id':1, 'status_id':1, 'prj_id':1})
@testing.fails_on('maxdb')
def test_nested_joins(self):
for e in acc.entries:
assert e.account is acc
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import pickle
import sqlalchemy.orm.attributes as attributes
from sqlalchemy.orm.collections import collection
def test_basic(self):
class User(object):pass
-
+
attributes.register_class(User)
attributes.register_attribute(User, 'user_id', uselist = False, useobject=False)
attributes.register_attribute(User, 'user_name', uselist = False, useobject=False)
attributes.register_attribute(User, 'email_address', uselist = False, useobject=False)
-
+
u = User()
u.user_id = 7
u.user_name = 'john'
u.email_address = 'lala@123.com'
-
+
self.assert_(u.user_id == 7 and u.user_name == 'john' and u.email_address == 'lala@123.com')
u._state.commit_all()
self.assert_(u.user_id == 7 and u.user_name == 'john' and u.email_address == 'lala@123.com')
pk_o2 = pickle.dumps(o2)
# so... pickle is creating a new 'mt2' string after a roundtrip here,
- # so we'll brute-force set it to be id-equal to the original string
+ # so we'll brute-force set it to be id-equal to the original string
if False:
o_mt2_str = [ k for k in o.__dict__ if k == 'mt2'][0]
o2_mt2_str = [ k for k in o2.__dict__ if k == 'mt2'][0]
def test_deferred(self):
class Foo(object):pass
-
+
data = {'a':'this is a', 'b':12}
def loader(instance, keys):
for k in keys:
instance.__dict__[k] = data[k]
return attributes.ATTR_WAS_SET
-
+
attributes.register_class(Foo, deferred_scalar_loader=loader)
attributes.register_attribute(Foo, 'a', uselist=False, useobject=False)
attributes.register_attribute(Foo, 'b', uselist=False, useobject=False)
-
+
f = Foo()
f._state.expire_attributes(None)
self.assertEquals(f.a, "this is a")
self.assertEquals(f.b, 12)
-
+
f.a = "this is some new a"
f._state.expire_attributes(None)
self.assertEquals(f.a, "this is a")
del f.a
self.assertEquals(f.a, None)
self.assertEquals(f.b, 12)
-
+
f._state.commit_all()
self.assertEquals(f.a, None)
self.assertEquals(f.b, 12)
for k in keys:
instance.__dict__[k] = data[k]
return attributes.ATTR_WAS_SET
-
+
attributes.register_class(MyTest, deferred_scalar_loader=loader)
attributes.register_attribute(MyTest, 'a', uselist=False, useobject=False)
attributes.register_attribute(MyTest, 'b', uselist=False, useobject=False)
-
+
m = MyTest()
m._state.expire_attributes(None)
assert 'a' not in m.__dict__
assert 'a' not in m2.__dict__
self.assertEquals(m2.a, "this is a")
self.assertEquals(m2.b, 12)
-
+
def test_list(self):
class User(object):pass
class Address(object):pass
-
+
attributes.register_class(User)
attributes.register_class(Address)
attributes.register_attribute(User, 'user_id', uselist = False, useobject=False)
attributes.register_attribute(User, 'addresses', uselist = True, useobject=True)
attributes.register_attribute(Address, 'address_id', uselist = False, useobject=False)
attributes.register_attribute(Address, 'email_address', uselist = False, useobject=False)
-
+
u = User()
u.user_id = 7
u.user_name = 'john'
a.email_address = 'foo@bar.com'
u.addresses.append(a)
self.assert_(u.user_id == 7 and u.user_name == 'heythere' and u.addresses[0].email_address == 'lala@123.com' and u.addresses[1].email_address == 'foo@bar.com')
-
+
def test_lazytrackparent(self):
"""test that the "hasparent" flag works properly when lazy loaders and backrefs are used"""
class Blog(object):pass
attributes.register_class(Post)
attributes.register_class(Blog)
-
- # set up instrumented attributes with backrefs
+
+ # set up instrumented attributes with backrefs
attributes.register_attribute(Post, 'blog', uselist=False, extension=attributes.GenericBackrefExtension('posts'), trackparent=True, useobject=True)
attributes.register_attribute(Blog, 'posts', uselist=True, extension=attributes.GenericBackrefExtension('blog'), trackparent=True, useobject=True)
# assert connections
assert p1.blog is b
assert p1 in b.posts
-
+
# manual connections
b2 = Blog()
p2 = Post()
b2.posts.append(p2)
assert attributes.has_parent(Blog, p2, 'posts')
assert attributes.has_parent(Post, b2, 'blog')
-
+
def test_inheritance(self):
"""tests that attributes are polymorphic"""
class Foo(object):pass
class Bar(Foo):pass
-
-
+
+
attributes.register_class(Foo)
attributes.register_class(Bar)
-
+
def func1():
print "func1"
return "this is the foo attr"
attributes.register_attribute(Foo, 'element', uselist=False, callable_=lambda o:func1, useobject=True)
attributes.register_attribute(Foo, 'element2', uselist=False, callable_=lambda o:func3, useobject=True)
attributes.register_attribute(Bar, 'element', uselist=False, callable_=lambda o:func2, useobject=True)
-
+
x = Foo()
y = Bar()
assert x.element == 'this is the foo attr'
def __init__(self):
states.add(self._state)
Foo.__init__(self)
-
-
+
+
attributes.register_class(Foo)
attributes.register_class(Bar)
-
+
b = Bar()
self.assertEquals(len(states), 1)
self.assertEquals(list(states)[0].obj(), b)
-
+
def test_inheritance2(self):
"""test that the attribute manager can properly traverse the managed attributes of an object,
if the object is of a descendant class with managed attributes in the parent class"""
class Foo(object):pass
class Bar(Foo):pass
-
+
attributes.register_class(Foo)
attributes.register_class(Bar)
attributes.register_attribute(Foo, 'element', uselist=False, useobject=True)
pass
class Bar(fixtures.Base):
pass
-
+
attributes.register_class(Foo)
attributes.register_class(Bar)
x._state.commit_all()
x.col2.append(bar4)
self.assertEquals(attributes.get_history(x._state, 'col2'), ([bar4], [bar1, bar2, bar3], []))
-
- def test_parenttrack(self):
+
+ def test_parenttrack(self):
class Foo(object):pass
class Bar(object):pass
-
+
attributes.register_class(Foo)
attributes.register_class(Bar)
-
+
attributes.register_attribute(Foo, 'element', uselist=False, trackparent=True, useobject=True)
attributes.register_attribute(Bar, 'element', uselist=False, trackparent=True, useobject=True)
-
+
f1 = Foo()
f2 = Foo()
b1 = Bar()
b2 = Bar()
-
+
f1.element = b1
b2.element = f2
-
+
assert attributes.has_parent(Foo, b1, 'element')
assert not attributes.has_parent(Foo, b2, 'element')
assert not attributes.has_parent(Foo, f2, 'element')
assert attributes.has_parent(Bar, f2, 'element')
-
+
b2.element = None
assert not attributes.has_parent(Bar, f2, 'element')
-
+
# test that double assignment doesn't accidentally reset the 'parent' flag.
b3 = Bar()
f4 = Foo()
def test_mutablescalars(self):
"""test detection of changes on mutable scalar items"""
class Foo(object):pass
-
+
attributes.register_class(Foo)
attributes.register_attribute(Foo, 'element', uselist=False, copy_function=lambda x:[y for y in x], mutable_scalars=True, useobject=False)
x = Foo()
- x.element = ['one', 'two', 'three']
+ x.element = ['one', 'two', 'three']
x._state.commit_all()
x.element[1] = 'five'
assert x._state.is_modified()
-
+
attributes.unregister_class(Foo)
-
+
attributes.register_class(Foo)
attributes.register_attribute(Foo, 'element', uselist=False, useobject=False)
x = Foo()
- x.element = ['one', 'two', 'three']
+ x.element = ['one', 'two', 'three']
x._state.commit_all()
x.element[1] = 'five'
assert not x._state.is_modified()
-
+
def test_descriptorattributes(self):
"""changeset: 1633 broke ability to use ORM to map classes with unusual
descriptor attributes (for example, classes that inherit from ones
class Foo(object):
A = des()
-
+
attributes.unregister_class(Foo)
-
+
def test_collectionclasses(self):
-
+
class Foo(object):pass
attributes.register_class(Foo)
attributes.register_attribute(Foo, "collection", uselist=True, typecallable=set, useobject=True)
assert isinstance(Foo().collection, set)
-
+
attributes.unregister_attribute(Foo, "collection")
try:
assert False
except exceptions.ArgumentError, e:
assert str(e) == "Type InstrumentedDict must elect an appender method to be a collection class"
-
+
class MyDict(dict):
@collection.appender
def append(self, item):
assert isinstance(Foo().collection, MyDict)
attributes.unregister_attribute(Foo, "collection")
-
+
class MyColl(object):pass
try:
attributes.register_attribute(Foo, "collection", uselist=True, typecallable=MyColl, useobject=True)
assert False
except exceptions.ArgumentError, e:
assert str(e) == "Type MyColl must elect an appender method to be a collection class"
-
+
class MyColl(object):
@collection.iterator
def __iter__(self):
class BackrefTest(PersistTest):
-
+
def test_manytomany(self):
class Student(object):pass
class Course(object):pass
self.assert_(s2.courses == [c])
self.assert_(s1.courses == [c])
s1.courses.remove(c)
- self.assert_(c.students == [s2,s3])
-
+ self.assert_(c.students == [s2,s3])
+
def test_onetomany(self):
class Post(object):pass
class Blog(object):pass
-
+
attributes.register_class(Post)
attributes.register_class(Blog)
attributes.register_attribute(Post, 'blog', uselist=False, extension=attributes.GenericBackrefExtension('posts'), trackparent=True, useobject=True)
class DeferredBackrefTest(PersistTest):
def setUp(self):
global Post, Blog, called, lazy_load
-
+
class Post(object):
def __init__(self, name):
self.name = name
def test_scalar(self):
class Foo(fixtures.Base):
pass
-
+
attributes.register_class(Foo)
attributes.register_attribute(Foo, 'someattr', uselist=False, useobject=False)
# case 1. new object
f = Foo()
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [], []))
-
+
f.someattr = "hi"
self.assertEquals(attributes.get_history(f._state, 'someattr'), (['hi'], [], []))
f._state.commit(['someattr'])
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], ['hi'], []))
-
+
f.someattr = 'there'
self.assertEquals(attributes.get_history(f._state, 'someattr'), (['there'], [], ['hi']))
del f.someattr
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [], ['there']))
-
+
# case 2. object with direct dictionary settings (similar to a load operation)
f = Foo()
f.__dict__['someattr'] = 'new'
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], ['new'], []))
-
+
f.someattr = 'old'
self.assertEquals(attributes.get_history(f._state, 'someattr'), (['old'], [], ['new']))
-
+
f._state.commit(['someattr'])
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], ['old'], []))
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [], []))
f.someattr = None
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([None], [], []))
-
+
f = Foo()
f.__dict__['someattr'] = 'new'
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], ['new'], []))
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], ['old'], []))
# setting None on uninitialized is currently not a change for an object attribute
- # (this is different than scalar attribute). a lazyload has occured so if its
+ # (this is different than scalar attribute). a lazyload has occurred so if it's
# None, its really None
f = Foo()
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [None], []))
class Bar(fixtures.Base):
def __nonzero__(self):
assert False
-
+
attributes.register_class(Foo)
attributes.register_attribute(Foo, 'someattr', uselist=True, useobject=True)
-
+
hi = Bar(name='hi')
there = Bar(name='there')
old = Bar(name='old')
new = Bar(name='new')
-
+
# case 1. new object
f = Foo()
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [], []))
f._state.commit(['someattr'])
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [there], []))
-
+
f.someattr = [hi]
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([hi], [], [there]))
pass
from sqlalchemy.orm.collections import attribute_mapped_collection
-
+
attributes.register_class(Foo)
attributes.register_attribute(Foo, 'someattr', uselist=True, useobject=True, typecallable=attribute_mapped_collection('name'))
f.someattr['there'] = there
self.assertEquals(tuple([set(x) for x in attributes.get_history(f._state, 'someattr')]), (set([hi, there]), set([]), set([])))
-
+
f._state.commit(['someattr'])
self.assertEquals(tuple([set(x) for x in attributes.get_history(f._state, 'someattr')]), (set([]), set([hi, there]), set([])))
f.someattr.remove(there)
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [hi], [there]))
-
+
f.someattr.append(old)
f.someattr.append(new)
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([old, new], [hi], [there]))
f._state.commit(['someattr'])
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [hi, old, new], []))
-
+
f.someattr.pop(0)
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [old, new], [hi]))
-
+
# case 2. object with direct settings (similar to a load operation)
f = Foo()
f.__dict__['id'] = 1
collection.append_without_event(new)
f._state.commit_all()
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [new], []))
-
+
f.id = 1
f.someattr.remove(new)
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([], [], [new]))
-
+
# case 3. mixing appends with sets
f = Foo()
f.someattr.append(hi)
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([hi, there], [], []))
f.someattr = [there]
self.assertEquals(attributes.get_history(f._state, 'someattr'), ([there], [], []))
-
+
def test_collections_via_backref(self):
class Foo(fixtures.Base):
pass
attributes.register_class(Bar)
attributes.register_attribute(Foo, 'bars', uselist=True, extension=attributes.GenericBackrefExtension('foo'), trackparent=True, useobject=True)
attributes.register_attribute(Bar, 'foo', uselist=False, extension=attributes.GenericBackrefExtension('bars'), trackparent=True, useobject=True)
-
+
f1 = Foo()
b1 = Bar()
self.assertEquals(attributes.get_history(f1._state, 'bars'), ([], [], []))
self.assertEquals(attributes.get_history(b1._state, 'foo'), ([], [None], []))
-
+
#b1.foo = f1
f1.bars.append(b1)
self.assertEquals(attributes.get_history(f1._state, 'bars'), ([b1], [], []))
self.assertEquals(attributes.get_history(f1._state, 'bars'), ([b1, b2], [], []))
self.assertEquals(attributes.get_history(b1._state, 'foo'), ([f1], [], []))
self.assertEquals(attributes.get_history(b2._state, 'foo'), ([f1], [], []))
-
+
def test_lazy_backref_collections(self):
class Foo(fixtures.Base):
pass
def load():
return lazy_load
return load
-
+
attributes.register_class(Foo)
attributes.register_class(Bar)
attributes.register_attribute(Foo, 'bars', uselist=True, extension=attributes.GenericBackrefExtension('foo'), trackparent=True, callable_=lazyload, useobject=True)
bar4 = Bar()
bar4.foo = f
self.assertEquals(attributes.get_history(f._state, 'bars'), ([bar4], [bar1, bar2, bar3], []))
-
+
lazy_load = None
f = Foo()
bar4 = Bar()
lazy_load = [bar1, bar2, bar3]
f._state.expire_attributes(['bars'])
self.assertEquals(attributes.get_history(f._state, 'bars'), ([], [bar1, bar2, bar3], []))
-
+
def test_collections_via_lazyload(self):
class Foo(fixtures.Base):
pass
def load():
return lazy_load
return load
-
+
attributes.register_class(Foo)
attributes.register_class(Bar)
attributes.register_attribute(Foo, 'bars', uselist=True, callable_=lazyload, trackparent=True, useobject=True)
-
+
bar1, bar2, bar3, bar4 = [Bar(id=1), Bar(id=2), Bar(id=3), Bar(id=4)]
lazy_load = [bar1, bar2, bar3]
f = Foo()
f.bars = []
self.assertEquals(attributes.get_history(f._state, 'bars'), ([], [], [bar1, bar2, bar3]))
-
+
f = Foo()
f.bars.append(bar4)
self.assertEquals(attributes.get_history(f._state, 'bars'), ([bar4], [bar1, bar2, bar3], []) )
f = Foo()
del f.bars[1]
self.assertEquals(attributes.get_history(f._state, 'bars'), ([], [bar1, bar3], [bar2]))
-
+
lazy_load = None
f = Foo()
f.bars.append(bar2)
attributes.register_attribute(Foo, 'bar', uselist=False, callable_=lazyload, useobject=False)
lazy_load = "hi"
- # with scalar non-object, the lazy callable is only executed on gets, not history
+ # with scalar non-object, the lazy callable is only executed on gets, not history
# operations
-
+
f = Foo()
self.assertEquals(f.bar, "hi")
self.assertEquals(attributes.get_history(f._state, 'bar'), ([], ["hi"], []))
self.assertEquals(attributes.get_history(f._state, 'bar'), ([], [], ["hi"]))
assert f.bar is None
self.assertEquals(attributes.get_history(f._state, 'bar'), ([None], [], ["hi"]))
-
+
def test_scalar_object_via_lazyload(self):
class Foo(fixtures.Base):
pass
def load():
return lazy_load
return load
-
+
attributes.register_class(Foo)
attributes.register_class(Bar)
attributes.register_attribute(Foo, 'bar', uselist=False, callable_=lazyload, trackparent=True, useobject=True)
bar1, bar2 = [Bar(id=1), Bar(id=2)]
lazy_load = bar1
- # with scalar object, the lazy callable is only executed on gets and history
+ # with scalar object, the lazy callable is only executed on gets and history
# operations
f = Foo()
self.assertEquals(attributes.get_history(f._state, 'bar'), ([], [bar1], []))
-
+
f = Foo()
f.bar = None
self.assertEquals(attributes.get_history(f._state, 'bar'), ([None], [], [bar1]))
self.assertEquals(attributes.get_history(f._state, 'bar'), ([bar2], [], [bar1]))
f.bar = bar1
self.assertEquals(attributes.get_history(f._state, 'bar'), ([], [bar1], []))
-
+
f = Foo()
self.assertEquals(f.bar, bar1)
del f.bar
self.assertEquals(attributes.get_history(f._state, 'bar'), ([None], [], [bar1]))
assert f.bar is None
self.assertEquals(attributes.get_history(f._state, 'bar'), ([None], [], [bar1]))
-
+
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions
def setUpAll(self):
global ctx, data, metadata, User, Pref, Extra
ctx = SessionContext(create_session)
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
extra = Table("extra", metadata,
Column("extra_id", Integer, Sequence("extra_id_seq", optional=True), primary_key=True),
Column("prefs_id", Integer, ForeignKey("prefs.prefs_id"))
class M2MCascadeTest(AssertMixin):
def setUpAll(self):
global metadata, a, b, atob
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
a = Table('a', metadata,
Column('id', Integer, primary_key=True),
Column('data', String(30))
def setUpAll(self):
global metadata, address_table, businesses, homes
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
address_table = Table('addresses', metadata,
Column('address_id', Integer, primary_key=True),
Column('street', String(30)),
def setUpAll(self):
global metadata, table_a, table_b
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
table_a = Table('a', metadata,
Column('id', Integer, primary_key=True),
Column('foo', String(30)))
assert table_b.count().scalar() == 3
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
import sqlalchemy.exceptions as exceptions
from sqlalchemy.orm import create_session, mapper, relation, \
self._test_composite_mapped(collection_class)
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions
from sqlalchemy.orm import *
"""test various mapper compilation scenarios"""
def tearDown(self):
clear_mappers()
-
+
def testone(self):
global metadata, order, employee, product, tax, orderproduct
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
- order = Table('orders', metadata,
+ order = Table('orders', metadata,
Column('id', Integer, primary_key=True),
Column('employee_id', Integer, ForeignKey('employees.id'), nullable=False),
Column('type', Unicode(16)))
order_join = order.select().alias('pjoin')
- order_mapper = mapper(Order, order,
- select_table=order_join,
- polymorphic_on=order_join.c.type,
+ order_mapper = mapper(Order, order,
+ select_table=order_join,
+ polymorphic_on=order_join.c.type,
polymorphic_identity='order',
properties={
'orderproducts': relation(OrderProduct, lazy=True, backref='order')}
'orders': relation(Order, lazy=True, backref='employee')})
mapper(OrderProduct, orderproduct)
-
+
# this requires that the compilation of order_mapper's "surrogate mapper" occur after
# the initial setup of MapperProperty objects on the mapper.
class_mapper(Product).compile()
def testtwo(self):
"""test that conflicting backrefs raises an exception"""
global metadata, order, employee, product, tax, orderproduct
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
- order = Table('orders', metadata,
+ order = Table('orders', metadata,
Column('id', Integer, primary_key=True),
Column('type', Unicode(16)))
order_join = order.select().alias('pjoin')
- order_mapper = mapper(Order, order,
- select_table=order_join,
- polymorphic_on=order_join.c.type,
+ order_mapper = mapper(Order, order,
+ select_table=order_join,
+ polymorphic_on=order_join.c.type,
polymorphic_identity='order',
properties={
'orderproducts': relation(OrderProduct, lazy=True, backref='product')}
assert str(e).index("Error creating backref ") > -1
def testthree(self):
- metadata = MetaData(testbase.db)
- node_table = Table("node", metadata,
+ metadata = MetaData(testing.db)
+ node_table = Table("node", metadata,
Column('node_id', Integer, primary_key=True),
Column('name_index', Integer, nullable=True),
)
- node_name_table = Table("node_name", metadata,
+ node_name_table = Table("node_name", metadata,
Column('node_name_id', Integer, primary_key=True),
Column('node_id', Integer, ForeignKey('node.node_id')),
Column('host_id', Integer, ForeignKey('host.host_id')),
class Node(object):pass
class NodeName(object):pass
class Host(object):pass
-
+
node_mapper = mapper(Node, node_table)
host_mapper = mapper(Host, host_table)
node_name_mapper = mapper(NodeName, node_name_table,
def testfour(self):
meta = MetaData()
-
+
a = Table('a', meta, Column('id', Integer, primary_key=True))
b = Table('b', meta, Column('id', Integer, primary_key=True), Column('a_id', Integer, ForeignKey('a.id')))
mapper(B, b, properties={
'a':relation(A, backref='b')
})
-
+
try:
compile_mappers()
assert False
assert str(e).index("Error creating backref") > -1
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
"""tests a self-referential mapper, with an additional list of child objects."""
def setUpAll(self):
global t1, t2, metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
t1 = Table('t1', metadata,
Column('c1', Integer, Sequence('t1c1_id_seq', optional=True), primary_key=True),
Column('parent_c1', Integer, ForeignKey('t1.c1')),
"""test self-referential relationship that joins on a column other than the primary key column"""
def setUpAll(self):
global table, meta
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
table = Table('item', meta,
Column('id', Integer, primary_key=True),
Column('uuid', String(32), unique=True, nullable=False),
class InheritTestOne(AssertMixin):
def setUpAll(self):
global parent, child1, child2, meta
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
parent = Table("parent", meta,
Column("id", Integer, primary_key=True),
Column("parent_data", String(50)),
"""tests two mappers with a one-to-many relation to each other."""
def setUpAll(self):
global t1, t2, metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
t1 = Table('t1', metadata,
Column('c1', Integer, Sequence('t1c1_id_seq', optional=True), primary_key=True),
Column('c2', Integer, ForeignKey('t2.c1'))
"""tests two mappers with a one-to-many relation to each other, with a second one-to-many on one of the mappers"""
def setUpAll(self):
global t1, t2, t3, metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
t1 = Table('t1', metadata,
Column('c1', Integer, Sequence('t1c1_id_seq', optional=True), primary_key=True),
Column('c2', Integer, ForeignKey('t2.c1')),
raise an exception when dependencies are sorted."""
def setUpAll(self):
global metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
global person
global ball
ball = Table('ball', metadata,
sess.save(b)
sess.save(p)
- self.assert_sql(testbase.db, lambda: sess.flush(), [
+ self.assert_sql(testing.db, lambda: sess.flush(), [
(
"INSERT INTO person (favorite_ball_id, data) VALUES (:favorite_ball_id, :data)",
{'favorite_ball_id': None, 'data':'some data'}
)
])
sess.delete(p)
- self.assert_sql(testbase.db, lambda: sess.flush(), [
+ self.assert_sql(testing.db, lambda: sess.flush(), [
# heres the post update (which is a pre-update with deletes)
(
"UPDATE person SET favorite_ball_id=:favorite_ball_id WHERE person.id = :person_id",
sess = create_session()
[sess.save(x) for x in [b,p,b2,b3,b4]]
- self.assert_sql(testbase.db, lambda: sess.flush(), [
+ self.assert_sql(testing.db, lambda: sess.flush(), [
(
"INSERT INTO ball (person_id, data) VALUES (:person_id, :data)",
{'person_id':None, 'data':'some data'}
])
sess.delete(p)
- self.assert_sql(testbase.db, lambda: sess.flush(), [
+ self.assert_sql(testing.db, lambda: sess.flush(), [
(
"UPDATE ball SET person_id=:person_id WHERE ball.id = :ball_id",
lambda ctx:{'person_id': None, 'ball_id': b.id}
"""test using post_update on a single self-referential mapper"""
def setUpAll(self):
global metadata, node_table
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
node_table = Table('node', metadata,
Column('id', Integer, Sequence('nodeid_id_seq', optional=True), primary_key=True),
Column('path', String(50), nullable=False),
remove_child(root, cats)
# pre-trigger lazy loader on 'cats' to make the test easier
cats.children
- self.assert_sql(testbase.db, lambda: session.flush(), [
+ self.assert_sql(testing.db, lambda: session.flush(), [
(
"UPDATE node SET prev_sibling_id=:prev_sibling_id WHERE node.id = :node_id",
lambda ctx:{'prev_sibling_id':about.id, 'node_id':stories.id}
class SelfReferentialPostUpdateTest2(AssertMixin):
def setUpAll(self):
global metadata, a_table
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
a_table = Table("a", metadata,
Column("id", Integer(), primary_key=True),
Column("fui", String(128)),
assert f2.foo is f1
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import operator
from sqlalchemy import *
from sqlalchemy.orm import *
class DynamicTest(FixtureTest):
keep_mappers = False
refresh_data = True
-
+
def test_basic(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
sess = create_session()
u = sess.query(User).first()
assert u.addresses.count() == 1, u.addresses.count()
-
+
def test_backref(self):
mapper(Address, addresses, properties={
'user':relation(User, backref=backref('addresses', lazy='dynamic'))
})
mapper(User, users)
-
+
sess = create_session()
ad = sess.query(Address).get(1)
def go():
ad.user = None
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.flush()
u = sess.query(User).get(7)
assert ad not in u.addresses
-
+
def test_no_count(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
# result), else additional count() queries are issued when evaluating in a list context
def go():
assert [User(id=7, addresses=[Address(id=1, email_address='jack@bean.com')])] == q.filter(User.id==7).all()
- self.assert_sql_count(testbase.db, go, 2)
-
+ self.assert_sql_count(testing.db, go, 2)
+
def test_m2m(self):
mapper(Order, orders, properties={
'items':relation(Item, secondary=order_items, lazy="dynamic", backref=backref('orders', lazy="dynamic"))
})
mapper(Item, items)
-
+
sess = create_session()
o1 = Order(id=15, description="order 10")
i1 = Item(id=10, description="item 8")
o1.items.append(i1)
sess.save(o1)
sess.flush()
-
+
assert o1 in i1.orders.all()
assert i1 in o1.items.all()
-
+
class FlushTest(FixtureTest):
def test_basic(self):
class Fixture(Base):
sess.delete(u.addresses[4])
sess.delete(u.addresses[3])
assert [Address(email_address='a'), Address(email_address='b'), Address(email_address='d')] == list(u.addresses)
-
+
sess.delete(u)
-
+
# u.addresses relation will have to force the load
# of all addresses so that they can be updated
sess.flush()
sess.close()
-
- assert testbase.db.scalar(addresses.count(addresses.c.user_id != None)) ==0
+
+ assert testing.db.scalar(addresses.count(addresses.c.user_id != None)) ==0
@testing.fails_on('maxdb')
def test_remove_orphans(self):
create_backref_test(autoflush, saveuser)
if __name__ == '__main__':
- testbase.main()
-
+ testenv.main()
"""basic tests of eager loaded attributes"""
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
assert a.user_id==7
# assert that the eager loader added 'user_id' to the row
# and deferred loading of that col was disabled
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
# do the mapping in reverse
# (we would have just used an "addresses" backref but the test fixtures then require the whole
assert u.addresses[0].user_id==7
# assert that the eager loader didn't have to affect 'user_id' here
# and that its still deferred
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
clear_mappers()
def go():
u = sess.query(User).limit(1).get(8)
assert User(id=8, addresses=[Address(id=2, dingalings=[Dingaling(id=1)]), Address(id=3), Address(id=4)]) == u
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_many_to_many(self):
q = create_session().query(Item)
def go():
assert fixtures.item_keyword_result == q.all()
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def go():
assert fixtures.item_keyword_result[0:2] == q.join('keywords').filter(keywords.c.name == 'red').all()
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_eager_option(self):
def go():
assert fixtures.item_keyword_result[0:2] == q.options(eagerload('keywords')).join('keywords').filter(keywords.c.name == 'red').all()
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_cyclical(self):
"""test that a circular eager relationship breaks the cycle with a lazy loader"""
User(id=10)
] == q.all()
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_double_same_mappers(self):
"""tests eager loading with two relations simulatneously, from the same table, using aliases. """
User(id=10)
] == q.all()
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_no_false_hits(self):
"""test that eager loaders don't interpret main table columns as part of their eager load."""
def go():
l = q.filter(s.c.u2_id==User.c.id).distinct().all()
assert fixtures.user_address_result == l
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
@testing.fails_on('maxdb')
def test_limit_2(self):
def go():
l = q.filter(users.c.id == 7).all()
assert [User(id=7, address=Address(id=1))] == l
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
@testing.fails_on('maxdb')
def test_many_to_one(self):
assert a.user is not None
u1 = sess.query(User).get(7)
assert a.user is u1
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_one_and_many(self):
def go():
assert fixtures.user_order_result[0:3] == l.all()
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_double_with_aggregate(self):
),
User(id=10),
] == q.all()
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_wide(self):
mapper(Order, orders, properties={'items':relation(Item, secondary=order_items, lazy=False, order_by=items.c.id)})
def go():
ret = sess.query(User).add_entity(Order).join('orders', aliased=True).order_by(User.id).order_by(Order.id).all()
self.assertEquals(ret, self._assert_result())
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_options(self):
mapper(User, users, properties={
def go():
ret = sess.query(User).options(eagerload('addresses')).add_entity(Order).join('orders', aliased=True).order_by(User.id).order_by(Order.id).all()
self.assertEquals(ret, self._assert_result())
- self.assert_sql_count(testbase.db, go, 6)
+ self.assert_sql_count(testing.db, go, 6)
sess.clear()
def go():
ret = sess.query(User).options(eagerload('addresses')).add_entity(Order).options(eagerload('items', Order)).join('orders', aliased=True).order_by(User.id).order_by(Order.id).all()
self.assertEquals(ret, self._assert_result())
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
class SelfReferentialEagerTest(ORMTest):
def define_tables(self, metadata):
]),
Node(data='n13')
]) == d
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_lazy_fallback_doesnt_affect_eager(self):
Node(data='n122'),
Node(data='n123')
] == list(n12.children)
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_with_deferred(self):
class Node(Base):
def go():
assert Node(data='n1', children=[Node(data='n11'), Node(data='n12')]) == sess.query(Node).first()
- self.assert_sql_count(testbase.db, go, 4)
+ self.assert_sql_count(testing.db, go, 4)
sess.clear()
def go():
assert Node(data='n1', children=[Node(data='n11'), Node(data='n12')]) == sess.query(Node).options(undefer('data')).first()
- self.assert_sql_count(testbase.db, go, 3)
+ self.assert_sql_count(testing.db, go, 3)
sess.clear()
def go():
assert Node(data='n1', children=[Node(data='n11'), Node(data='n12')]) == sess.query(Node).options(undefer('data'), undefer('children.data')).first()
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
]),
Node(data='n13')
]) == d
- self.assert_sql_count(testbase.db, go, 2)
+ self.assert_sql_count(testing.db, go, 2)
def go():
d = sess.query(Node).filter_by(data='n1').options(eagerload('children.children')).first()
# testing only sqlite for now since the query text is slightly different on other
# dialects
if testing.against('sqlite'):
- self.assert_sql(testbase.db, go, [
+ self.assert_sql(testing.db, go, [
(
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, nodes.data AS nodes_data FROM nodes WHERE nodes.data = :nodes_data_1 ORDER BY nodes.oid LIMIT 1 OFFSET 0",
{'nodes_data_1': 'n1'}
]),
Node(data='n13')
]) == d
- self.assert_sql_count(testbase.db, go, 3)
+ self.assert_sql_count(testing.db, go, 3)
class SelfReferentialM2MEagerTest(ORMTest):
def define_tables(self, metadata):
create_session().query(SubT).all()
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.sessioncontext import SessionContext
@testing.uses_deprecated('SessionContext')
def setUpAll(self):
global user1, user2, address1, address2, metadata, ctx
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
ctx = SessionContext(create_session)
user1 = Table('user1', metadata,
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
"""test attribute/instance expiration, deferral of attributes, etc."""
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions
from sqlalchemy.orm import *
class ExpireTest(FixtureTest):
keep_mappers = False
refresh_data = True
-
+
def test_expire(self):
mapper(User, users, properties={
'addresses':relation(Address, backref='user'),
})
mapper(Address, addresses)
-
+
sess = create_session()
u = sess.query(User).get(7)
assert len(u.addresses) == 1
u.name = 'foo'
del u.addresses[0]
sess.expire(u)
-
+
assert 'name' not in u.__dict__
-
+
def go():
assert u.name == 'jack'
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
assert 'name' in u.__dict__
u.name = 'foo'
def go():
assert u.name == 'jack'
- self.assert_sql_count(testbase.db, go, 0)
-
+ self.assert_sql_count(testing.db, go, 0)
+
def test_expire_doesntload_on_set(self):
mapper(User, users)
-
+
sess = create_session()
u = sess.query(User).get(7)
-
+
sess.expire(u, attribute_names=['name'])
def go():
u.name = 'somenewname'
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
sess.flush()
sess.clear()
assert sess.query(User).get(7).name == 'somenewname'
-
+
def test_no_session(self):
mapper(User, users)
sess = create_session()
u = sess.query(User).get(7)
-
+
sess.expire(u, attribute_names=['name'])
sess.expunge(u)
try:
u.name
except exceptions.InvalidRequestError, e:
assert str(e) == "Instance <class 'testlib.fixtures.User'> is not bound to a Session, and no contextual session is established; attribute refresh operation cannot proceed"
-
+
def test_expire_preserves_changes(self):
"""test that the expire load operation doesn't revert post-expire changes"""
-
+
mapper(Order, orders)
sess = create_session()
o = sess.query(Order).get(3)
sess.expire(o)
-
+
o.description = "order 3 modified"
def go():
assert o.isopen == 1
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
assert o.description == 'order 3 modified'
del o.description
assert "description" not in o.__dict__
assert o.description is None
-
+
o.isopen=15
sess.expire(o, ['isopen', 'description'])
o.description = 'some new description'
sess.query(Order).all()
assert o.isopen == 1
assert o.description == 'some new description'
-
+
if False:
# NOTYET: need to implement unconditional population
# of expired attriutes in mapper._instances()
del o.isopen
def go():
assert o.isopen is None
- self.assert_sql_count(testbase.db, go, 0)
-
+ self.assert_sql_count(testing.db, go, 0)
+
def test_expire_committed(self):
"""test that the committed state of the attribute receives the most recent DB data"""
mapper(Order, orders)
-
+
sess = create_session()
o = sess.query(Order).get(3)
sess.expire(o)
assert o._state.dict['description'] == 'order 3 modified'
def go():
sess.flush()
- self.assert_sql_count(testbase.db, go, 0)
-
+ self.assert_sql_count(testing.db, go, 0)
+
def test_expire_cascade(self):
mapper(User, users, properties={
'addresses':relation(Address, cascade="all, refresh-expire")
def go():
assert u.addresses[0].email_address == 'jack@bean.com'
assert u.name == 'jack'
- # two loads
- self.assert_sql_count(testbase.db, go, 2)
+ # two loads
+ self.assert_sql_count(testing.db, go, 2)
assert 'name' in u.__dict__
assert 'addresses' in u.__dict__
def go():
assert u.addresses[0].email_address == 'jack@bean.com'
assert u.name == 'jack'
- # two loads, since relation() + scalar are
+ # two loads, since relation() + scalar are
# separate right now
- self.assert_sql_count(testbase.db, go, 2)
+ self.assert_sql_count(testing.db, go, 2)
assert 'name' in u.__dict__
assert 'addresses' in u.__dict__
sess = create_session()
o = sess.query(Order).get(3)
-
+
sess.expire(o, attribute_names=['description'])
assert 'id' in o.__dict__
assert 'description' not in o.__dict__
assert o._state.dict['isopen'] == 1
-
+
orders.update(orders.c.id==3).execute(description='order 3 modified')
-
+
def go():
assert o.description == 'order 3 modified'
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
assert o._state.dict['description'] == 'order 3 modified'
-
+
o.isopen = 5
sess.expire(o, attribute_names=['description'])
assert 'id' in o.__dict__
assert 'description' not in o.__dict__
assert o.__dict__['isopen'] == 5
assert o._state.committed_state['isopen'] == 1
-
+
def go():
assert o.description == 'order 3 modified'
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
assert o.__dict__['isopen'] == 5
assert o._state.dict['description'] == 'order 3 modified'
assert o._state.committed_state['isopen'] == 1
sess.flush()
-
+
sess.expire(o, attribute_names=['id', 'isopen', 'description'])
assert 'id' not in o.__dict__
assert 'isopen' not in o.__dict__
assert o.description == 'order 3 modified'
assert o.id == 3
assert o.isopen == 5
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_partial_expire_lazy(self):
mapper(User, users, properties={
sess = create_session()
u = sess.query(User).get(8)
-
+
sess.expire(u, ['name', 'addresses'])
assert 'name' not in u.__dict__
assert 'addresses' not in u.__dict__
-
+
# hit the lazy loader. just does the lazy load,
# doesnt do the overall refresh
def go():
assert u.addresses[0].email_address=='ed@wood.com'
- self.assert_sql_count(testbase.db, go, 1)
-
+ self.assert_sql_count(testing.db, go, 1)
+
assert 'name' not in u.__dict__
-
- # check that mods to expired lazy-load attributes
+
+ # check that mods to expired lazy-load attributes
# only do the lazy load
sess.expire(u, ['name', 'addresses'])
def go():
u.addresses = [Address(id=10, email_address='foo@bar.com')]
- self.assert_sql_count(testbase.db, go, 1)
-
+ self.assert_sql_count(testing.db, go, 1)
+
sess.flush()
-
- # flush has occurred, and addresses was modified,
+
+ # flush has occurred, and addresses was modified,
# so the addresses collection got committed and is
# longer expired
def go():
assert u.addresses[0].email_address=='foo@bar.com'
assert len(u.addresses) == 1
- self.assert_sql_count(testbase.db, go, 0)
-
+ self.assert_sql_count(testing.db, go, 0)
+
# but the name attribute was never loaded and so
# still loads
def go():
assert u.name == 'ed'
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_partial_expire_eager(self):
mapper(User, users, properties={
def go():
assert u.addresses[0].email_address=='ed@wood.com'
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
- # check that mods to expired eager-load attributes
+ # check that mods to expired eager-load attributes
# do the refresh
sess.expire(u, ['name', 'addresses'])
def go():
u.addresses = [Address(id=10, email_address='foo@bar.com')]
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.flush()
# this should ideally trigger the whole load
def go():
assert u.addresses[0].email_address=='foo@bar.com'
assert len(u.addresses) == 1
- self.assert_sql_count(testbase.db, go, 0)
-
+ self.assert_sql_count(testing.db, go, 0)
+
def go():
assert u.name == 'ed'
# scalar attributes have their own load
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
# ideally, this was already loaded, but we arent
# doing it that way right now
- #self.assert_sql_count(testbase.db, go, 0)
+ #self.assert_sql_count(testing.db, go, 0)
def test_partial_expire_deferred(self):
mapper(Order, orders, properties={
'description':deferred(orders.c.description)
})
-
+
sess = create_session()
o = sess.query(Order).get(3)
sess.expire(o, ['description', 'isopen'])
assert 'isopen' not in o.__dict__
assert 'description' not in o.__dict__
-
+
# test that expired attribute access refreshes
# the deferred
def go():
assert o.isopen == 1
assert o.description == 'order 3'
- self.assert_sql_count(testbase.db, go, 1)
-
+ self.assert_sql_count(testing.db, go, 1)
+
sess.expire(o, ['description', 'isopen'])
assert 'isopen' not in o.__dict__
assert 'description' not in o.__dict__
def go():
assert o.description == 'order 3'
assert o.isopen == 1
- self.assert_sql_count(testbase.db, go, 1)
-
+ self.assert_sql_count(testing.db, go, 1)
+
clear_mappers()
-
+
mapper(Order, orders)
sess.clear()
# sanity check
def go():
assert o.description == 'order 3'
- self.assert_sql_count(testbase.db, go, 1)
-
+ self.assert_sql_count(testing.db, go, 1)
+
assert 'description' in o.__dict__
assert 'isopen' in o.__dict__
sess.expire(o, ['description', 'isopen'])
assert 'isopen' not in o.__dict__
assert 'description' not in o.__dict__
-
+
# test that expired attribute access refreshes
# the deferred
def go():
assert o.isopen == 1
assert o.description == 'order 3'
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.expire(o, ['description', 'isopen'])
assert 'isopen' not in o.__dict__
def go():
assert o.description == 'order 3'
assert o.isopen == 1
- self.assert_sql_count(testbase.db, go, 1)
-
+ self.assert_sql_count(testing.db, go, 1)
+
class RefreshTest(FixtureTest):
keep_mappers = False
# username is back to the DB
assert u.name == 'jack'
-
+
assert id(a) not in [id(x) for x in u.addresses]
u.name = 'foo'
assert 'name' not in u.__dict__
s.refresh(u)
assert u.name == 'jack'
-
+
def test_refresh_with_lazy(self):
- """test that when a lazy loader is set as a trigger on an object's attribute
- (at the attribute level, not the class level), a refresh() operation doesnt
+ """test that when a lazy loader is set as a trigger on an object's attribute
+ (at the attribute level, not the class level), a refresh() operation doesnt
fire the lazy loader or create any problems"""
-
+
s = create_session()
mapper(User, users, properties={'addresses':relation(mapper(Address, addresses))})
q = s.query(User).options(lazyload('addresses'))
u = q.filter(users.c.id==8).first()
def go():
s.refresh(u)
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_refresh_with_eager(self):
"""test that a refresh/expire operation loads rows properly and sends correct "isnew" state to eager loaders"""
-
+
mapper(User, users, properties={
'addresses':relation(mapper(Address, addresses), lazy=False)
})
-
+
s = create_session()
u = s.get(User, 8)
assert len(u.addresses) == 3
mapper(Address, addresses)
mapper(User, users, properties = dict(addresses=relation(Address,cascade="all, delete-orphan",lazy=False)) )
-
+
u=User()
u.name='Justin'
a = Address(id=10, email_address='lala')
u.addresses.append(a)
-
+
s.save(u)
s.flush()
s.clear()
s.refresh(u)
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy import exceptions
class GenerativeQueryTest(PersistTest):
def setUpAll(self):
global foo, metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
foo = Table('foo', metadata,
Column('id', Integer, Sequence('foo_id_seq'), primary_key=True),
Column('bar', Integer),
mapper(Foo, foo)
metadata.create_all()
- sess = create_session(bind=testbase.db)
+ sess = create_session(bind=testing.db)
for i in range(100):
sess.save(Foo(bar=i, range=i%10))
sess.flush()
clear_mappers()
def test_selectby(self):
- res = create_session(bind=testbase.db).query(Foo).filter_by(range=5)
+ res = create_session(bind=testing.db).query(Foo).filter_by(range=5)
assert res.order_by([Foo.c.bar])[0].bar == 5
assert res.order_by([desc(Foo.c.bar)])[0].bar == 95
@testing.unsupported('mssql')
@testing.fails_on('maxdb')
def test_slice(self):
- sess = create_session(bind=testbase.db)
+ sess = create_session(bind=testing.db)
query = sess.query(Foo)
orig = query.all()
assert query[1] == orig[1]
assert query[10:20][5] == orig[10:20][5]
def test_aggregate(self):
- sess = create_session(bind=testbase.db)
+ sess = create_session(bind=testing.db)
query = sess.query(Foo)
assert query.count() == 100
assert query.filter(foo.c.bar<30).min(foo.c.bar) == 0
def test_aggregate_1(self):
if (testing.against('mysql') and
- testbase.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma')):
+ testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma')):
return
- query = create_session(bind=testbase.db).query(Foo)
+ query = create_session(bind=testing.db).query(Foo)
assert query.filter(foo.c.bar<30).sum(foo.c.bar) == 435
@testing.fails_on('postgres', 'mysql', 'firebird', 'mssql')
def test_aggregate_2(self):
- query = create_session(bind=testbase.db).query(Foo)
+ query = create_session(bind=testing.db).query(Foo)
assert query.filter(foo.c.bar<30).avg(foo.c.bar) == 14.5
@testing.fails_on_everything_except('sqlite', 'postgres', 'mysql',
'firebird', 'mssql')
def test_aggregate_2_int(self):
- query = create_session(bind=testbase.db).query(Foo)
+ query = create_session(bind=testing.db).query(Foo)
assert int(query.filter(foo.c.bar<30).avg(foo.c.bar)) == 14
@testing.fails_on('postgres', 'mysql', 'firebird', 'mssql')
def test_aggregate_3(self):
- query = create_session(bind=testbase.db).query(Foo)
+ query = create_session(bind=testing.db).query(Foo)
assert query.filter(foo.c.bar<30).apply_avg(foo.c.bar).first() == 14.5
assert query.filter(foo.c.bar<30).apply_avg(foo.c.bar).one() == 14.5
def test_filter(self):
- query = create_session(bind=testbase.db).query(Foo)
+ query = create_session(bind=testing.db).query(Foo)
assert query.count() == 100
assert query.filter(Foo.c.bar < 30).count() == 30
res2 = query.filter(Foo.c.bar < 30).filter(Foo.c.bar > 10)
assert res2.count() == 19
def test_options(self):
- query = create_session(bind=testbase.db).query(Foo)
+ query = create_session(bind=testing.db).query(Foo)
class ext1(MapperExtension):
def populate_instance(self, mapper, selectcontext, row, instance, **flags):
instance.TEST = "hello world"
assert query.options(extension(ext1()))[0].TEST == "hello world"
def test_order_by(self):
- query = create_session(bind=testbase.db).query(Foo)
+ query = create_session(bind=testing.db).query(Foo)
assert query.order_by([Foo.c.bar])[0].bar == 0
assert query.order_by([desc(Foo.c.bar)])[0].bar == 99
def test_offset(self):
- query = create_session(bind=testbase.db).query(Foo)
+ query = create_session(bind=testing.db).query(Foo)
assert list(query.order_by([Foo.c.bar]).offset(10))[0].bar == 10
def test_offset(self):
- query = create_session(bind=testbase.db).query(Foo)
+ query = create_session(bind=testing.db).query(Foo)
assert len(list(query.limit(10))) == 10
class Obj1(object):
)
mapper(Obj1, table1)
mapper(Obj2, table2)
- metadata.create_all(bind=testbase.db)
- testbase.db.execute(table1.insert(), {'id':1},{'id':2},{'id':3},{'id':4})
- testbase.db.execute(table2.insert(), {'num':1,'t1id':1},{'num':2,'t1id':1},{'num':3,'t1id':1},\
+ metadata.create_all(bind=testing.db)
+ testing.db.execute(table1.insert(), {'id':1},{'id':2},{'id':3},{'id':4})
+ testing.db.execute(table2.insert(), {'num':1,'t1id':1},{'num':2,'t1id':1},{'num':3,'t1id':1},\
{'num':4,'t1id':2},{'num':5,'t1id':2},{'num':6,'t1id':3})
def tearDownAll(self):
- metadata.drop_all(bind=testbase.db)
+ metadata.drop_all(bind=testing.db)
clear_mappers()
def test_distinctcount(self):
- query = create_session(bind=testbase.db).query(Obj1)
+ query = create_session(bind=testing.db).query(Obj1)
assert query.count() == 4
res = query.filter(and_(table1.c.id==table2.c.t1id,table2.c.t1id==1))
assert res.count() == 3
'items':relation(mapper(tables.Item, tables.orderitems))
}))
})
- session = create_session(bind=testbase.db)
+ session = create_session(bind=testing.db)
query = session.query(tables.User)
x = query.join(['orders', 'items']).filter(tables.Item.c.item_id==2)
print x.compile()
'items':relation(mapper(tables.Item, tables.orderitems))
}))
})
- session = create_session(bind=testbase.db)
+ session = create_session(bind=testing.db)
query = session.query(tables.User)
x = query.outerjoin(['orders', 'items']).filter(or_(tables.Order.c.order_id==None,tables.Item.c.item_id==2))
print x.compile()
'items':relation(mapper(tables.Item, tables.orderitems))
}))
})
- session = create_session(bind=testbase.db)
+ session = create_session(bind=testing.db)
query = session.query(tables.User)
x = query.outerjoin(['orders', 'items']).filter(or_(tables.Order.c.order_id==None,tables.Item.c.item_id==2)).count()
assert x==2
'items':relation(mapper(tables.Item, tables.orderitems))
}))
})
- session = create_session(bind=testbase.db)
+ session = create_session(bind=testing.db)
query = session.query(tables.User)
x = query.select_from(tables.users.outerjoin(tables.orders).outerjoin(tables.orderitems)).\
filter(or_(tables.Order.c.order_id==None,tables.Item.c.item_id==2))
class CaseSensitiveTest(PersistTest):
def setUpAll(self):
global metadata, table1, table2
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
table1 = Table('Table1', metadata,
Column('ID', Integer, primary_key=True),
)
clear_mappers()
def test_distinctcount(self):
- q = create_session(bind=testbase.db).query(Obj1)
+ q = create_session(bind=testing.db).query(Obj1)
assert q.count() == 4
res = q.filter(and_(table1.c.ID==table2.c.T1ID,table2.c.T1ID==1))
assert res.count() == 3
def test_noautojoin(self):
class T(object):pass
mapper(T, t1, properties={'children':relation(T)})
- sess = create_session(bind=testbase.db)
+ sess = create_session(bind=testing.db)
try:
sess.query(T).join('children').select_by(id=7)
assert False
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.orm.sync import ONETOMANY, MANYTOONE
from testlib import *
+
def produce_test(parent, child, direction):
"""produce a testcase for A->B->C inheritance with a self-referential
relationship between two of the classes, using either one-to-many or
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions, util
from sqlalchemy.orm import *
class ABCTest(ORMTest):
def define_tables(self, metadata):
global a, b, c
- a = Table('a', metadata,
+ a = Table('a', metadata,
Column('id', Integer, primary_key=True),
Column('adata', String(30)),
Column('type', String(30)),
b = Table('b', metadata,
Column('id', Integer, ForeignKey('a.id'), primary_key=True),
Column('bdata', String(30)))
- c = Table('c', metadata,
+ c = Table('c', metadata,
Column('id', Integer, ForeignKey('b.id'), primary_key=True),
Column('cdata', String(30)))
-
+
def make_test(fetchtype):
def test_roundtrip(self):
class A(fixtures.Base):pass
class B(A):pass
class C(B):pass
-
+
if fetchtype == 'union':
abc = a.outerjoin(b).outerjoin(c)
bc = a.join(b).outerjoin(c)
else:
abc = bc = None
-
+
mapper(A, a, select_table=abc, polymorphic_on=a.c.type, polymorphic_identity='a', polymorphic_fetch=fetchtype)
mapper(B, b, select_table=bc, inherits=A, polymorphic_identity='b', polymorphic_fetch=fetchtype)
mapper(C, c, inherits=B, polymorphic_identity='c')
-
+
a1 = A(adata='a1')
b1 = B(bdata='b1', adata='b1')
b2 = B(bdata='b2', adata='b2')
c1 = C(cdata='c1', bdata='c1', adata='c1')
c2 = C(cdata='c2', bdata='c2', adata='c2')
c3 = C(cdata='c2', bdata='c2', adata='c2')
-
+
sess = create_session()
for x in (a1, b1, b2, b3, c1, c2, c3):
sess.save(x)
sess.flush()
sess.clear()
-
+
#for obj in sess.query(A).all():
# print obj
assert [
test_roundtrip.__name__ = 'test_%s' % fetchtype
return test_roundtrip
-
+
test_union = make_test('union')
test_select = make_test('select')
test_deferred = make_test('deferred')
-
-
+
+
if __name__ == '__main__':
- testbase.main()
-
\ No newline at end of file
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import unittest
def suite():
'orm.inheritance.productspec',
'orm.inheritance.magazine',
'orm.inheritance.selects',
-
+
)
alltests = unittest.TestSuite()
for name in modules_to_test:
if __name__ == '__main__':
- testbase.main(suite())
+ testenv.main(suite())
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions, util
from sqlalchemy.orm import *
assert sess.query(Bar).get(bl.id) == bl
assert sess.query(Blub).get(bl.id) == bl
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
else:
# this is testing the 'wrong' behavior of using get()
# polymorphically with mappers that are not configured to be
assert sess.query(Blub).get(bl.id) == bl
- self.assert_sql_count(testbase.db, go, 3)
+ self.assert_sql_count(testing.db, go, 3)
test_get.__name__ = name
return test_get
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
def go():
c2 = session.query(Company).get(c.id)
assert set([repr(x) for x in c2.employees]) == set(["Engineer Kurt knows how to hack", "Manager Tom knows how to manage things"])
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
class PageSize(BaseObject):
def __repr__(self):
return "%s(%sx%s, %s)" % (self.__class__.__name__, self.width, self.height, self.name)
-
+
class Magazine(BaseObject):
def __repr__(self):
return "%s(%s, %s)" % (self.__class__.__name__, repr(self.location), repr(self.size))
page2 = MagazinePage(magazine=magazine,page_no=2)
page3 = ClassifiedPage(magazine=magazine,page_no=3)
session.save(pub)
-
+
session.flush()
print [x for x in session]
session.clear()
print p.issues[0].locations[0].magazine.pages
print [page, page2, page3]
assert repr(p.issues[0].locations[0].magazine.pages) == repr([page, page2, page3]), repr(p.issues[0].locations[0].magazine.pages)
-
+
test_roundtrip.__name__ = "test_%s" % (not use_union and (use_joins and "joins" or "select") or "unions")
setattr(MagazineTest, test_roundtrip.__name__, test_roundtrip)
-
+
for (use_union, use_join) in [(True, False), (False, True), (False, False)]:
generate_round_trip_test(use_union, use_join)
-
+
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
assert original == forwards == backwards
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
"""tests basic polymorphic mapper loading/saving, minimal relations"""
-import testbase
+import testenv; testenv.configure_for_tests()
import sets
from sqlalchemy import *
from sqlalchemy.orm import *
if not lazy_relation:
if polymorphic_fetch=='union':
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
else:
- self.assert_sql_count(testbase.db, go, 5)
+ self.assert_sql_count(testing.db, go, 5)
else:
if polymorphic_fetch=='union':
- self.assert_sql_count(testbase.db, go, 2)
+ self.assert_sql_count(testing.db, go, 2)
else:
- self.assert_sql_count(testbase.db, go, 6)
+ self.assert_sql_count(testing.db, go, 6)
# test selecting from the query, using the base mapped table (people) as the selection criterion.
# in the case of the polymorphic Person query, the "people" selectable should be adapted to be "person_join"
generate_round_trip_test(include_base, lazy_relation, redefine_colprop, use_literal_join, polymorphic_fetch, False)
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions, util
from sqlalchemy.orm import *
def go():
testcar = session.query(Car).options(eagerload('employee')).get(car1.car_id)
assert str(testcar.employee) == "Engineer E4, status X"
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
session.clear()
s = session.query(Car)
# +--------------------------------------- has a ------+
global metadata, status, people, engineers, managers, cars
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
# table definitions
status = Table('status', metadata,
Column('status_id', Integer, primary_key=True),
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from datetime import datetime
from sqlalchemy import *
from sqlalchemy.orm import *
assert orig == new == '<Assembly a1> specification=[<SpecLine 1.0 <Detail d1>>] documents=[<Document doc1>, <RasterDocument doc2>]'
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
# TODO: under construction !
-import testbase
+import testenv; testenv.configure_for_tests()
import sets
from sqlalchemy import *
from sqlalchemy.orm import *
class Company(fixtures.Base):
pass
-
+
class Person(fixtures.Base):
pass
class Engineer(Person):
class PolymorphicQueryTest(ORMTest):
keep_data = True
keep_mappers = True
-
+
def define_tables(self, metadata):
global companies, people, engineers, managers, boss, paperwork
-
- companies = Table('companies', metadata,
+
+ companies = Table('companies', metadata,
Column('company_id', Integer, Sequence('company_id_seq', optional=True), primary_key=True),
Column('name', String(50)))
- people = Table('people', metadata,
+ people = Table('people', metadata,
Column('person_id', Integer, Sequence('person_id_seq', optional=True), primary_key=True),
Column('company_id', Integer, ForeignKey('companies.company_id')),
Column('name', String(50)),
Column('type', String(30)))
- engineers = Table('engineers', metadata,
+ engineers = Table('engineers', metadata,
Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True),
Column('status', String(30)),
Column('engineer_name', String(50)),
Column('primary_language', String(50)),
)
- managers = Table('managers', metadata,
+ managers = Table('managers', metadata,
Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True),
Column('status', String(30)),
Column('manager_name', String(50))
)
- boss = Table('boss', metadata,
+ boss = Table('boss', metadata,
Column('boss_id', Integer, ForeignKey('managers.person_id'), primary_key=True),
Column('golf_swing', String(30)),
)
- paperwork = Table('paperwork', metadata,
+ paperwork = Table('paperwork', metadata,
Column('paperwork_id', Integer, primary_key=True),
- Column('description', String(50)),
+ Column('description', String(50)),
Column('person_id', Integer, ForeignKey('people.person_id')))
-
- # create the most awkward polymorphic selects possible;
+
+ # create the most awkward polymorphic selects possible;
# the union does not include the "people" table by itself nor does it have
# "people.person_id" directly in it, and it also does not include at all
# the "boss" table
'engineer':people.join(engineers),
'manager':people.join(managers),
}, None, 'pjoin')
-
- # separate join for second-level inherit
+
+ # separate join for second-level inherit
manager_join = people.join(managers).outerjoin(boss)
mapper(Company, companies, properties={
'employees':relation(Person)
})
- mapper(Person, people, select_table=person_join, polymorphic_on=people.c.type, polymorphic_identity='person', order_by=person_join.c.person_id,
+ mapper(Person, people, select_table=person_join, polymorphic_on=people.c.type, polymorphic_identity='person', order_by=person_join.c.person_id,
properties={
'paperwork':relation(Paperwork)
})
mapper(Manager, managers, select_table=manager_join, inherits=Person, polymorphic_identity='manager')
mapper(Boss, boss, inherits=Manager, polymorphic_identity='boss')
mapper(Paperwork, paperwork)
-
+
def insert_data(self):
c1 = Company(name="MegaCorp, Inc.")
c2 = Company(name="Elbonia, Inc.")
Paperwork(description="review #3")
])
c1.employees = [e1, e2, b1, m1]
-
+
e3 = Engineer(name="vlad", engineer_name="vlad", primary_language="cobol", status="elbonian engineer")
c2.employees = [e3]
sess = create_session()
sess.save(c2)
sess.flush()
sess.clear()
-
+
global all_employees, c1_employees, c2_employees
all_employees = [e1, e2, b1, m1, e3]
c1_employees = [e1, e2, b1, m1]
c2_employees = [e3]
-
+
def test_load_all(self):
sess = create_session()
-
+
self.assertEquals(sess.query(Person).all(), all_employees)
-if __name__ == "__main__":
- testbase.main()
-
-
-
\ No newline at end of file
+
+if __name__ == "__main__":
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
def test_load(self):
# TODO: add persistence test also
- testbase.db.execute(foo.insert(), a='not bar', b='baz')
- testbase.db.execute(foo.insert(), a='also not bar', b='baz')
- testbase.db.execute(foo.insert(), a='i am bar', b='bar')
- testbase.db.execute(foo.insert(), a='also bar', b='bar')
+ testing.db.execute(foo.insert(), a='not bar', b='baz')
+ testing.db.execute(foo.insert(), a='also not bar', b='baz')
+ testing.db.execute(foo.insert(), a='i am bar', b='bar')
+ testing.db.execute(foo.insert(), a='also bar', b='bar')
class Foo(Base): pass
class Bar(Foo): pass
mapper(Foo, foo, polymorphic_on=foo.c.b)
- mapper(Baz, baz,
+ mapper(Baz, baz,
select_table=foo.join(baz, foo.c.b=='baz').alias('baz'),
inherits=Foo,
inherit_condition=(foo.c.a==baz.c.a),
mapper(Bar, bar,
select_table=foo.join(bar, foo.c.b=='bar').alias('bar'),
- inherits=Foo,
+ inherits=Foo,
inherit_condition=(foo.c.a==bar.c.a),
inherit_foreign_keys=[bar.c.a],
polymorphic_identity='bar')
- s = sessionmaker(bind=testbase.db)()
+ s = sessionmaker(bind=testing.db)()
assert [Baz(), Baz(), Bar(), Bar()] == s.query(Foo).all()
assert [Bar(), Bar()] == s.query(Bar).all()
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
class SingleInheritanceTest(AssertMixin):
def setUpAll(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
global employees_table
employees_table = Table('employees', metadata,
Column('employee_id', Integer, primary_key=True),
assert session.query(JuniorEngineer).all() == [e2]
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
"""basic tests of lazy loaded attributes"""
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions
from sqlalchemy.orm import *
def go():
# lazy load of a1.user should get it from the session
assert a1.user is u1
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
clear_mappers()
def test_many_to_one(self):
def go():
ad.user = None
assert ad.user is None
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
u1 = sess.query(User).filter_by(id=7).one()
def go():
assert ad not in u1.addresses
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.expire(u1, ['addresses'])
def go():
assert ad in u1.addresses
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.expire(u1, ['addresses'])
ad2 = Address()
def go():
ad2.user = u1
assert ad2.user is u1
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
def go():
assert ad2 in u1.addresses
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
class M2OGetTest(FixtureTest):
keep_mappers = False
assert ad2.user.name == 'jack'
# no lazy load
assert ad3.user is None
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
class LazyTest(AssertMixin):
def setUpAll(self):
global info_table, data_table, rel_table, metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
info_table = Table('infos', metadata,
Column('pk', Integer, primary_key=True),
Column('info', String(128)))
assert len(info.rels[0].datas) == 3
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
"""tests general mapper operations with an emphasis on selecting/loading"""
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions, sql
from sqlalchemy.orm import *
self.assert_result([u], User, user_address_result[0])
assert u.user_name == 'jack'
assert assert_col == [('get', 'jack')], str(assert_col)
- self.assert_sql_count(testbase.db, go, 2)
+ self.assert_sql_count(testing.db, go, 2)
u.name = 'ed'
u3 = User()
m.compile()
assert account_ids_table in m._pks_by_table
assert account_stuff_table not in m._pks_by_table
- metadata.create_all(testbase.db)
+ metadata.create_all(testing.db)
try:
- sess = create_session(bind=testbase.db)
+ sess = create_session(bind=testing.db)
a = A()
sess.save(a)
sess.flush()
- assert testbase.db.execute(account_ids_table.count()).scalar() == 1
- assert testbase.db.execute(account_stuff_table.count()).scalar() == 0
+ assert testing.db.execute(account_ids_table.count()).scalar() == 1
+ assert testing.db.execute(account_stuff_table.count()).scalar() == 0
finally:
- metadata.drop_all(testbase.db)
+ metadata.drop_all(testing.db)
def test_mappingtoouterjoin(self):
"""test mapping to an outer join, with a composite primary key that allows nulls"""
def go():
u = sess.query(User).options(eagerload('adlist')).filter_by(user_name='jack').one()
self.assert_result(u.adlist, Address, *(user_address_result[0]['addresses'][1]))
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
@testing.uses_deprecated('//select_by')
def test_extension_options(self):
def go():
self.assert_result(l, User, *user_address_result)
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
@testing.fails_on('maxdb')
def test_eageroptionswithlimit(self):
def go():
assert u.user_id == 8
assert len(u.addresses) == 3
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
sess.clear()
u = sess.query(User).filter_by(user_id=8).one()
assert u.user_id == 8
assert len(u.addresses) == 3
- assert "tbl_row_count" not in self.capture_sql(testbase.db, go)
+ assert "tbl_row_count" not in self.capture_sql(testing.db, go)
@testing.fails_on('maxdb')
def test_lazyoptionswithlimit(self):
def go():
assert u.user_id == 8
assert len(u.addresses) == 3
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_eagerdegrade(self):
"""tests that an eager relation automatically degrades to a lazy relation if eager columns are not available"""
def go():
l = sess.query(usermapper).all()
self.assert_result(l, User, *user_address_result)
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.clear()
r = users.select().execute()
l = sess.query(usermapper).instances(r)
self.assert_result(l, User, *user_address_result)
- self.assert_sql_count(testbase.db, go, 4)
+ self.assert_sql_count(testing.db, go, 4)
clear_mappers()
def go():
l = sess.query(usermapper).all()
self.assert_result(l, User, *user_all_result)
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.clear()
r = users.select().execute()
l = sess.query(usermapper).instances(r)
self.assert_result(l, User, *user_all_result)
- self.assert_sql_count(testbase.db, go, 7)
+ self.assert_sql_count(testing.db, go, 7)
def test_lazyoptions(self):
l = sess.query(User).options(lazyload('addresses')).all()
def go():
self.assert_result(l, User, *user_address_result)
- self.assert_sql_count(testbase.db, go, 3)
+ self.assert_sql_count(testing.db, go, 3)
def test_deepoptions(self):
mapper(User, users,
u = sess.query(User).all()
def go():
print u[0].orders[1].items[0].keywords[1]
- self.assert_sql_count(testbase.db, go, 3)
+ self.assert_sql_count(testing.db, go, 3)
sess.clear()
def go():
print u[0].orders[1].items[0].keywords[1]
print "-------MARK2----------"
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
sess.clear()
u = q2.all()
def go():
print u[0].orders[1].items[0].keywords[1]
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
sess.clear()
print "-------MARK5----------"
q3 = sess.query(User).options(eagerload('orders.items.keywords'))
u = q3.all()
- self.assert_sql_count(testbase.db, go, 2)
+ self.assert_sql_count(testing.db, go, 2)
class DeferredTest(MapperSuperTest):
o2 = l[2]
print o2.description
- orderby = str(orders.default_order_by()[0].compile(bind=testbase.db))
- self.assert_sql(testbase.db, go, [
+ orderby = str(orders.default_order_by()[0].compile(bind=testing.db))
+ self.assert_sql(testing.db, go, [
("SELECT orders.order_id AS orders_order_id, orders.user_id AS orders_user_id, orders.isopen AS orders_isopen FROM orders ORDER BY %s" % orderby, {}),
("SELECT orders.description AS orders_description FROM orders WHERE orders.order_id = :param_1", {'param_1':3})
])
o.order_id = 7
def go():
o.description = "some description"
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
def test_unsavedgroup(self):
"""test that deferred loading doesnt kick in when just PK cols are set"""
o.order_id = 7
def go():
o.description = "some description"
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
def test_save(self):
m = mapper(Order, orders, properties={
assert o2.opened == 1
assert o2.userident == 7
assert o2.description == 'order 3'
- orderby = str(orders.default_order_by()[0].compile(testbase.db))
- self.assert_sql(testbase.db, go, [
+ orderby = str(orders.default_order_by()[0].compile(testing.db))
+ self.assert_sql(testing.db, go, [
("SELECT orders.order_id AS orders_order_id FROM orders ORDER BY %s" % orderby, {}),
("SELECT orders.user_id AS orders_user_id, orders.description AS orders_description, orders.isopen AS orders_isopen FROM orders WHERE orders.order_id = :param_1", {'param_1':3})
])
o2.description = 'order 3'
def go():
sess.flush()
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
def test_preserve_changes(self):
"""test that the deferred load operation doesn't revert modifications on attributes"""
assert o.description == 'somenewdescription'
def go():
assert o.opened == 1
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
assert o.description == 'somenewdescription'
assert o in sess.dirty
def go():
# therefore the flush() shouldnt actually issue any SQL
sess.flush()
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
def test_options(self):
"""tests using options on a mapper to create deferred and undeferred columns"""
l = q2.all()
print l[2].user_id
- orderby = str(orders.default_order_by()[0].compile(testbase.db))
- self.assert_sql(testbase.db, go, [
+ orderby = str(orders.default_order_by()[0].compile(testing.db))
+ self.assert_sql(testing.db, go, [
("SELECT orders.order_id AS orders_order_id, orders.description AS orders_description, orders.isopen AS orders_isopen FROM orders ORDER BY %s" % orderby, {}),
("SELECT orders.user_id AS orders_user_id FROM orders WHERE orders.order_id = :param_1", {'param_1':3})
])
def go():
l = q3.all()
print l[3].user_id
- self.assert_sql(testbase.db, go, [
+ self.assert_sql(testing.db, go, [
("SELECT orders.order_id AS orders_order_id, orders.user_id AS orders_user_id, orders.description AS orders_description, orders.isopen AS orders_isopen FROM orders ORDER BY %s" % orderby, {}),
])
assert o2.opened == 1
assert o2.userident == 7
assert o2.description == 'order 3'
- orderby = str(orders.default_order_by()[0].compile(testbase.db))
- self.assert_sql(testbase.db, go, [
+ orderby = str(orders.default_order_by()[0].compile(testing.db))
+ self.assert_sql(testing.db, go, [
("SELECT orders.user_id AS orders_user_id, orders.description AS orders_description, orders.isopen AS orders_isopen, orders.order_id AS orders_order_id FROM orders ORDER BY %s" % orderby, {}),
])
o1 = sess.query(Order).first()
def go():
assert o1.description == 'order 1'
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess = create_session()
o1 = sess.query(Order).add_column(orders.c.description).first()[0]
def go():
assert o1.description == 'order 1'
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
def test_deepoptions(self):
m = mapper(User, users, properties={
item = l[0].orders[1].items[1]
def go():
print item.item_name
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
self.assert_(item.item_name == 'item 4')
sess.clear()
q2 = q.options(undefer('orders.items.item_name'))
item = l[0].orders[1].items[1]
def go():
print item.item_name
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
self.assert_(item.item_name == 'item 4')
class CompositeTypesTest(ORMTest):
for e1, e2 in zip(g.edges, g2.edges):
assert e1.start == e2.start
assert e1.end == e2.end
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
# test comparison of CompositeProperties to their object instances
g = sess.query(Graph).get([1, 1])
x = q.filter(users.c.user_id == 7).all()
x[0].addresses
l[0] = x
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
self.assert_result(l[0], User,
{'user_id' : 7, 'addresses' : (Address, [])},
x = q.filter(users.c.user_id == 7).all()
x[0].addresses
l[0] = x
- self.assert_sql_count(testbase.db, go, 2)
+ self.assert_sql_count(testing.db, go, 2)
self.assert_result(l[0], User,
{'user_id' : 7, 'addresses' : (Address, [{'address_id' : 1}])},
def setUpAll(self):
global metadata, t1, t2, t3, t4, t5, t6
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
t1 = Table('ht1', metadata,
Column('id', Integer, primary_key=True),
Column('value', String(10)))
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import gc
from sqlalchemy import MetaData, Integer, String, ForeignKey
from sqlalchemy.orm import mapper, relation, clear_mappers, create_session
return profile
class MemUsageTest(AssertMixin):
-
+
def test_session(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
- table1 = Table("mytable", metadata,
+ table1 = Table("mytable", metadata,
Column('col1', Integer, primary_key=True),
Column('col2', String(30))
)
- table2 = Table("mytable2", metadata,
+ table2 = Table("mytable2", metadata,
Column('col1', Integer, primary_key=True),
Column('col2', String(30)),
Column('col3', Integer, ForeignKey("mytable.col1"))
)
-
+
metadata.create_all()
m1 = mapper(A, table1, properties={
m2 = mapper(B, table2)
m3 = mapper(A, table1, non_primary=True)
-
+
@profile_memory
def go():
sess = create_session()
alist = sess.query(A).all()
self.assertEquals(
[
- A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
- A(col2="a2", bs=[]),
+ A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
+ A(col2="a2", bs=[]),
A(col2="a3", bs=[B(col2="b3")])
- ],
+ ],
alist)
-
+
for a in alist:
sess.delete(a)
sess.flush()
go()
-
+
metadata.drop_all()
clear_mappers()
-
+
def test_mapper_reset(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
- table1 = Table("mytable", metadata,
+ table1 = Table("mytable", metadata,
Column('col1', Integer, primary_key=True),
Column('col2', String(30))
)
- table2 = Table("mytable2", metadata,
+ table2 = Table("mytable2", metadata,
Column('col1', Integer, primary_key=True),
Column('col2', String(30)),
Column('col3', Integer, ForeignKey("mytable.col1"))
m2 = mapper(B, table2)
m3 = mapper(A, table1, non_primary=True)
-
+
sess = create_session()
a1 = A(col2="a1")
a2 = A(col2="a2")
alist = sess.query(A).all()
self.assertEquals(
[
- A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
- A(col2="a2", bs=[]),
+ A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
+ A(col2="a2", bs=[]),
A(col2="a3", bs=[B(col2="b3")])
- ],
+ ],
alist)
-
+
for a in alist:
sess.delete(a)
sess.flush()
clear_mappers()
-
+
metadata.create_all()
try:
go()
metadata.drop_all()
def test_with_inheritance(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
- table1 = Table("mytable", metadata,
+ table1 = Table("mytable", metadata,
Column('col1', Integer, primary_key=True),
Column('col2', String(30))
)
- table2 = Table("mytable2", metadata,
+ table2 = Table("mytable2", metadata,
Column('col1', Integer, ForeignKey('mytable.col1'), primary_key=True),
Column('col3', String(30)),
)
pass
class B(A):
pass
-
+
mapper(A, table1, polymorphic_on=table1.c.col2, polymorphic_identity='a')
mapper(B, table2, inherits=A, polymorphic_identity='b')
-
+
sess = create_session()
a1 = A()
a2 = A()
self.assertEquals(
[
A(), A(), B(col3='b1'), B(col3='b2')
- ],
+ ],
alist)
for a in alist:
sess.delete(a)
sess.flush()
-
+
# dont need to clear_mappers()
del B
del A
metadata.drop_all()
def test_with_manytomany(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
- table1 = Table("mytable", metadata,
+ table1 = Table("mytable", metadata,
Column('col1', Integer, primary_key=True),
Column('col2', String(30))
)
- table2 = Table("mytable2", metadata,
+ table2 = Table("mytable2", metadata,
Column('col1', Integer, primary_key=True),
Column('col2', String(30)),
)
-
+
table3 = Table('t1tot2', metadata,
Column('t1', Integer, ForeignKey('mytable.col1')),
Column('t2', Integer, ForeignKey('mytable2.col1')),
self.assertEquals(
[
A(bs=[B(col2='b1')]), A(bs=[B(col2='b2')])
- ],
+ ],
alist)
for a in alist:
finally:
metadata.drop_all()
-
+
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions
from sqlalchemy.orm import *
tables.delete()
def setUp(self):
pass
-
+
def test_unsaved(self):
"""test merge of a single transient entity."""
mapper(User, users)
sess = create_session()
-
+
u = User()
u.user_id = 7
u.user_name = "fred"
a2.email_address = 'hoho@la.com'
u.addresses.append(a1)
u.addresses.append(a2)
-
+
u2 = sess.merge(u)
self.assert_result([u], User, {'user_id':7, 'user_name':'fred', 'addresses':(Address, [{'email_address':'foo@bar.com'}, {'email_address':'hoho@la.com'}])})
self.assert_result([u2], User, {'user_id':7, 'user_name':'fred', 'addresses':(Address, [{'email_address':'foo@bar.com'}, {'email_address':'hoho@la.com'}])})
'addresses':relation(mapper(Address, addresses), backref='user')
})
sess = create_session()
-
+
# set up data and save
u = User()
u.user_id = 7
sess2 = create_session()
u2 = sess2.query(User).get(7)
self.assert_result([u2], User, {'user_id':7, 'user_name':'fred', 'addresses':(Address, [{'email_address':'foo@bar.com'}, {'email_address':'hoho@la.com'}])})
-
+
# make local changes to data
u.user_name = 'fred2'
u.addresses[1].email_address = 'hoho@lalala.com'
-
+
# new session, merge modified data into session
sess3 = create_session()
u3 = sess3.merge(u)
# insure local changes are pending
self.assert_result([u3], User, {'user_id':7, 'user_name':'fred2', 'addresses':(Address, [{'email_address':'foo@bar.com'}, {'email_address':'hoho@lalala.com'}])})
-
+
# save merged data
sess3.flush()
-
+
# assert modified/merged data was saved
sess.clear()
u = sess.query(User).get(7)
def go():
sess4.flush()
# no changes; therefore flush should do nothing
- self.assert_sql_count(testbase.db, go, 0)
-
+ self.assert_sql_count(testing.db, go, 0)
+
# test with "dontload" merge
sess5 = create_session()
print "------------------"
def go():
sess5.flush()
# no changes; therefore flush should do nothing
- # but also, dont_load wipes out any difference in committed state,
+ # but also, dont_load wipes out any difference in committed state,
# so no flush at all
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
sess4 = create_session()
u = sess4.merge(u, dont_load=True)
def go():
sess4.flush()
# afafds change flushes
- self.assert_sql_count(testbase.db, go, 1)
-
+ self.assert_sql_count(testing.db, go, 1)
+
sess5 = create_session()
u2 = sess5.query(User).get(u.user_id)
assert u2.user_name == 'fred2'
mapper(Order, orders, properties={
'items':relation(mapper(Item, orderitems))
})
-
+
mapper(User, users, properties={
'addresses':relation(mapper(Address, addresses)),
'orders':relation(Order, backref='customer')
})
-
+
sess = create_session()
u = User()
u.user_name='fred'
o.items.append(i1)
o.items.append(i2)
u.orders.append(o)
-
+
sess.save(u)
sess.flush()
-
+
sess2 = create_session()
u2 = sess2.query(User).get(u.user_id)
u.orders[0].items[1].item_name = 'item 2 modified'
o.customer.user_name = 'also fred'
sess2.merge(o)
assert o2.customer.user_name == 'also fred'
-
+
def test_saved_cascade_3(self):
"""test merge of a persistent entity with one_to_one relationship"""
def test_noload_with_eager(self):
"""this test illustrates that with noload=True, we can't just
copy the committed_state of the merged instance over; since it references collection objects
- which themselves are to be merged. This committed_state would instead need to be piecemeal
- 'converted' to represent the correct objects.
+ which themselves are to be merged. This committed_state would instead need to be piecemeal
+ 'converted' to represent the correct objects.
However, at the moment I'd rather not support this use case; if you are merging with dont_load=True,
you're typically dealing with caching and the merged objects shouldnt be "dirty".
"""
-
+
mapper(User, users, properties={
'addresses':relation(mapper(Address, addresses),uselist = True)
})
sess2 = create_session()
u2 = sess2.query(User).options(eagerload('addresses')).get(7)
-
+
sess3 = create_session()
u3 = sess3.merge(u2, dont_load=True)
def go():
sess3.flush()
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
def test_noload_disallows_dirty(self):
- """noload doesnt support 'dirty' objects right now (see test_noload_with_eager()).
+ """noload doesnt support 'dirty' objects right now (see test_noload_with_eager()).
Therefore lets assert it."""
-
+
mapper(User, users)
sess = create_session()
u = User()
assert False
except exceptions.InvalidRequestError, e:
assert "merge() with dont_load=True option does not support objects marked as 'dirty'. flush() all changes on mapped instances before merging with dont_load=True." in str(e)
-
+
u2 = sess2.query(User).get(7)
-
+
sess3 = create_session()
u3 = sess3.merge(u2, dont_load=True)
assert not sess3.dirty
def go():
sess3.flush()
- self.assert_sql_count(testbase.db, go, 0)
-
+ self.assert_sql_count(testing.db, go, 0)
+
def test_noload_sets_entityname(self):
"""test that a noload-merged entity has entity_name set, has_mapper() passes, and lazyloads work"""
mapper(User, users, properties={
sess.save(u)
sess.flush()
sess.clear()
-
+
# reload 'u' such that its addresses list hasn't loaded
u = sess.query(User).get(7)
-
+
sess2 = create_session()
u2 = sess2.merge(u, dont_load=True)
assert not sess2.dirty
def go():
assert u2.addresses != []
assert len(u2.addresses) == 1
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_noload_sets_backrefs(self):
mapper(User, users, properties={
assert not sess2.dirty
def go():
assert u2.addresses[0].user is u2
- self.assert_sql_count(testbase.db, go, 0)
-
+ self.assert_sql_count(testing.db, go, 0)
+
def test_noload_preserves_parents(self):
"""test that merge with noload does not trigger a 'delete-orphan' operation.
-
+
merge with noload sets attributes without using events. this means the
'hasparent' flag is not propagated to the newly merged instance. in fact this
- works out OK, because the '_state.parents' collection on the newly
+ works out OK, because the '_state.parents' collection on the newly
merged instance is empty; since the mapper doesn't see an active 'False' setting
- in this collection when _is_orphan() is called, it does not count as an orphan
+ in this collection when _is_orphan() is called, it does not count as an orphan
(i.e. this is the 'optimistic' logic in mapper._is_orphan().)
"""
-
+
mapper(User, users, properties={
'addresses':relation(mapper(Address, addresses),backref='user', cascade="all, delete-orphan")
})
sess2.flush()
sess2.clear()
assert sess2.query(User).get(u2.user_id).addresses[0].email_address == 'somenewaddress'
-
+
# this use case is not supported; this is with a pending Address on the pre-merged
- # object, and we currently dont support 'dirty' objects being merged with dont_load=True.
+ # object, and we currently dont support 'dirty' objects being merged with dont_load=True.
# in this case, the empty '_state.parents' collection would be an issue,
# since the optimistic flag is False in _is_orphan() for pending instances.
# so if we start supporting 'dirty' with dont_load=True, this test will need to pass
assert sess2.query(User).get(u2.user_id).addresses[0].email_address == 'somenewaddress'
except exceptions.InvalidRequestError, e:
assert "dont_load=True option does not support" in str(e)
-
-
-if __name__ == "__main__":
- testbase.main()
+
+
+if __name__ == "__main__":
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy import exceptions
class NaturalPKTest(ORMTest):
def define_tables(self, metadata):
global users, addresses, items, users_to_items
-
+
users = Table('users', metadata,
Column('username', String(50), primary_key=True),
Column('fullname', String(100)))
-
+
addresses = Table('addresses', metadata,
Column('email', String(50), primary_key=True),
Column('username', String(50), ForeignKey('users.username', onupdate="cascade")))
-
+
items = Table('items', metadata,
Column('itemname', String(50), primary_key=True),
Column('description', String(100)))
-
+
users_to_items = Table('userstoitems', metadata,
Column('username', String(50), ForeignKey('users.username', onupdate='cascade'), primary_key=True),
Column('itemname', String(50), ForeignKey('items.itemname', onupdate='cascade'), primary_key=True),
)
-
+
def test_entity(self):
mapper(User, users)
-
+
sess = create_session()
u1 = User(username='jack', fullname='jack')
-
+
sess.save(u1)
sess.flush()
assert sess.get(User, 'jack') is u1
-
+
u1.username = 'ed'
sess.flush()
-
+
def go():
assert sess.get(User, 'ed') is u1
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
assert sess.get(User, 'jack') is None
-
+
sess.clear()
u1 = sess.query(User).get('ed')
self.assertEquals(User(username='ed', fullname='jack'), u1)
assert sess.get(User, 'jack') is u1
users.update(values={u1.c.username:'jack'}).execute(username='ed')
-
+
try:
# expire/refresh works off of primary key. the PK is gone
# in this case so theres no way to look it up. criterion-
sess.clear()
assert sess.get(User, 'jack') is None
assert sess.get(User, 'ed').fullname == 'jack'
-
+
@testing.unsupported('sqlite','mysql')
def test_onetomany_passive(self):
self._test_onetomany(True)
-
+
def test_onetomany_nonpassive(self):
self._test_onetomany(False)
-
+
def _test_onetomany(self, passive_updates):
mapper(User, users, properties={
'addresses':relation(Address, passive_updates=passive_updates)
})
mapper(Address, addresses)
-
+
sess = create_session()
u1 = User(username='jack', fullname='jack')
u1.addresses.append(Address(email='jack1'))
u1.addresses.append(Address(email='jack2'))
sess.save(u1)
sess.flush()
-
+
assert sess.get(Address, 'jack1') is u1.addresses[0]
-
+
u1.username = 'ed'
sess.flush()
assert u1.addresses[0].username == 'ed'
-
+
sess.clear()
self.assertEquals([Address(username='ed'), Address(username='ed')], sess.query(Address).all())
-
+
u1 = sess.get(User, 'ed')
u1.username = 'jack'
def go():
sess.flush()
if not passive_updates:
- self.assert_sql_count(testbase.db, go, 4) # test passive_updates=False; load addresses, update user, update 2 addresses
+ self.assert_sql_count(testing.db, go, 4) # test passive_updates=False; load addresses, update user, update 2 addresses
else:
- self.assert_sql_count(testbase.db, go, 1) # test passive_updates=True; update user
+ self.assert_sql_count(testing.db, go, 1) # test passive_updates=True; update user
sess.clear()
assert User(username='jack', addresses=[Address(username='jack'), Address(username='jack')]) == sess.get(User, 'jack')
-
+
u1 = sess.get(User, 'jack')
u1.addresses = []
u1.username = 'fred'
def test_manytoone_nonpassive(self):
self._test_manytoone(False)
-
+
def _test_manytoone(self, passive_updates):
mapper(User, users)
mapper(Address, addresses, properties={
'user':relation(User, passive_updates=passive_updates)
})
-
+
sess = create_session()
a1 = Address(email='jack1')
a2 = Address(email='jack2')
-
+
u1 = User(username='jack', fullname='jack')
a1.user = u1
a2.user = u1
sess.save(a1)
sess.save(a2)
sess.flush()
-
+
u1.username = 'ed'
-
+
print id(a1), id(a2), id(u1)
print u1._state.parents
def go():
sess.flush()
if passive_updates:
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
else:
- self.assert_sql_count(testbase.db, go, 3)
-
+ self.assert_sql_count(testing.db, go, 3)
+
def go():
sess.flush()
- self.assert_sql_count(testbase.db, go, 0)
-
+ self.assert_sql_count(testing.db, go, 0)
+
assert a1.username == a2.username == 'ed'
sess.clear()
self.assertEquals([Address(username='ed'), Address(username='ed')], sess.query(Address).all())
def go():
sess.flush()
if passive_updates:
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
else:
- self.assert_sql_count(testbase.db, go, 3)
+ self.assert_sql_count(testing.db, go, 3)
self.assertEquals([Address(username='ed'), Address(username='ed')], [ad1, ad2])
sess.clear()
self.assertEquals([Address(username='ed'), Address(username='ed')], sess.query(Address).all())
sess.flush()
# check that the passive_updates is on on the other side
if passive_updates:
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
else:
- self.assert_sql_count(testbase.db, go, 3)
+ self.assert_sql_count(testing.db, go, 3)
sess.clear()
self.assertEquals([Address(username='fred'), Address(username='fred')], sess.query(Address).all())
-
-
+
+
@testing.unsupported('sqlite', 'mysql')
def test_manytomany_passive(self):
self._test_manytomany(True)
-
+
def test_manytomany_nonpassive(self):
self._test_manytomany(False)
-
+
def _test_manytomany(self, passive_updates):
mapper(User, users, properties={
'items':relation(Item, secondary=users_to_items, backref='users', passive_updates=passive_updates)
})
mapper(Item, items)
-
+
sess = create_session()
u1 = User(username='jack')
u2 = User(username='fred')
i1 = Item(itemname='item1')
i2 = Item(itemname='item2')
-
+
u1.items.append(i1)
u1.items.append(i2)
i2.users.append(u2)
self.assertEquals(['jack'], [u.username for u in r[0].users])
self.assertEquals(Item(itemname='item2'), r[1])
self.assertEquals(['jack', 'fred'], [u.username for u in r[1].users])
-
+
u2.username='ed'
def go():
sess.flush()
go()
def go():
sess.flush()
- self.assert_sql_count(testbase.db, go, 0)
-
+ self.assert_sql_count(testing.db, go, 0)
+
sess.clear()
r = sess.query(Item).all()
self.assertEquals(Item(itemname='item1'), r[0])
self.assertEquals(['jack'], [u.username for u in r[0].users])
self.assertEquals(Item(itemname='item2'), r[1])
self.assertEquals(['ed', 'jack'], sorted([u.username for u in r[1].users]))
-
+
class SelfRefTest(ORMTest):
def define_tables(self, metadata):
global nodes, Node
-
+
nodes = Table('nodes', metadata,
Column('name', String(50), primary_key=True),
Column('parent', String(50), ForeignKey('nodes.name', onupdate='cascade'))
)
-
+
class Node(Base):
pass
-
+
def test_onetomany(self):
mapper(Node, nodes, properties={
'children':relation(Node, backref=backref('parentnode', remote_side=nodes.c.name, passive_updates=False), passive_updates=False)
})
-
+
sess = create_session()
n1 = Node(name='n1')
n1.children.append(Node(name='n11'))
n1.children.append(Node(name='n13'))
sess.save(n1)
sess.flush()
-
+
n1.name = 'new n1'
sess.flush()
self.assertEquals(n1.children[1].parent, 'new n1')
self.assertEquals(['new n1', 'new n1', 'new n1'], [n.parent for n in sess.query(Node).filter(Node.name.in_(['n11', 'n12', 'n13']))])
-
-
+
+
class NonPKCascadeTest(ORMTest):
def define_tables(self, metadata):
global users, addresses
sess.save(u1)
sess.flush()
a1 = u1.addresses[0]
-
+
self.assertEquals(select([addresses.c.username]).execute().fetchall(), [('jack',), ('jack',)])
-
+
assert sess.get(Address, a1.id) is u1.addresses[0]
u1.username = 'ed'
def go():
sess.flush()
if not passive_updates:
- self.assert_sql_count(testbase.db, go, 4) # test passive_updates=False; load addresses, update user, update 2 addresses
+ self.assert_sql_count(testing.db, go, 4) # test passive_updates=False; load addresses, update user, update 2 addresses
else:
- self.assert_sql_count(testbase.db, go, 1) # test passive_updates=True; update user
+ self.assert_sql_count(testing.db, go, 1) # test passive_updates=True; update user
sess.clear()
assert User(username='jack', addresses=[Address(username='jack'), Address(username='jack')]) == sess.get(User, u1.id)
sess.clear()
-
+
u1 = sess.get(User, u1.id)
u1.addresses = []
u1.username = 'fred'
u1 = sess.get(User, u1.id)
self.assertEquals(User(username='fred', fullname='jack'), u1)
-
+
if __name__ == '__main__':
- testbase.main()
-
-
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.sessioncontext import SessionContext
@testing.uses_deprecated('SessionContext')
def setUpAll(self):
global jack, port, metadata, ctx
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
ctx = SessionContext(create_session)
jack = Table('jack', metadata,
Column('id', Integer, primary_key=True),
ctx.current.flush()
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions
from sqlalchemy.orm import *
class EmailUser(User):
pass
-
+
class PickleTest(FixtureTest):
keep_mappers = False
keep_data = False
-
+
def test_transient(self):
mapper(User, users, properties={
'addresses':relation(Address, backref="user")
})
mapper(Address, addresses)
-
+
sess = create_session()
u1 = User(name='ed')
u1.addresses.append(Address(email_address='ed@bar.com'))
-
+
u2 = pickle.loads(pickle.dumps(u1))
sess.save(u2)
sess.flush()
-
+
sess.clear()
-
+
self.assertEquals(u1, sess.query(User).get(u2.id))
-
+
def test_class_deferred_cols(self):
mapper(User, users, properties={
'name':deferred(users.c.name),
u1 = sess.query(User).get(u1.id)
assert 'name' not in u1.__dict__
assert 'addresses' not in u1.__dict__
-
+
u2 = pickle.loads(pickle.dumps(u1))
sess2 = create_session()
sess2.update(u2)
u2 = sess2.merge(u2, dont_load=True)
self.assertEquals(u2.name, 'ed')
self.assertEquals(u2, User(name='ed', addresses=[Address(email_address='ed@bar.com')]))
-
+
def test_instance_deferred_cols(self):
mapper(User, users, properties={
'addresses':relation(Address, backref="user")
})
mapper(Address, addresses)
-
+
sess = create_session()
u1 = User(name='ed')
u1.addresses.append(Address(email_address='ed@bar.com'))
sess.save(u1)
sess.flush()
sess.clear()
-
+
u1 = sess.query(User).options(defer('name'), defer('addresses.email_address')).get(u1.id)
assert 'name' not in u1.__dict__
assert 'addresses' not in u1.__dict__
-
+
u2 = pickle.loads(pickle.dumps(u1))
sess2 = create_session()
sess2.update(u2)
Column('id', Integer, ForeignKey('users.id'), primary_key=True),
Column('email_address', String(30))
)
-
+
def test_polymorphic_deferred(self):
mapper(User, users, polymorphic_identity='user', polymorphic_on=users.c.type, polymorphic_fetch='deferred')
mapper(EmailUser, email_users, inherits=User, polymorphic_identity='emailuser')
-
+
eu = EmailUser(name="user1", email_address='foo@bar.com')
sess = create_session()
sess.save(eu)
sess.flush()
sess.clear()
-
+
eu = sess.query(User).first()
eu2 = pickle.loads(pickle.dumps(eu))
sess2 = create_session()
sess2.update(eu2)
assert 'email_address' not in eu2.__dict__
self.assertEquals(eu2.email_address, 'foo@bar.com')
-
-
-
+
if __name__ == '__main__':
- testbase.main()
\ No newline at end of file
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import operator
from sqlalchemy import *
from sqlalchemy import exceptions
def test_get(self):
mapper(User, uni_users)
- assert User(id=7) == create_session(bind=testbase.db).query(User).get(7)
+ assert User(id=7) == create_session(bind=testing.db).query(User).get(7)
class GetTest(QueryTest):
def test_get(self):
Address(id=5)
]),
] == q.all()
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
class YieldTest(QueryTest):
def go():
l = q.options(contains_alias('ulist'), contains_eager('addresses')).instances(query.execute())
assert fixtures.user_address_result == l
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.clear()
def go():
l = q.options(contains_alias('ulist'), contains_eager('addresses')).from_statement(query).all()
assert fixtures.user_address_result == l
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_contains_eager(self):
def go():
l = q.options(contains_eager('addresses')).instances(selectquery.execute())
assert fixtures.user_address_result[0:3] == l
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.clear()
def go():
l = q.options(contains_eager('addresses')).from_statement(selectquery).all()
assert fixtures.user_address_result[0:3] == l
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_alias(self):
adalias = addresses.alias('adalias')
# test using a string alias name
l = q.options(contains_eager('addresses', alias="adalias")).instances(selectquery.execute())
assert fixtures.user_address_result == l
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.clear()
def go():
# test using the Alias object itself
l = q.options(contains_eager('addresses', alias=adalias)).instances(selectquery.execute())
assert fixtures.user_address_result == l
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.clear()
# test using a custom 'decorate' function
l = q.options(contains_eager('addresses', decorator=decorate)).instances(selectquery.execute())
assert fixtures.user_address_result == l
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.clear()
oalias = orders.alias('o1')
def go():
l = q.options(contains_eager('orders', alias='o1'), contains_eager('orders.items', alias='i1')).instances(query.execute())
assert fixtures.user_order_result == l
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.clear()
def go():
l = q.options(contains_eager('orders', alias=oalias), contains_eager('orders.items', alias=ialias)).instances(query.execute())
assert fixtures.user_order_result == l
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.clear()
]),
Order(description=u'order 5',items=[Item(description=u'item 5',keywords=[])])])
])
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.clear()
sel2 = orders.select(orders.c.id.in_([1,2,3]))
User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)])
]
)
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.clear()
def go():
self.assertEquals(sess.query(User).options(eagerload('addresses')).select_from(sel).filter(User.c.id==8).all(),
[User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)])]
)
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.clear()
def go():
self.assertEquals(sess.query(User).options(eagerload('addresses')).select_from(sel)[1], User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)]))
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
class CustomJoinTest(QueryTest):
keep_mappers = False
sess.clear()
def go():
assert address_result == sess.query(Address).options(eagerload('user')).all()
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
tuple_address_result = [(address, address.user) for address in address_result]
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import datetime
from sqlalchemy import *
from sqlalchemy import exceptions, types
)
def setUp(self):
global session
- session = create_session(bind=testbase.db)
- conn = testbase.db.connect()
+ session = create_session(bind=testing.db)
+ conn = testing.db.connect()
conn.create(tbl_a)
conn.create(tbl_b)
conn.create(tbl_c)
session.save_or_update(b)
def tearDown(self):
- conn = testbase.db.connect()
+ conn = testing.db.connect()
conn.drop(tbl_d)
conn.drop(tbl_c)
conn.drop(tbl_b)
conn.drop(tbl_a)
def tearDownAll(self):
- metadata.drop_all(testbase.db)
+ metadata.drop_all(testing.db)
def testDeleteRootTable(self):
session.flush()
def setUpAll(self):
global metadata, company_tbl, employee_tbl
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
company_tbl = Table('company', metadata,
Column('company_id', Integer, primary_key=True),
def setUpAll(self):
global jobs, pageversions, pages, metadata, Job, Page, PageVersion, PageComment
import datetime
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
jobs = Table("jobs", metadata,
Column("jobno", Unicode(15), primary_key=True),
Column("created", DateTime, nullable=False, default=datetime.datetime.now),
s.save(j1)
s.save(j2)
-
+
s.flush()
s.clear()
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
"""all tests involving generic mapping to Select statements"""
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions
from sqlalchemy.orm import *
Column('data', Integer),
Column('extra', String(45)),
)
-
+
def test_no_tables(self):
class Subset(object):
pass
assert False
except exceptions.InvalidRequestError, e:
assert str(e) == "Could not find any Table objects in mapped table 'SELECT x, y, z'", str(e)
-
+
def test_basic(self):
class Subset(Base):
pass
subset_select = select([common_table.c.id, common_table.c.data]).alias('subset')
subset_mapper = mapper(Subset, subset_select)
- sess = create_session(bind=testbase.db)
+ sess = create_session(bind=testing.db)
l = Subset()
l.data = 1
sess.save(l)
assert [Subset(data=1)] == sess.query(Subset).all()
# TODO: more tests mapping to selects
-
+
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions
from sqlalchemy.orm import *
def test_close(self):
"""test that flush() doenst close a connection the session didnt open"""
- c = testbase.db.connect()
+ c = testing.db.connect()
class User(object):pass
mapper(User, users)
s = create_session(bind=c)
s.flush()
def test_close_two(self):
- c = testbase.db.connect()
+ c = testing.db.connect()
try:
class User(object):pass
mapper(User, users)
@engines.close_open_connections
def test_binds_from_expression(self):
"""test that Session can extract Table objects from ClauseElements and match them to tables."""
- Session = sessionmaker(binds={users:testbase.db, addresses:testbase.db})
+ Session = sessionmaker(binds={users:testing.db, addresses:testing.db})
sess = Session()
sess.execute(users.insert(), params=dict(user_id=1, user_name='ed'))
assert sess.execute(users.select()).fetchall() == [(1, 'ed')]
mapper(User, users, properties={
'addresses':relation(Address, backref=backref("user", cascade="all"), cascade="all")
})
- Session = sessionmaker(binds={User:testbase.db, Address:testbase.db})
+ Session = sessionmaker(binds={User:testing.db, Address:testing.db})
sess.execute(users.insert(), params=dict(user_id=2, user_name='fred'))
assert sess.execute(users.select()).fetchall() == [(1, 'ed'), (2, 'fred')]
sess.close()
def test_transaction(self):
class User(object):pass
mapper(User, users)
- conn1 = testbase.db.connect()
- conn2 = testbase.db.connect()
+ conn1 = testing.db.connect()
+ conn2 = testing.db.connect()
sess = create_session(transactional=True, bind=conn1)
u = User()
assert conn2.execute("select count(1) from users").scalar() == 0
sess.commit()
assert conn1.execute("select count(1) from users").scalar() == 1
- assert testbase.db.connect().execute("select count(1) from users").scalar() == 1
+ assert testing.db.connect().execute("select count(1) from users").scalar() == 1
sess.close()
@testing.unsupported('sqlite', 'mssql') # TEMP: test causes mssql to hang
def test_autoflush(self):
class User(object):pass
mapper(User, users)
- conn1 = testbase.db.connect()
- conn2 = testbase.db.connect()
+ conn1 = testing.db.connect()
+ conn2 = testing.db.connect()
sess = create_session(bind=conn1, transactional=True, autoflush=True)
u = User()
assert conn2.execute("select count(1) from users").scalar() == 0
sess.commit()
assert conn1.execute("select count(1) from users").scalar() == 1
- assert testbase.db.connect().execute("select count(1) from users").scalar() == 1
+ assert testing.db.connect().execute("select count(1) from users").scalar() == 1
sess.close()
@testing.unsupported('sqlite', 'mssql') # TEMP: test causes mssql to hang
u2 = sess.query(User).filter_by(user_name='ed').one()
assert u2 is u
assert sess.execute("select count(1) from users", mapper=User).scalar() == 1
- assert testbase.db.connect().execute("select count(1) from users").scalar() == 0
+ assert testing.db.connect().execute("select count(1) from users").scalar() == 0
sess.commit()
assert sess.execute("select count(1) from users", mapper=User).scalar() == 1
- assert testbase.db.connect().execute("select count(1) from users").scalar() == 1
+ assert testing.db.connect().execute("select count(1) from users").scalar() == 1
sess.close()
except:
sess.rollback()
def test_autoflush_2(self):
class User(object):pass
mapper(User, users)
- conn1 = testbase.db.connect()
- conn2 = testbase.db.connect()
+ conn1 = testing.db.connect()
+ conn2 = testing.db.connect()
sess = create_session(bind=conn1, transactional=True, autoflush=True)
u = User()
sess.save(u)
sess.commit()
assert conn1.execute("select count(1) from users").scalar() == 1
- assert testbase.db.connect().execute("select count(1) from users").scalar() == 1
+ assert testing.db.connect().execute("select count(1) from users").scalar() == 1
sess.commit()
# TODO: not doing rollback of attributes right now.
def test_external_joined_transaction(self):
class User(object):pass
mapper(User, users)
- conn = testbase.db.connect()
+ conn = testing.db.connect()
trans = conn.begin()
sess = create_session(bind=conn, transactional=True, autoflush=True)
sess.begin()
class User(object):pass
mapper(User, users)
try:
- conn = testbase.db.connect()
+ conn = testing.db.connect()
trans = conn.begin()
sess = create_session(bind=conn, transactional=True, autoflush=True)
u1 = User()
'oracle', 'maxdb')
@engines.close_open_connections
def test_heavy_nesting(self):
- session = create_session(bind=testbase.db)
+ session = create_session(bind=testing.db)
session.begin()
session.connection().execute("insert into users (user_name) values ('user1')")
mapper(User, users)
mapper(Address, addresses)
- engine2 = create_engine(testbase.db.url)
+ engine2 = create_engine(testing.db.url)
sess = create_session(transactional=False, autoflush=False, twophase=True)
- sess.bind_mapper(User, testbase.db)
+ sess.bind_mapper(User, testing.db)
sess.bind_mapper(Address, engine2)
sess.begin()
u1 = User()
def test_bound_connection(self):
class User(object):pass
mapper(User, users)
- c = testbase.db.connect()
+ c = testing.db.connect()
sess = create_session(bind=c)
sess.create_transaction()
transaction = sess.transaction
u = User()
sess.save(u)
sess.flush()
- assert transaction.get_or_add(testbase.db) is transaction.get_or_add(c) is c
+ assert transaction.get_or_add(testing.db) is transaction.get_or_add(c) is c
try:
- transaction.add(testbase.db.connect())
+ transaction.add(testing.db.connect())
assert False
except exceptions.InvalidRequestError, e:
assert str(e) == "Session already has a Connection associated for the given Connection's Engine"
try:
- transaction.get_or_add(testbase.db.connect())
+ transaction.get_or_add(testing.db.connect())
assert False
except exceptions.InvalidRequestError, e:
assert str(e) == "Session already has a Connection associated for the given Connection's Engine"
try:
- transaction.add(testbase.db)
+ transaction.add(testing.db)
assert False
except exceptions.InvalidRequestError, e:
assert str(e) == "Session already has a Connection associated for the given Engine"
def test_bound_connection_transactional(self):
class User(object):pass
mapper(User, users)
- c = testbase.db.connect()
+ c = testing.db.connect()
sess = create_session(bind=c, transactional=True)
u = User()
c.execute("delete from users")
assert c.scalar("select count(1) from users") == 0
- c = testbase.db.connect()
+ c = testing.db.connect()
trans = c.begin()
sess = create_session(bind=c, transactional=False)
b = Bar()
assert b in sess
assert len(list(sess)) == 1
-
-
+
+
class ScopedSessionTest(ORMTest):
def define_tables(self, metadata):
class ScopedMapperTest(PersistTest):
def setUpAll(self):
global metadata, table, table2
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
table = Table('sometable', metadata,
Column('id', Integer, primary_key=True),
Column('data', String(30)))
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.sessioncontext import SessionContext
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import unittest
def suite():
if __name__ == '__main__':
- testbase.main(suite())
+ testenv.main(suite())
-import testbase
+import testenv; testenv.configure_for_tests()
import datetime, os
from sqlalchemy import *
from sqlalchemy import exceptions, sql
class ShardTest(PersistTest):
def setUpAll(self):
global db1, db2, db3, db4, weather_locations, weather_reports
-
+
db1 = create_engine('sqlite:///shard1.db')
db2 = create_engine('sqlite:///shard2.db')
db3 = create_engine('sqlite:///shard3.db')
Column('temperature', Float),
Column('report_time', DateTime, default=datetime.datetime.now),
)
-
+
for db in (db1, db2, db3, db4):
meta.create_all(db)
-
+
db1.execute(ids.insert(), nextid=1)
self.setup_session()
self.setup_mappers()
-
+
def tearDownAll(self):
for db in (db1, db2, db3, db4):
- db.connect().invalidate()
+ db.connect().invalidate()
for i in range(1,5):
os.remove("shard%d.db" % i)
'Europe':'europe',
'South America':'south_america'
}
-
+
def shard_chooser(mapper, instance, clause=None):
if isinstance(instance, WeatherLocation):
return shard_lookup[instance.continent]
return ['north_america', 'asia', 'europe', 'south_america']
else:
return ids
-
+
create_session = sessionmaker(class_=ShardedSession, autoflush=True, transactional=True)
create_session.configure(shards={
'europe':db3,
'south_america':db4
}, shard_chooser=shard_chooser, id_chooser=id_chooser, query_chooser=query_chooser)
-
+
def setup_mappers(self):
global WeatherLocation, Report
-
+
class WeatherLocation(object):
def __init__(self, continent, city):
self.continent = continent
'city': deferred(weather_locations.c.city),
})
- mapper(Report, weather_reports)
+ mapper(Report, weather_reports)
def test_roundtrip(self):
tokyo = WeatherLocation('Asia', 'Tokyo')
assert db2.execute(weather_locations.select()).fetchall() == [(1, 'Asia', 'Tokyo')]
assert db1.execute(weather_locations.select()).fetchall() == [(2, 'North America', 'New York'), (3, 'North America', 'Toronto')]
-
+
t = sess.query(WeatherLocation).get(tokyo.id)
assert t.city == tokyo.city
assert t.reports[0].temperature == 80.0
if __name__ == '__main__':
- testbase.main()
-
+ testenv.main()
"""Tests unitofwork operations."""
-import testbase
+import testenv; testenv.configure_for_tests()
import pickleable
from sqlalchemy import *
from sqlalchemy import exceptions, sql
success = True
# Only dialects with a sane rowcount can detect the ConcurrentModificationError
- if testbase.db.dialect.supports_sane_rowcount:
+ if testing.db.dialect.supports_sane_rowcount:
assert success
s.close()
except exceptions.ConcurrentModificationError, e:
#print e
success = True
- if testbase.db.dialect.supports_sane_multi_rowcount:
+ if testing.db.dialect.supports_sane_multi_rowcount:
assert success
@engines.close_open_connections
Session.commit()
def go():
Session.commit()
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
f1.val = unicode('someothervalue')
- self.assert_sql(testbase.db, lambda: Session.commit(), [
+ self.assert_sql(testing.db, lambda: Session.commit(), [
(
"UPDATE mutabletest SET val=:val WHERE mutabletest.id = :mutabletest_id",
{'mutabletest_id': f1.id, 'val': u'someothervalue'}
])
f1.val = unicode('hi')
f1.data.x = 9
- self.assert_sql(testbase.db, lambda: Session.commit(), [
+ self.assert_sql(testing.db, lambda: Session.commit(), [
(
"UPDATE mutabletest SET data=:data, val=:val WHERE mutabletest.id = :mutabletest_id",
{'mutabletest_id': f1.id, 'val': u'hi', 'data':f1.data}
def go():
Session.commit()
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
Session.close()
def go():
Session.commit()
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
f2.data.y = 19
def go():
Session.commit()
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
Session.close()
f3 = Session.query(Foo).filter_by(id=f1.id).one()
def go():
Session.commit()
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
def test_unicode(self):
"""test that two equivalent unicode values dont get flagged as changed.
f1.val = u'hi'
def go():
Session.commit()
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
class MutableTypesTest2(ORMTest):
def define_tables(self, metadata):
Session.commit()
def go():
Session.commit()
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
f1.data = [{'personne': {'nom': u'Smith', 'pers_id': 1, 'prenom': u'john', 'civilite': u'Mr', \
'int_3': False, 'int_2': False, 'int_1': u'23', 'VenSoir': True, 'str_1': u'Test', \
def go():
Session.commit()
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
f1.data[0]['personne']['VenSoir']= False
def go():
Session.commit()
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
Session.clear()
f = Session.query(Foo).get(f1.id)
sess.flush()
def go():
assert u.counter == 2
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def test_multi_update(self):
class User(object):
def go():
assert u.name == 'test2'
assert u.counter == 2
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
sess.clear()
u = sess.query(User).get(u.id)
defaults back from the engine."""
def define_tables(self, metadata):
- db = testbase.db
+ db = testing.db
use_string_defaults = db.engine.__module__.endswith('postgres') or db.engine.__module__.endswith('oracle') or db.engine.__module__.endswith('sqlite')
global hohoval, althohoval
-
+
if use_string_defaults:
hohotype = String(30)
hohoval = "im hoho"
def go():
# test deferred load of attribues, one select per instance
self.assert_(h2.hoho==h4.hoho==h5.hoho==hohoval)
- self.assert_sql_count(testbase.db, go, 3)
+ self.assert_sql_count(testing.db, go, 3)
def go():
self.assert_(h1.counter == h4.counter==h5.counter==7)
- self.assert_sql_count(testbase.db, go, 1)
+ self.assert_sql_count(testing.db, go, 1)
def go():
self.assert_(h3.counter == h2.counter == 12)
self.assert_(h2.foober == h3.foober == h4.foober == 'im foober')
self.assert_(h5.foober=='im the new foober')
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
Session.close()
mapper(Hoho, default_table, eager_defaults=True)
h1 = Hoho()
Session.commit()
-
+
def go():
self.assert_(h1.hoho==hohoval)
- self.assert_sql_count(testbase.db, go, 0)
-
+ self.assert_sql_count(testing.db, go, 0)
+
def test_insert_nopostfetch(self):
# populates the PassiveDefaults explicitly so there is no "post-update"
class Hoho(object):pass
self.assert_(h1.hoho=="15")
self.assert_(h1.counter=="15")
self.assert_(h1.foober=="im foober")
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
def test_update(self):
class Hoho(object):pass
u2.user_name = 'user2modified'
u1.addresses.append(a3)
del u1.addresses[0]
- self.assert_sql(testbase.db, lambda: Session.commit(),
+ self.assert_sql(testing.db, lambda: Session.commit(),
[
(
"UPDATE users SET user_name=:user_name WHERE users.user_id = :users_user_id",
u = Session.query(User).filter(User.user_name=='some name').one()
def go():
u.user_name = 'some other name'
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
Session.flush()
assert list(Session.execute(users.select(), mapper=User)) == [(42, 'some other name')]
u.user_name = ""
def go():
Session.commit()
- self.assert_sql_count(testbase.db, go, 0)
+ self.assert_sql_count(testing.db, go, 0)
def test_multitable(self):
"""tests a save of an object where each instance spans two tables. also tests
objects[2].email_address = 'imnew@foo.bar'
objects[3].user = User()
objects[3].user.user_name = 'imnewlyadded'
- self.assert_sql(testbase.db, lambda: Session.commit(), [
+ self.assert_sql(testing.db, lambda: Session.commit(), [
(
"INSERT INTO users (user_name) VALUES (:user_name)",
{'user_name': 'imnewlyadded'}
k = Keyword()
k.name = 'yellow'
objects[5].keywords.append(k)
- self.assert_sql(testbase.db, lambda:Session.commit(), [
+ self.assert_sql(testing.db, lambda:Session.commit(), [
{
"UPDATE items SET item_name=:item_name WHERE items.item_id = :items_item_id":
{'item_name': 'item4updated', 'items_item_id': objects[4].item_id}
objects[2].keywords.append(k)
dkid = objects[5].keywords[1].keyword_id
del objects[5].keywords[1]
- self.assert_sql(testbase.db, lambda:Session.commit(), [
+ self.assert_sql(testing.db, lambda:Session.commit(), [
(
"DELETE FROM itemkeywords WHERE itemkeywords.item_id = :item_id AND itemkeywords.keyword_id = :keyword_id",
[{'item_id': objects[5].item_id, 'keyword_id': dkid}]
a.user = User()
a.user.user_name = elem['user_name']
objects.append(a)
- self.assert_sql(testbase.db, lambda: Session.commit(), [
+ self.assert_sql(testing.db, lambda: Session.commit(), [
(
"INSERT INTO users (user_name) VALUES (:user_name)",
{'user_name': 'thesub'}
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.simple_setup()
from sqlalchemy import *
from sqlalchemy.orm import *
-from testlib import *
from timeit import Timer
import sys
+
meta = MetaData()
orders = Table('orders', meta,
for valueid in range(5):
val = Value()
val.attribute = attr
-
+
def run(self, number):
s = create_session()
self.order = order = Order()
-import testbase
+import testenv; testenv.simple_setup()
import sys, time
from sqlalchemy import *
from sqlalchemy.orm import *
-from testlib import *
+from testlib import profiling
db = create_engine('sqlite://')
metadata = MetaData(db)
c = conn.cursor()
persons = [('john doe', 1, 35) for i in xrange(n)]
c.executemany("insert into Person(name, sex, age) values (?,?,?)", persons)
-
+
@profiling.profiled('sa_profiled_insert_many', always=True)
def sa_profiled_insert_many(n):
i = Person_table.insert()
c = conn.cursor()
for j in xrange(n):
c.execute("insert into Person(name, sex, age) values (?,?,?)",
- ('john doe', 1, 35))
+ ('john doe', 1, 35))
def sa_unprofiled_insert(n):
# Another option is to build Person_table.insert() outside of the
sys.stdout.write("%s (%s): " % (label, ', '.join([str(a) for a in args])))
sys.stdout.flush()
-
+
t = time.clock()
fn(*args, **kw)
t2 = time.clock()
run_timed(sqlite_unprofiled_insertmany,
'pysqlite bulk insert',
50000)
-
+
run_timed(sa_unprofiled_insertmany,
'SQLAlchemy bulk insert',
50000)
run_profiled(sa_profiled_insert,
'SQLAlchemy individual insert/select, profiled',
1000)
-
+
finally:
metadata.drop_all()
# times how long it takes to create 26000 objects
-import testbase
+import testenv; testenv.simple_setup()
-from sqlalchemy.orm.attributes import *
+from sqlalchemy.orm import attributes
import time
import gc
class Address(object):
pass
-attr_manager = AttributeManager()
if manage_attributes:
- attr_manager.register_attribute(User, 'id', uselist=False)
- attr_manager.register_attribute(User, 'name', uselist=False)
- attr_manager.register_attribute(User, 'addresses', uselist=True, trackparent=True)
- attr_manager.register_attribute(Address, 'email', uselist=False)
+ attributes.register_attribute(User, 'id', False, False)
+ attributes.register_attribute(User, 'name', False, False)
+ attributes.register_attribute(User, 'addresses', True, False, trackparent=True)
+ attributes.register_attribute(Address, 'email', False, False)
now = time.time()
for i in range(0,130):
u = User()
if init_attributes:
- attr_manager.init_attr(u)
+ attributes.manage(u)
u.id = i
u.name = "user " + str(i)
if not manage_attributes:
for j in range(0,200):
a = Address()
if init_attributes:
- attr_manager.init_attr(a)
+ attributes.manage(a)
a.email = 'foo@bar.com'
u.addresses.append(a)
# gc.collect()
-import testbase
+import testenv; testenv.simple_setup()
import gc
import random, string
-from sqlalchemy.orm.attributes import *
+from sqlalchemy.orm import attributes
# with this test, run top. make sure the Python process doenst grow in size arbitrarily.
class User(object):
pass
-
+
class Address(object):
pass
-attr_manager = AttributeManager()
-attr_manager.register_attribute(User, 'id', uselist=False)
-attr_manager.register_attribute(User, 'name', uselist=False)
-attr_manager.register_attribute(User, 'addresses', uselist=True)
-attr_manager.register_attribute(Address, 'email', uselist=False)
-attr_manager.register_attribute(Address, 'user', uselist=False)
-
+attributes.register_attribute(User, 'id', False, False)
+attributes.register_attribute(User, 'name', False, False)
+attributes.register_attribute(User, 'addresses', True, False)
+attributes.register_attribute(Address, 'email', False, False)
+attributes.register_attribute(Address, 'user', False, False)
+
for i in xrange(1000):
for j in xrange(1000):
u = User()
+ attributes.manage(u)
u.name = str(random.randint(0, 100000000))
for k in xrange(10):
a = Address()
a.email_address = str(random.randint(0, 100000000))
+ attributes.manage(a)
u.addresses.append(a)
a.user = u
print "clearing"
#managed_attributes.clear()
gc.collect()
-
-
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
NUM = 500
DIVISOR = 50
-meta = MetaData(testbase.db)
-items = Table('items', meta,
+meta = MetaData(testing.db)
+items = Table('items', meta,
Column('item_id', Integer, primary_key=True),
Column('value', String(100)))
-subitems = Table('subitems', meta,
+subitems = Table('subitems', meta,
Column('sub_id', Integer, primary_key=True),
Column('parent_id', Integer, ForeignKey('items.item_id')),
Column('value', String(100)))
z = ((x-1) * DIVISOR) + y
l.append({'sub_id':z,'value':'this is item #%d' % z, 'parent_id':x})
#print l
- subitems.insert().execute(*l)
+ subitems.insert().execute(*l)
@profiling.profiled('masseagerload', always=True, sort=['cumulative'])
def masseagerload(session):
query = session.query(Item)
- l = query.select()
+ l = query.all()
print "loaded ", len(l), " items each with ", len(l[0].subs), "subitems"
def all():
-import testbase
+import testenv; testenv.configure_for_tests()
import time
#import gc
#import sqlalchemy.orm.attributes as attributes
from sqlalchemy.orm import *
from testlib import *
-NUM = 2500
+"""
+we are testing session.expunge() here, also that the attributes and unitofwork
+packages don't keep dereferenced stuff hanging around.
+
+for best results, don't run with sqlite :memory: database, and keep an eye on
+top while it runs
"""
-we are testing session.expunge() here, also that the attributes and unitofwork packages dont keep dereferenced
-stuff hanging around.
-for best results, dont run with sqlite :memory: database, and keep an eye on top while it runs"""
+NUM = 2500
class LoadTest(AssertMixin):
def setUpAll(self):
global items, meta
- meta = MetaData(testbase.db)
- items = Table('items', meta,
+ meta = MetaData(testing.db)
+ items = Table('items', meta,
Column('item_id', Integer, primary_key=True),
Column('value', String(100)))
items.create()
for y in range(x*500-500 + 1, x*500 + 1):
l.append({'item_id':y, 'value':'this is item #%d' % y})
items.insert().execute(*l)
-
+
def testload(self):
class Item(object):pass
-
+
m = mapper(Item, items)
sess = create_session()
now = time.time()
for x in range (1,NUM/100):
# this is not needed with cpython which clears non-circular refs immediately
#gc.collect()
- l = query.select(items.c.item_id.between(x*100 - 100 + 1, x*100))
+ l = query.filter(items.c.item_id.between(x*100 - 100 + 1, x*100)).all()
assert len(l) == 100
print "loaded ", len(l), " items "
# modifying each object will insure that the objects get placed in the "dirty" list
#objectstore.expunge(*l)
total = time.time() -now
print "total time ", total
-
+
+
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import sys
-sys.path.insert(0, './lib/')
-
-try:
-# import sqlalchemy.mods.threadlocal
- pass
-except:
- pass
-from sqlalchemy import *
-from testbase import Table, Column
+import testenv; testenv.simple_setup()
import time
+from sqlalchemy import *
+from sqlalchemy.orm import *
-metadata = create_engine('sqlite://', echo=True)
+metadata = MetaData(create_engine('sqlite://', echo=True))
-t1s = Table( 't1s', metadata,
+t1s = Table( 't1s', metadata,
Column( 'id', Integer, primary_key=True),
Column('data', String(100))
- )
+ )
-t2s = Table( 't2s', metadata,
+t2s = Table( 't2s', metadata,
Column( 'id', Integer, primary_key=True),
Column( 't1id', Integer, ForeignKey("t1s.id"), nullable=True ))
-t3s = Table( 't3s', metadata,
+t3s = Table( 't3s', metadata,
Column( 'id', Integer, primary_key=True),
Column( 't2id', Integer, ForeignKey("t2s.id"), nullable=True ))
-t4s = Table( 't4s', metadata,
- Column( 'id', Integer, primary_key=True),
+t4s = Table( 't4s', metadata,
+ Column( 'id', Integer, primary_key=True),
Column( 't3id', Integer, ForeignKey("t3s.id"), nullable=True ))
-
+
[t.create() for t in [t1s,t2s,t3s,t4s]]
class T1( object ): pass
class T2( object ): pass
class T3( object ): pass
-class T4( object ): pass
+class T4( object ): pass
mapper( T1, t1s )
-mapper( T2, t2s )
-mapper( T3, t3s )
-mapper( T4, t4s )
+mapper( T2, t2s )
+mapper( T3, t3s )
+mapper( T4, t4s )
cascade = "all, delete-orphan"
use_backref = True
now = time.time()
print "start"
-sess = create_session()
+sess = create_session()
o1 = T1()
-sess.save(o1)
+sess.save(o1)
for i2 in range(10):
o2 = T2()
o1.t2s.append( o2 )
-
+
for i3 in range( 10 ):
o3 = T3()
o2.t3s.append( o3 )
-
+
for i4 in range( 10 ):
o3.t4s.append ( T4() )
print i2, i3, i4
-print len([s for s in sess])
+print len([s for s in sess])
print "flushing"
sess.flush()
total = time.time() - now
-import testbase
+import testenv; testenv.configure_for_tests()
import types
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
-NUM = 250000
+NUM = 2500
class SaveTest(AssertMixin):
def setUpAll(self):
global items, metadata
- metadata = MetaData(testbase.db)
- items = Table('items', metadata,
+ metadata = MetaData(testing.db)
+ items = Table('items', metadata,
Column('item_id', Integer, primary_key=True),
Column('value', String(100)))
items.create()
def tearDownAll(self):
clear_mappers()
metadata.drop_all()
-
+
def testsave(self):
class Item(object):pass
-
+
m = mapper(Item, items)
-
+
for x in range(0,NUM/50):
sess = create_session()
query = sess.query(Item)
rep.sort(sorter)
for x in rep[0:30]:
print x
+
+
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.simple_setup()
import time, gc, resource
from sqlalchemy import *
from sqlalchemy.orm import *
-from testlib import *
+
db = create_engine('sqlite://')
metadata = MetaData(db)
-import testbase
+import testenv; testenv.configure_for_tests()
import time, gc, resource
from sqlalchemy import *
from sqlalchemy.orm import *
NUM = 100
-metadata = MetaData(testbase.db)
+metadata = MetaData(testing.db)
Person_table = Table('Person', metadata,
Column('id', Integer, primary_key=True),
Column('name', String(40)),
-import testbase
+import testenv; testenv.configure_for_tests()
import time
from datetime import datetime
q_sub_per_item = 10
q_customers = 1000
- con = testbase.db.connect()
+ con = testing.db.connect()
transaction = con.begin()
data, subdata = [], []
def run_queries():
session = create_session()
# no explicit transaction here.
-
- # build a report of summarizing the last 50 purchases and
+
+ # build a report of summarizing the last 50 purchases and
# the top 20 items from all purchases
q = session.query(Purchase). \
for item in purchase.items:
report.append(item.name)
report.extend([s.name for s in item.subitems])
-
+
# mix a little low-level with orm
# pull a report of the top 20 items of all time
_item_id = purchaseitems.c.item_id
@profiled('all')
def main():
- metadata.bind = testbase.db
+ metadata.bind = testing.db
try:
define_tables()
setup_mappers()
# load test of connection pool
-
-import testbase
+import testenv; testenv.configure_for_tests()
+import thread, time
from sqlalchemy import *
import sqlalchemy.pool as pool
-import thread,time
+from testlib import testing
-db = create_engine(testbase.db.url, pool_timeout=30, echo_pool=True)
+db = create_engine(testing.db.url, pool_timeout=30, echo_pool=True)
metadata = MetaData(db)
users_table = Table('users', metadata,
# time.sleep(.005)
# result.close()
print "runfast cycle complete"
-
-#thread.start_new_thread(runslow, ())
+
+#thread.start_new_thread(runslow, ())
for x in xrange(0,50):
thread.start_new_thread(runfast, ())
"""test that mapper compilation is threadsafe, including
-when additional mappers are created while the existing
+when additional mappers are created while the existing
collection is being compiled."""
-import testbase
+import testenv; testenv.simple_setup()
from sqlalchemy import *
from sqlalchemy.orm import *
import thread, time
from sqlalchemy.orm import mapperlib
-from testlib import *
+
meta = MetaData('sqlite:///foo.db')
-t1 = Table('t1', meta,
+t1 = Table('t1', meta,
Column('c1', Integer, primary_key=True),
Column('c2', String(30))
)
-
+
t2 = Table('t2', meta,
Column('c1', Integer, primary_key=True),
Column('c2', String(30)),
class T2(object):
pass
-
+
class FakeLock(object):
def acquire(self):pass
def release(self):pass
# uncomment this to disable the mutex in mapper compilation;
-# should produce thread collisions
+# should produce thread collisions
#mapperlib._COMPILE_MUTEX = FakeLock()
def run1():
class_mapper(Foo).compile()
foo()
time.sleep(.05)
-
+
mapper(T1, t1, properties={'t2':relation(T2, backref="t1")})
mapper(T2, t2)
print "START"
thread.start_new_thread(run3, ())
print "WAIT"
time.sleep(5)
-
#!/usr/bin/python
"""Uses ``wsgiref``, standard in Python 2.5 and also in the cheeseshop."""
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
import thread
logging.getLogger('sqlalchemy.pool').setLevel(logging.INFO)
threadids = set()
-meta = MetaData(testbase.db)
-foo = Table('foo', meta,
+meta = MetaData(testing.db)
+foo = Table('foo', meta,
Column('id', Integer, primary_key=True),
Column('data', String(30)))
class Foo(object):
" total threads ", len(threadids))
return [str("\n".join([x.data for x in l]))]
-
+
if __name__ == '__main__':
from wsgiref import simple_server
try:
server.serve_forever()
finally:
meta.drop_all()
-
-
-import testbase
+import testenv; testenv.configure_for_tests()
import unittest
alltests.addTest(unittest.findTestCases(mod, suiteClass=None))
return alltests
+
if __name__ == '__main__':
- testbase.main(suite())
+ testenv.main(suite())
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from testlib import *
Column('c1', Integer, primary_key=True),
Column('c2', String(30)))
- @profiling.profiled('ctest_insert', call_range=(40, 50), always=True)
+ @profiling.profiled('ctest_insert', call_range=(40, 50), always=True)
def test_insert(self):
t1.insert().compile()
- @profiling.profiled('ctest_update', call_range=(40, 50), always=True)
+ @profiling.profiled('ctest_update', call_range=(40, 50), always=True)
def test_update(self):
t1.update().compile()
# TODO: this is alittle high
- @profiling.profiled('ctest_select', call_range=(110, 140), always=True)
+ @profiling.profiled('ctest_select', call_range=(110, 140), always=True)
def test_select(self):
s = select([t1], t1.c.c2==t2.c.c1)
s.compile()
-
-
+
+
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from testlib import *
from sqlalchemy.pool import QueuePool
global pool
pool = QueuePool(creator = lambda: sqlite.SQLiteDialect.dbapi().connect(':memory:'), pool_size = 3, max_overflow = -1, use_threadlocal = True)
- # the WeakValueDictionary used for the pool's "threadlocal" idea adds 1-6 method calls to each of these.
- # however its just a lot easier stability wise than dealing with a strongly referencing dict of weakrefs.
- # [ticket:754] immediately got opened when we tried a dict of weakrefs, and though the solution there
- # is simple, it still doesn't solve the issue of "dead" weakrefs sitting in the dict taking up space
-
+ # the WeakValueDictionary used for the pool's "threadlocal" idea adds 1-6
+ # method calls to each of these. however its just a lot easier stability
+ # wise than dealing with a strongly referencing dict of weakrefs.
+ # [ticket:754] immediately got opened when we tried a dict of weakrefs,
+ # and though the solution there is simple, it still doesn't solve the
+ # issue of "dead" weakrefs sitting in the dict taking up space
+
@profiling.profiled('pooltest_connect', call_range=(40, 50), always=True)
def test_first_connect(self):
conn = pool.connect()
conn2 = pool.connect()
return conn2
c2 = go()
-
+
def test_second_samethread_connect(self):
conn = pool.connect()
-
+
@profiling.profiled('pooltest_samethread_connect', call_range=(4, 4), always=True)
def go():
return pool.connect()
c2 = go()
-
+
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
import datetime
import sys
import time
-import testbase
+import testenv; testenv.configure_for_tests()
from testlib import testing, profiling
-
-
from sqlalchemy import *
+ITERATIONS = 1
+
class ZooMarkTest(testing.AssertMixin):
"""Runs the ZooMark and squawks if method counts vary from the norm.
@profiling.profiled('create', call_range=(1500, 1880), always=True)
def test_1_create_tables(self):
global metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
Zoo = Table('Zoo', metadata,
Column('ID', Integer, Sequence('zoo_id_seq'), primary_key=True, index=True),
def test_8_drop(self):
metadata.drop_all()
-ITERATIONS = 1
-
if __name__ ==u'__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import unittest
'sql.constraints',
'sql.generative',
-
+
# SQL syntax
'sql.select',
'sql.selectable',
- 'sql.case_statement',
+ 'sql.case_statement',
'sql.labels',
'sql.unicode',
-
+
# assorted round-trip tests
'sql.functions',
'sql.query',
'sql.quote',
'sql.rowcount',
-
+
# defaults, sequences (postgres/oracle)
'sql.defaults',
)
return alltests
if __name__ == '__main__':
- testbase.main(suite())
+ testenv.main(suite())
-import testbase
+import testenv; testenv.configure_for_tests()
import sys
from sqlalchemy import *
from testlib import *
class CaseTest(PersistTest):
def setUpAll(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
global info_table
info_table = Table('infos', metadata,
- Column('pk', Integer, primary_key=True),
- Column('info', String(30)))
+ Column('pk', Integer, primary_key=True),
+ Column('info', String(30)))
info_table.create()
info_table.insert().execute(
- {'pk':1, 'info':'pk_1_data'},
- {'pk':2, 'info':'pk_2_data'},
- {'pk':3, 'info':'pk_3_data'},
- {'pk':4, 'info':'pk_4_data'},
- {'pk':5, 'info':'pk_5_data'},
- {'pk':6, 'info':'pk_6_data'})
+ {'pk':1, 'info':'pk_1_data'},
+ {'pk':2, 'info':'pk_2_data'},
+ {'pk':3, 'info':'pk_3_data'},
+ {'pk':4, 'info':'pk_4_data'},
+ {'pk':5, 'info':'pk_5_data'},
+ {'pk':6, 'info':'pk_6_data'})
def tearDownAll(self):
info_table.drop()
@testing.fails_on('maxdb')
def testcase(self):
inner = select([case([
- [info_table.c.pk < 3,
+ [info_table.c.pk < 3,
literal('lessthan3', type_=String)],
- [and_(info_table.c.pk >= 3, info_table.c.pk < 7),
+ [and_(info_table.c.pk >= 3, info_table.c.pk < 7),
literal('gt3', type_=String)]]).label('x'),
- info_table.c.pk, info_table.c.info],
+ info_table.c.pk, info_table.c.info],
from_obj=[info_table]).alias('q_inner')
inner_result = inner.execute().fetchall()
]
w_else = select([case([
- [info_table.c.pk < 3,
+ [info_table.c.pk < 3,
literal(3, type_=Integer)],
- [and_(info_table.c.pk >= 3, info_table.c.pk < 6),
+ [and_(info_table.c.pk >= 3, info_table.c.pk < 6),
literal(6, type_=Integer)]],
else_ = 0).label('x'),
- info_table.c.pk, info_table.c.info],
+ info_table.c.pk, info_table.c.info],
from_obj=[info_table]).alias('q_inner')
else_result = w_else.execute().fetchall()
]
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import exceptions
from testlib import *
class ConstraintTest(AssertMixin):
-
+
def setUp(self):
global metadata
- metadata = MetaData(testbase.db)
-
+ metadata = MetaData(testing.db)
+
def tearDown(self):
metadata.drop_all()
-
+
def test_constraint(self):
- employees = Table('employees', metadata,
+ employees = Table('employees', metadata,
Column('id', Integer),
Column('soc', String(40)),
Column('name', String(30)),
metadata.create_all()
def test_circular_constraint(self):
- a = Table("a", metadata,
+ a = Table("a", metadata,
Column('id', Integer, primary_key=True),
Column('bid', Integer),
ForeignKeyConstraint(["bid"], ["b.id"], name="afk")
metadata.create_all()
def test_circular_constraint_2(self):
- a = Table("a", metadata,
+ a = Table("a", metadata,
Column('id', Integer, primary_key=True),
Column('bid', Integer, ForeignKey("b.id")),
)
Column("aid", Integer, ForeignKey("a.id", use_alter=True, name="bfk")),
)
metadata.create_all()
-
+
@testing.unsupported('mysql')
def test_check_constraint(self):
- foo = Table('foo', metadata,
+ foo = Table('foo', metadata,
Column('id', Integer, primary_key=True),
Column('x', Integer),
Column('y', Integer),
CheckConstraint('x>y'))
- bar = Table('bar', metadata,
+ bar = Table('bar', metadata,
Column('id', Integer, primary_key=True),
Column('x', Integer, CheckConstraint('x>7')),
Column('z', Integer)
assert False
except exceptions.SQLError:
assert True
-
+
def test_unique_constraint(self):
foo = Table('foo', metadata,
Column('id', Integer, primary_key=True),
assert False
except exceptions.SQLError:
assert True
-
+
def test_index_create(self):
employees = Table('employees', metadata,
Column('id', Integer, primary_key=True),
Column('last_name', String(30)),
Column('email_address', String(30)))
employees.create()
-
+
i = Index('employee_name_index',
employees.c.last_name, employees.c.first_name)
i.create()
assert i in employees.indexes
-
+
i2 = Index('employee_email_index',
- employees.c.email_address, unique=True)
+ employees.c.email_address, unique=True)
i2.create()
assert i2 in employees.indexes
Column('emailAddress', String(30)))
employees.create()
-
+
i = Index('employeeNameIndex',
employees.c.lastName, employees.c.firstName)
i.create()
-
+
i = Index('employeeEmailIndex',
- employees.c.emailAddress, unique=True)
+ employees.c.emailAddress, unique=True)
i.create()
# Check that the table is useable. This is mostly for pg,
Index('sport_announcer', events.c.sport, events.c.announcer, unique=True)
Index('idx_winners', events.c.winner)
-
+
index_names = [ ix.name for ix in events.indexes ]
assert 'ix_events_name' in index_names
assert 'ix_events_location' in index_names
assert len(index_names) == 4
capt = []
- connection = testbase.db.connect()
+ connection = testing.db.connect()
# TODO: hacky, put a real connection proxy in
ex = connection._Connection__execute_raw
def proxy(context):
capt.append(repr(context.parameters))
ex(context)
connection._Connection__execute_raw = proxy
- schemagen = testbase.db.dialect.schemagenerator(testbase.db.dialect, connection)
+ schemagen = testing.db.dialect.schemagenerator(testing.db.dialect, connection)
schemagen.traverse(events)
-
+
assert capt[0].strip().startswith('CREATE TABLE events')
-
+
s = set([capt[x].strip() for x in [2,4,6,8]])
-
+
assert s == set([
'CREATE UNIQUE INDEX ix_events_name ON events (name)',
'CREATE INDEX ix_events_location ON events (location)',
'CREATE UNIQUE INDEX sport_announcer ON events (sport, announcer)',
'CREATE INDEX idx_winners ON events (winner)'
])
-
+
# verify that the table is functional
events.insert().execute(id=1, name='hockey finals', location='rink',
sport='hockey', announcer='some canadian',
winner='sweden')
ss = events.select().execute().fetchall()
-
-if __name__ == "__main__":
- testbase.main()
+
+if __name__ == "__main__":
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import datetime
from sqlalchemy import *
from sqlalchemy import exceptions, schema, util
def setUpAll(self):
global t, f, f2, ts, currenttime, metadata, default_generator
- db = testbase.db
+ db = testing.db
metadata = MetaData(db)
default_generator = {'x':50}
c = ColumnDefault(fn)
def teststandalone(self):
- c = testbase.db.engine.contextual_connect()
+ c = testing.db.engine.contextual_connect()
x = c.execute(t.c.col1.default)
y = t.c.col2.default.execute()
z = c.execute(t.c.col3.default)
t.insert().execute()
t.insert().execute()
- ctexec = select([currenttime.label('now')], bind=testbase.db).scalar()
+ ctexec = select([currenttime.label('now')], bind=testing.db).scalar()
l = t.select().execute()
today = datetime.date.today()
self.assertEquals(l.fetchall(), [
def testinsertmany(self):
# MySQL-Python 1.2.2 breaks functions in execute_many :(
if (testing.against('mysql') and
- testbase.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
+ testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
return
r = t.insert().execute({}, {}, {})
def testupdatemany(self):
# MySQL-Python 1.2.2 breaks functions in execute_many :(
if (testing.against('mysql') and
- testbase.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
+ testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
return
t.insert().execute({}, {}, {})
key values in memory before insert; otherwise we cant locate the just inserted row."""
try:
- meta = MetaData(testbase.db)
- testbase.db.execute("""
+ meta = MetaData(testing.db)
+ testing.db.execute("""
CREATE TABLE speedy_users
(
speedy_user_id SERIAL PRIMARY KEY,
l = t.select().execute().fetchall()
self.assert_(l == [(1, 'user', 'lala')])
finally:
- testbase.db.execute("drop table speedy_users", None)
+ testing.db.execute("drop table speedy_users", None)
class PKDefaultTest(PersistTest):
def setUpAll(self):
global metadata, t1, t2
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
t2 = Table('t2', metadata,
Column('nextid', Integer))
def setUp(self):
global aitable, aimeta
- aimeta = MetaData(testbase.db)
+ aimeta = MetaData(testing.db)
aitable = Table("aitest", aimeta,
Column('id', Integer, Sequence('ai_id_seq', optional=True),
primary_key=True),
@testing.fails_on('sqlite')
def testnonautoincrement(self):
# sqlite INT primary keys can be non-unique! (only for ints)
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
nonai_table = Table("nonaitest", meta,
Column('id', Integer, autoincrement=False, primary_key=True),
Column('data', String(20)))
[(1, 1, None), (2, None, 'row 2'), (3, 3, 'row 3'), (4, 4, None)])
def test_autoincrement_autocommit(self):
- self._test_autoincrement(testbase.db)
+ self._test_autoincrement(testing.db)
def test_autoincrement_transaction(self):
- con = testbase.db.connect()
+ con = testing.db.connect()
tx = con.begin()
try:
try:
con.close()
def test_autoincrement_fk(self):
- if not testbase.db.dialect.supports_pk_autoincrement:
+ if not testing.db.dialect.supports_pk_autoincrement:
return True
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
# No optional sequence here.
nodes = Table('nodes', metadata,
def setUpAll(self):
global cartitems, sometable, metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
cartitems = Table("cartitems", metadata,
Column("cart_id", Integer, Sequence('cart_id_seq'), primary_key=True),
Column("description", String(40)),
result = sometable.insert().execute(name="somename")
assert 'id' in result.postfetch_cols()
-
+
result = sometable.insert().execute(name="someother")
assert 'id' in result.postfetch_cols()
# maxdb db-api seems to double-execute NEXTVAL internally somewhere,
# throwing off the numbers for these tests...
def test_implicit_sequence_exec(self):
- s = Sequence("my_sequence", metadata=MetaData(testbase.db))
+ s = Sequence("my_sequence", metadata=MetaData(testing.db))
s.create()
try:
x = s.execute()
@testing.fails_on('maxdb')
def teststandalone_explicit(self):
s = Sequence("my_sequence")
- s.create(bind=testbase.db)
+ s.create(bind=testing.db)
try:
- x = s.execute(testbase.db)
+ x = s.execute(testing.db)
self.assert_(x == 1)
finally:
- s.drop(testbase.db)
+ s.drop(testing.db)
def test_checkfirst(self):
s = Sequence("my_sequence")
- s.create(testbase.db, checkfirst=False)
- s.create(testbase.db, checkfirst=True)
- s.drop(testbase.db, checkfirst=False)
- s.drop(testbase.db, checkfirst=True)
+ s.create(testing.db, checkfirst=False)
+ s.create(testing.db, checkfirst=True)
+ s.drop(testing.db, checkfirst=False)
+ s.drop(testing.db, checkfirst=True)
@testing.fails_on('maxdb')
def teststandalone2(self):
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import datetime
from sqlalchemy import *
from sqlalchemy import databases, exceptions, sql
# if the configured dialect is out-of-tree or not yet in __all__, include it
# too.
-if testbase.db.name not in databases.__all__:
- dialects.append(testbase.db.dialect)
+if testing.db.name not in databases.__all__:
+ dialects.append(testing.db.dialect)
class CompileTest(SQLCompileTest):
class ExecuteTest(PersistTest):
def test_standalone_execute(self):
- x = testbase.db.func.current_date().execute().scalar()
- y = testbase.db.func.current_date().select().execute().scalar()
- z = testbase.db.func.current_date().scalar()
+ x = testing.db.func.current_date().execute().scalar()
+ y = testing.db.func.current_date().select().execute().scalar()
+ z = testing.db.func.current_date().scalar()
assert (x == y == z) is True
# ansi func
- x = testbase.db.func.current_date()
+ x = testing.db.func.current_date()
assert isinstance(x.type, Date)
assert isinstance(x.execute().scalar(), datetime.date)
def test_conn_execute(self):
- conn = testbase.db.connect()
+ conn = testing.db.connect()
try:
x = conn.execute(func.current_date()).scalar()
y = conn.execute(func.current_date().select()).scalar()
get overridden.
"""
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
t = Table('t1', meta,
Column('id', Integer, Sequence('t1idseq', optional=True), primary_key=True),
Column('value', Integer)
@testing.fails_on_everything_except('postgres')
def test_as_from(self):
# TODO: shouldnt this work on oracle too ?
- x = testbase.db.func.current_date().execute().scalar()
- y = testbase.db.func.current_date().select().execute().scalar()
- z = testbase.db.func.current_date().scalar()
- w = select(['*'], from_obj=[testbase.db.func.current_date()]).scalar()
+ x = testing.db.func.current_date().execute().scalar()
+ y = testing.db.func.current_date().select().execute().scalar()
+ z = testing.db.func.current_date().scalar()
+ w = select(['*'], from_obj=[testing.db.func.current_date()]).scalar()
# construct a column-based FROM object out of a function, like in [ticket:172]
- s = select([sql.column('date', type_=DateTime)], from_obj=[testbase.db.func.current_date()])
+ s = select([sql.column('date', type_=DateTime)], from_obj=[testing.db.func.current_date()])
q = s.execute().fetchone()[s.c.date]
r = s.alias('datequery').select().scalar()
for row in statement.execute(*args, **kw).fetchall()])
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.sql import table, column, ClauseElement
from sqlalchemy.sql.expression import _clone
self.assert_compile(s, "SELECT table1.col1, table1.col2, table1.col3 FROM table1")
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from testlib import *
from sqlalchemy.engine import default
class LabelTypeTest(PersistTest):
def test_type(self):
m = MetaData()
- t = Table('sometable', m,
+ t = Table('sometable', m,
Column('col1', Integer),
Column('col2', Float))
assert isinstance(t.c.col1.label('hi').type, Integer)
class LongLabelsTest(SQLCompileTest):
def setUpAll(self):
global metadata, table1, maxlen
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
table1 = Table("some_large_named_table", metadata,
Column("this_is_the_primarykey_column", Integer, Sequence("this_is_some_large_seq"), primary_key=True),
Column("this_is_the_data_column", String(30))
)
-
+
metadata.create_all()
-
- maxlen = testbase.db.dialect.max_identifier_length
- testbase.db.dialect.max_identifier_length = IDENT_LENGTH
-
+
+ maxlen = testing.db.dialect.max_identifier_length
+ testing.db.dialect.max_identifier_length = IDENT_LENGTH
+
def tearDown(self):
table1.delete().execute()
-
+
def tearDownAll(self):
metadata.drop_all()
- testbase.db.dialect.max_identifier_length = maxlen
-
+ testing.db.dialect.max_identifier_length = maxlen
+
def test_result(self):
table1.insert().execute(**{"this_is_the_primarykey_column":1, "this_is_the_data_column":"data1"})
table1.insert().execute(**{"this_is_the_primarykey_column":2, "this_is_the_data_column":"data2"})
(3, "data3"),
(4, "data4"),
], repr(result)
-
+
def test_colbinds(self):
table1.insert().execute(**{"this_is_the_primarykey_column":1, "this_is_the_data_column":"data1"})
table1.insert().execute(**{"this_is_the_primarykey_column":2, "this_is_the_data_column":"data2"})
table1.c.this_is_the_primarykey_column == 2
)).execute()
assert r.fetchall() == [(2, "data2"), (4, "data4")]
-
+
def test_insert_no_pk(self):
table1.insert().execute(**{"this_is_the_data_column":"data1"})
table1.insert().execute(**{"this_is_the_data_column":"data2"})
table1.insert().execute(**{"this_is_the_data_column":"data3"})
table1.insert().execute(**{"this_is_the_data_column":"data4"})
-
+
def test_subquery(self):
- # this is the test that fails if the "max identifier length" is shorter than the
+ # this is the test that fails if the "max identifier length" is shorter than the
# length of the actual columns created, because the column names get truncated.
# if you try to separate "physical columns" from "labels", and only truncate the labels,
# the compiler.DefaultCompiler.visit_select() logic which auto-labels columns in a subquery (for the purposes of sqlite compat) breaks the code,
q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias()
x = select([q], use_labels=True)
- self.assert_compile(x, "SELECT anon_1.this_is_the_primarykey_column AS anon_1_this_is_the_prim_1, anon_1.this_is_the_data_column AS anon_1_this_is_the_data_2 "
+ self.assert_compile(x, "SELECT anon_1.this_is_the_primarykey_column AS anon_1_this_is_the_prim_1, anon_1.this_is_the_data_column AS anon_1_this_is_the_data_2 "
"FROM (SELECT some_large_named_table.this_is_the_primarykey_column AS this_is_the_primarykey_column, some_large_named_table.this_is_the_data_column AS this_is_the_data_column "
"FROM some_large_named_table "
"WHERE some_large_named_table.this_is_the_primarykey_column = :some_large_named_table__1) AS anon_1", dialect=compile_dialect)
-
+
print x.execute().fetchall()
-
+
def test_oid(self):
"""test that a primary key column compiled as the 'oid' column gets proper length truncation"""
from sqlalchemy.databases import postgres
assert str(x).endswith("""ORDER BY foo.some_large_named_table_t_2""")
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import datetime
from sqlalchemy import *
from sqlalchemy import exceptions, sql
def setUpAll(self):
global users, addresses, metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
users = Table('query_users', metadata,
Column('user_id', INT, primary_key = True),
Column('user_name', VARCHAR(20)),
{'id':'id1', 'bar':'hi'},
),
]:
- if testbase.db.name in supported['unsupported']:
+ if testing.db.name in supported['unsupported']:
continue
try:
table.create()
def test_compiled_execute(self):
users.insert().execute(user_id = 7, user_name = 'jack')
s = select([users], users.c.user_id==bindparam('id')).compile()
- c = testbase.db.connect()
+ c = testing.db.connect()
assert c.execute(s, id=7).fetchall()[0]['user_id'] == 7
def test_compiled_insert_execute(self):
users.insert().compile().execute(user_id = 7, user_name = 'jack')
s = select([users], users.c.user_id==bindparam('id')).compile()
- c = testbase.db.connect()
+ c = testing.db.connect()
assert c.execute(s, id=7).fetchall()[0]['user_id'] == 7
def test_repeated_bindparams(self):
def test_bindparams_in_params(self):
"""test that a _BindParamClause itself can be a key in the params dict"""
-
+
users.insert().execute(user_id = 7, user_name = 'jack')
users.insert().execute(user_id = 8, user_name = 'fred')
u = bindparam('userid')
r = users.select(users.c.user_name==u).execute({u:'fred'}).fetchall()
assert len(r) == 1
-
+
def test_bindparam_shortname(self):
"""test the 'shortname' field on BindParamClause."""
users.insert().execute(user_id = 7, user_name = 'jack')
self.assert_(r.user_id == r['user_id'] == r[users.c.user_id] == 2)
self.assert_(r.user_name == r['user_name'] == r[users.c.user_name] == 'jack')
- r = text("select * from query_users where user_id=2", bind=testbase.db).execute().fetchone()
+ r = text("select * from query_users where user_id=2", bind=testing.db).execute().fetchone()
self.assert_(r.user_id == r['user_id'] == r[users.c.user_id] == 2)
self.assert_(r.user_name == r['user_name'] == r[users.c.user_name] == 'jack')
# test slices
- r = text("select * from query_addresses", bind=testbase.db).execute().fetchone()
+ r = text("select * from query_addresses", bind=testing.db).execute().fetchone()
self.assert_(r[0:1] == (1,))
self.assert_(r[1:] == (2, 'foo@bar.com'))
self.assert_(r[:-1] == (1, 2))
-
+
# test a little sqlite weirdness - with the UNION, cols come back as "query_users.user_id" in cursor.description
r = text("select query_users.user_id, query_users.user_name from query_users "
- "UNION select query_users.user_id, query_users.user_name from query_users", bind=testbase.db).execute().fetchone()
+ "UNION select query_users.user_id, query_users.user_name from query_users", bind=testing.db).execute().fetchone()
self.assert_(r['user_id']) == 1
self.assert_(r['user_name']) == "john"
# test using literal tablename.colname
- r = text('select query_users.user_id AS "query_users.user_id", query_users.user_name AS "query_users.user_name" from query_users', bind=testbase.db).execute().fetchone()
+ r = text('select query_users.user_id AS "query_users.user_id", query_users.user_name AS "query_users.user_name" from query_users', bind=testing.db).execute().fetchone()
self.assert_(r['query_users.user_id']) == 1
self.assert_(r['query_users.user_name']) == "john"
-
+
def test_ambiguous_column(self):
users.insert().execute(user_id=1, user_name='john')
r = users.select().execute().fetchone()
self.assertEqual(len(r), 2)
r.close()
- r = testbase.db.execute('select user_name, user_id from query_users', {}).fetchone()
+ r = testing.db.execute('select user_name, user_id from query_users', {}).fetchone()
self.assertEqual(len(r), 2)
r.close()
- r = testbase.db.execute('select user_name from query_users', {}).fetchone()
+ r = testing.db.execute('select user_name from query_users', {}).fetchone()
self.assertEqual(len(r), 1)
r.close()
def test_column_order_with_text_query(self):
# should return values in query order
users.insert().execute(user_id=1, user_name='foo')
- r = testbase.db.execute('select user_name, user_id from query_users', {}).fetchone()
+ r = testing.db.execute('select user_name, user_id from query_users', {}).fetchone()
self.assertEqual(r[0], 'foo')
self.assertEqual(r[1], 1)
self.assertEqual([x.lower() for x in r.keys()], ['user_name', 'user_id'])
@testing.unsupported('oracle', 'firebird', 'maxdb')
def test_column_accessor_shadow(self):
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
shadowed = Table('test_shadowed', meta,
Column('shadow_id', INT, primary_key = True),
Column('shadow_name', VARCHAR(20)),
different databases."""
def setUpAll(self):
global metadata, t1, t2, t3
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
t1 = Table('t1', metadata,
Column('col1', Integer, Sequence('t1pkseq'), primary_key=True),
Column('col2', String(30)),
global metadata
global t1, t2, t3
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
t1 = Table('t1', metadata,
Column('t1_id', Integer, primary_key=True),
Column('name', String(32)))
found = sorted([tuple(row)
for row in statement.execute().fetchall()])
-
+
self.assertEquals(found, sorted(expected))
def test_join_x1(self):
class OperatorTest(PersistTest):
def setUpAll(self):
global metadata, flds
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
flds = Table('flds', metadata,
Column('idcol', Integer, Sequence('t1pkseq'), primary_key=True),
Column('intcol', Integer),
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy import sql
from sqlalchemy.sql import compiler
# such as: spaces, quote characters, punctuation characters, set up tests for those as
# well.
global table1, table2, table3
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
table1 = Table('WorstCase1', metadata,
Column('lowercase', Integer, primary_key=True),
Column('UPPERCASE', Integer),
Column('MixedCase', Integer))
table1.create()
table2.create()
-
+
def tearDown(self):
table1.delete().execute()
table2.delete().execute()
-
+
def tearDownAll(self):
table1.drop()
table2.drop()
-
+
def testbasic(self):
table1.insert().execute({'lowercase':1,'UPPERCASE':2,'MixedCase':3,'a123':4},
{'lowercase':2,'UPPERCASE':2,'MixedCase':3,'a123':4},
table2.insert().execute({'d123':1,'u123':2,'MixedCase':3},
{'d123':2,'u123':2,'MixedCase':3},
{'d123':4,'u123':3,'MixedCase':2})
-
+
res1 = select([table1.c.lowercase, table1.c.UPPERCASE, table1.c.MixedCase, table1.c.a123]).execute().fetchall()
print res1
assert(res1==[(1,2,3,4),(2,2,3,4),(4,3,2,1)])
-
+
res2 = select([table2.c.d123, table2.c.u123, table2.c.MixedCase]).execute().fetchall()
print res2
assert(res2==[(1,2,3),(2,2,3),(4,3,2)])
-
+
def testreflect(self):
- meta2 = MetaData(testbase.db)
+ meta2 = MetaData(testing.db)
t2 = Table('WorstCase2', meta2, autoload=True, quote=True)
assert 'MixedCase' in t2.c
table2.insert().execute({'d123':1,'u123':2,'MixedCase':3},
{'d123':2,'u123':2,'MixedCase':3},
{'d123':4,'u123':3,'MixedCase':2})
-
+
res1 = select([table1.c.lowercase, table1.c.UPPERCASE, table1.c.MixedCase, table1.c.a123], use_labels=True).execute().fetchall()
print res1
assert(res1==[(1,2,3,4),(2,2,3,4),(4,3,2,1)])
-
+
res2 = select([table2.c.d123, table2.c.u123, table2.c.MixedCase], use_labels=True).execute().fetchall()
print res2
assert(res2==[(1,2,3),(2,2,3),(4,3,2)])
-
- @testing.unsupported('oracle')
+
+ @testing.unsupported('oracle')
def testlabels(self):
"""test the quoting of labels.
-
+
if labels arent quoted, a query in postgres in particular will fail since it produces:
-
- SELECT LaLa.lowercase, LaLa."UPPERCASE", LaLa."MixedCase", LaLa."ASC"
+
+ SELECT LaLa.lowercase, LaLa."UPPERCASE", LaLa."MixedCase", LaLa."ASC"
FROM (SELECT DISTINCT "WorstCase1".lowercase AS lowercase, "WorstCase1"."UPPERCASE" AS UPPERCASE, "WorstCase1"."MixedCase" AS MixedCase, "WorstCase1"."ASC" AS ASC \nFROM "WorstCase1") AS LaLa
-
+
where the "UPPERCASE" column of "LaLa" doesnt exist.
"""
x = table1.select(distinct=True).alias("LaLa").select().scalar()
def testlabels2(self):
metadata = MetaData()
- table = Table("ImATable", metadata,
+ table = Table("ImATable", metadata,
Column("col1", Integer))
x = select([table.c.col1.label("ImATable_col1")]).alias("SomeAlias")
assert str(select([x.c.ImATable_col1])) == '''SELECT "SomeAlias"."ImATable_col1" \nFROM (SELECT "ImATable".col1 AS "ImATable_col1" \nFROM "ImATable") AS "SomeAlias"'''
x = select([sql.literal_column("'foo'").label("somelabel")], from_obj=[table]).alias("AnAlias")
x = x.select()
assert str(x) == '''SELECT "AnAlias".somelabel \nFROM (SELECT 'foo' AS somelabel \nFROM "ImATable") AS "AnAlias"'''
-
+
x = select([sql.literal_column("'FooCol'").label("SomeLabel")], from_obj=[table])
x = x.select()
assert str(x) == '''SELECT "SomeLabel" \nFROM (SELECT 'FooCol' AS "SomeLabel" \nFROM "ImATable")'''
-
+
class PreparerTest(PersistTest):
"""Test the db-agnostic quoting services of IdentifierPreparer."""
a_eq(unformat('foo.`bar`'), ['foo', 'bar'])
a_eq(unformat('`foo`.bar'), ['foo', 'bar'])
a_eq(unformat('`foo`.`b``a``r`.`baz`'), ['foo', 'b`a`r', 'baz'])
-
+
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from testlib import *
class FoundRowsTest(AssertMixin):
"""tests rowcount functionality"""
def setUpAll(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
global employees_table
department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='Z')
print "expecting 3, dialect reports %s" % r.rowcount
- if testbase.db.dialect.supports_sane_rowcount:
+ if testing.db.dialect.supports_sane_rowcount:
assert r.rowcount == 3
def test_update_rowcount2(self):
department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='C')
print "expecting 3, dialect reports %s" % r.rowcount
- if testbase.db.dialect.supports_sane_rowcount:
+ if testing.db.dialect.supports_sane_rowcount:
assert r.rowcount == 3
def test_delete_rowcount(self):
department = employees_table.c.department
r = employees_table.delete(department=='C').execute()
print "expecting 3, dialect reports %s" % r.rowcount
- if testbase.db.dialect.supports_sane_rowcount:
+ if testing.db.dialect.supports_sane_rowcount:
assert r.rowcount == 3
if __name__ == '__main__':
- testbase.main()
-
-
-
-
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import re, operator
from sqlalchemy import *
from sqlalchemy import exceptions, sql, util
clause = (table1.c.myid == 12) & table1.c.myid.between(15, 20) & table1.c.myid.like('hoho')
assert str(clause) == str(util.pickle.loads(util.pickle.dumps(clause)))
-
+
def testextracomparisonoperators(self):
self.assert_compile(
"(:rem_id, :datatype_id, :value)")
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
useable primary keys and foreign keys. Full relational algebra depends on
every selectable unit behaving nicely with others.."""
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from testlib import *
assert not t2.select().alias('foo').is_derived_from(t1)
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
-import testbase
+import testenv; testenv.configure_for_tests()
import datetime, os, pickleable, re
from sqlalchemy import *
from sqlalchemy import types, exceptions
def testbasic(self):
print users.c.goofy4.type
- print users.c.goofy4.type.dialect_impl(testbase.db.dialect)
- print users.c.goofy4.type.dialect_impl(testbase.db.dialect).get_col_spec()
+ print users.c.goofy4.type.dialect_impl(testing.db.dialect)
+ print users.c.goofy4.type.dialect_impl(testing.db.dialect).get_col_spec()
def testprocessing(self):
def copy(self):
return LegacyUnicodeType(self.impl.length)
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
users = Table('type_users', metadata,
Column('user_id', Integer, primary_key = True),
# totall custom type
'float_column': 'float_column FLOAT(25)',
}
- db = testbase.db
+ db = testing.db
if testing.against('sqlite', 'oracle'):
expectedResults['float_column'] = 'float_column NUMERIC(25, 2)'
"""tests the Unicode type. also tests the TypeDecorator with instances in the types package."""
def setUpAll(self):
global unicode_table
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
unicode_table = Table('unicode_table', metadata,
Column('id', Integer, Sequence('uni_id_seq', optional=True), primary_key=True),
Column('unicode_varchar', Unicode(250)),
# SQLLite and MSSQL return non-unicode data as unicode
self.assert_(testing.against('sqlite', 'mssql'))
self.assert_(x['plain_varchar'] == unicodedata)
- print "it's %s!" % testbase.db.name
+ print "it's %s!" % testing.db.name
else:
self.assert_(not isinstance(x['plain_varchar'], unicode) and x['plain_varchar'] == rawdata)
def testengineparam(self):
"""tests engine-wide unicode conversion"""
- prev_unicode = testbase.db.engine.dialect.convert_unicode
- prev_assert = testbase.db.engine.dialect.assert_unicode
+ prev_unicode = testing.db.engine.dialect.convert_unicode
+ prev_assert = testing.db.engine.dialect.assert_unicode
try:
- testbase.db.engine.dialect.convert_unicode = True
- testbase.db.engine.dialect.assert_unicode = False
+ testing.db.engine.dialect.convert_unicode = True
+ testing.db.engine.dialect.assert_unicode = False
rawdata = 'Alors vous imaginez ma surprise, au lever du jour, quand une dr\xc3\xb4le de petit voix m\xe2\x80\x99a r\xc3\xa9veill\xc3\xa9. Elle disait: \xc2\xab S\xe2\x80\x99il vous pla\xc3\xaet\xe2\x80\xa6 dessine-moi un mouton! \xc2\xbb\n'
unicodedata = rawdata.decode('utf-8')
unicode_table.insert().execute(unicode_varchar=unicodedata,
self.assert_(isinstance(x['unicode_text'], unicode) and x['unicode_text'] == unicodedata)
self.assert_(isinstance(x['plain_varchar'], unicode) and x['plain_varchar'] == unicodedata)
finally:
- testbase.db.engine.dialect.convert_unicode = prev_unicode
- testbase.db.engine.dialect.convert_unicode = prev_assert
+ testing.db.engine.dialect.convert_unicode = prev_unicode
+ testing.db.engine.dialect.assert_unicode = prev_assert
@testing.unsupported('oracle')
def testlength(self):
"""checks the database correctly understands the length of a unicode string"""
teststr = u'aaa\x1234'
- self.assert_(testbase.db.func.length(teststr).scalar() == len(teststr))
+ self.assert_(testing.db.func.length(teststr).scalar() == len(teststr))
class BinaryTest(AssertMixin):
def setUpAll(self):
value.stuff = 'this is the right stuff'
return value
- binary_table = Table('binary_table', MetaData(testbase.db),
+ binary_table = Table('binary_table', MetaData(testing.db),
Column('primary_id', Integer, Sequence('binary_id_seq', optional=True), primary_key=True),
Column('data', Binary),
Column('data_slice', Binary(100)),
for stmt in (
binary_table.select(order_by=binary_table.c.primary_id),
- text("select * from binary_table order by binary_table.primary_id", typemap={'pickled':PickleType, 'mypickle':MyPickleType}, bind=testbase.db)
+ text("select * from binary_table order by binary_table.primary_id", typemap={'pickled':PickleType, 'mypickle':MyPickleType}, bind=testing.db)
):
l = stmt.execute().fetchall()
print type(stream1), type(l[0]['data']), type(l[0]['data_slice'])
self.assertEquals(l[0]['mypickle'].stuff, 'this is the right stuff')
def load_stream(self, name, len=12579):
- f = os.path.join(os.path.dirname(testbase.__file__), name)
+ f = os.path.join(os.path.dirname(testenv.__file__), name)
# put a number less than the typical MySQL default BLOB size
return file(f).read(len)
def adapt_operator(self, op):
return {operators.add:operators.sub, operators.sub:operators.add}.get(op, op)
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
test_table = Table('test', meta,
Column('id', Integer, primary_key=True),
Column('data', String(30)),
meta.drop_all()
def test_control(self):
- assert testbase.db.execute("select value from test").scalar() == 250
+ assert testing.db.execute("select value from test").scalar() == 250
assert test_table.select().execute().fetchall() == [(1, 'somedata', datetime.date(2007, 10, 15), 25)]
expr = test_table.c.timestamp == bindparam("thedate")
assert expr.right.type.__class__ == test_table.c.timestamp.type.__class__
- assert testbase.db.execute(test_table.select().where(expr), {"thedate":datetime.date(2007, 10, 15)}).fetchall() == [(1, 'somedata', datetime.date(2007, 10, 15), 25)]
+ assert testing.db.execute(test_table.select().where(expr), {"thedate":datetime.date(2007, 10, 15)}).fetchall() == [(1, 'somedata', datetime.date(2007, 10, 15), 25)]
expr = test_table.c.value == bindparam("somevalue")
assert expr.right.type.__class__ == test_table.c.value.type.__class__
- assert testbase.db.execute(test_table.select().where(expr), {"somevalue":25}).fetchall() == [(1, 'somedata', datetime.date(2007, 10, 15), 25)]
+ assert testing.db.execute(test_table.select().where(expr), {"somevalue":25}).fetchall() == [(1, 'somedata', datetime.date(2007, 10, 15), 25)]
def test_operator_adapt(self):
# test string concatenation
expr = test_table.c.data + "somedata"
- assert testbase.db.execute(select([expr])).scalar() == "somedatasomedata"
+ assert testing.db.execute(select([expr])).scalar() == "somedatasomedata"
expr = test_table.c.id + 15
- assert testbase.db.execute(select([expr])).scalar() == 16
+ assert testing.db.execute(select([expr])).scalar() == 16
# test custom operator conversion
expr = test_table.c.value + 40
# + operator converted to -
# value is calculated as: (250 - (40 * 10)) / 10 == -15
- assert testbase.db.execute(select([expr.label('foo')])).scalar() == -15
+ assert testing.db.execute(select([expr.label('foo')])).scalar() == -15
# this one relies upon anonymous labeling to assemble result
# processing rules on the column.
- assert testbase.db.execute(select([expr])).scalar() == -15
+ assert testing.db.execute(select([expr])).scalar() == -15
class DateTest(AssertMixin):
def setUpAll(self):
global users_with_date, insert_data
- db = testbase.db
+ db = testing.db
if testing.against('oracle'):
import sqlalchemy.databases.oracle as oracle
insert_data = [
Column('user_time', Time)]
users_with_date = Table('query_users_with_date',
- MetaData(testbase.db), *collist)
+ MetaData(testing.db), *collist)
users_with_date.create()
insert_dicts = [dict(zip(fnames, d)) for d in insert_data]
'DateTest mismatch: got:%s expected:%s' % (l, insert_data))
def testtextdate(self):
- x = testbase.db.text(
+ x = testing.db.text(
"select user_datetime from query_users_with_date",
typemap={'user_datetime':DateTime}).execute().fetchall()
print repr(x)
self.assert_(isinstance(x[0][0], datetime.datetime))
- x = testbase.db.text(
+ x = testing.db.text(
"select * from query_users_with_date where user_datetime=:somedate",
bindparams=[bindparam('somedate', type_=types.DateTime)]).execute(
somedate=datetime.datetime(2005, 11, 10, 11, 52, 35)).fetchall()
print repr(x)
def testdate2(self):
- meta = MetaData(testbase.db)
+ meta = MetaData(testing.db)
t = Table('testdate', meta,
Column('id', Integer,
Sequence('datetest_id_seq', optional=True),
class StringTest(AssertMixin):
def test_nolen_string_deprecated(self):
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
foo =Table('foo', metadata,
Column('one', String))
# no warning
- select([func.count("*")], bind=testbase.db).execute()
+ select([func.count("*")], bind=testing.db).execute()
try:
# warning during CREATE
class NumericTest(AssertMixin):
def setUpAll(self):
global numeric_table, metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
numeric_table = Table('numeric_table', metadata,
Column('id', Integer, Sequence('numeric_id_seq', optional=True), primary_key=True),
Column('numericcol', Numeric(asdecimal=False)),
class IntervalTest(AssertMixin):
def setUpAll(self):
global interval_table, metadata
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
interval_table = Table("intervaltable", metadata,
Column("id", Integer, Sequence('interval_id_seq', optional=True), primary_key=True),
Column("interval", Interval),
class BooleanTest(AssertMixin):
def setUpAll(self):
global bool_table
- metadata = MetaData(testbase.db)
+ metadata = MetaData(testing.db)
bool_table = Table('booltest', metadata,
Column('id', Integer, primary_key=True),
Column('value', Boolean))
assert(res2==[(2, False)])
if __name__ == "__main__":
- testbase.main()
+ testenv.main()
# coding: utf-8
"""verrrrry basic unicode column name testing"""
-import testbase
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from testlib import *
from testlib.engines import utf8_engine
t3.delete().execute()
t2.delete().execute()
t1.delete().execute()
-
+
@testing.unsupported('maxdb', 'oracle', 'sybase')
def tearDownAll(self):
global unicode_bind
metadata.drop_all()
del unicode_bind
-
+
@testing.unsupported('maxdb', 'oracle', 'sybase')
def test_insert(self):
t1.insert().execute({u'méil':1, u'\u6e2c\u8a66':5})
assert t1.select().execute().fetchall() == [(1, 5)]
assert t2.select().execute().fetchall() == [(1, 1)]
assert t3.select().execute().fetchall() == [(1, 5, 1, 1)]
-
+
@testing.unsupported('maxdb', 'oracle', 'sybase')
def test_reflect(self):
t1.insert().execute({u'méil':2, u'\u6e2c\u8a66':7})
[(2, 7, 2, 2), (1, 5, 1, 1)])
meta.drop_all()
metadata.create_all()
-
+
if __name__ == '__main__':
- testbase.main()
+ testenv.main()
+++ /dev/null
-"""First import for all test cases, sets sys.path and loads configuration."""
-
-__all__ = 'db',
-
-import sys, os, logging
-sys.path.insert(0, os.path.join(os.getcwd(), 'lib'))
-logging.basicConfig()
-
-import testlib.config
-testlib.config.configure()
-
-from testlib.testing import main
-db = testlib.config.db
-
--- /dev/null
+"""First import for all test cases, sets sys.path and loads configuration."""
+
+import sys, os, logging
+from testlib.testing import main
+import testlib.config
+
+
+_setup = False
+
+def configure_for_tests():
+ """import testenv; testenv.configure_for_tests()"""
+
+ global _setup
+ if not _setup:
+ sys.path.insert(0, os.path.join(os.getcwd(), 'lib'))
+ logging.basicConfig()
+
+ testlib.config.configure()
+ _setup = True
+
+def simple_setup():
+ """import testenv; testenv.simple_setup()"""
+
+ global _setup
+ if not _setup:
+ sys.path.insert(0, os.path.join(os.getcwd(), 'lib'))
+ logging.basicConfig()
+
+ testlib.config.configure_defaults()
+ _setup = True
-import testbase
import optparse, os, sys, re, ConfigParser, StringIO, time, warnings
logging, require = None, None
return options, file_config
+def configure_defaults():
+ global options, config
+ global getopts_options, file_config
+ global db
+
+ file_config = ConfigParser.ConfigParser()
+ file_config.readfp(StringIO.StringIO(base_config))
+ file_config.read(['test.cfg', os.path.expanduser('~/.satest.cfg')])
+ (options, args) = parser.parse_args([])
+
+ # make error messages raised by decorators that depend on a default
+ # database clearer.
+ class _engine_bomb(object):
+ def __getattr__(self, key):
+ raise RuntimeError('No default engine available, testlib '
+ 'was configured with defaults only.')
+
+ db = _engine_bomb()
+ import testlib.testing
+ testlib.testing.db = db
+
+ return options, file_config
+
def _log(option, opt_str, value, parser):
global logging
if not logging:
def __iter__(self):
for key in self._keys:
yield self._data[key]
-
+
# at one point in refactoring, modules were injecting into the config
# process. this could probably just become a list now.
post_configure = _ordered_map()
post_configure['engine_pool'] = _engine_pool
def _create_testing_engine(options, file_config):
- from testlib import engines
+ from testlib import engines, testing
global db
db = engines.testing_engine(db_url, db_opts)
+ testing.db = db
post_configure['create_engine'] = _create_testing_engine
def _prep_testing_database(options, file_config):
def _set_table_options(options, file_config):
import testlib.schema
-
+
table_options = testlib.schema.table_options
for spec in options.tableopts:
key, value = spec.split('=')
def _set_profile_targets(options, file_config):
from testlib import profiling
-
+
profile_config = profiling.profile_config
for target in options.profile_targets:
-import testbase
+# can't be imported until the path is set up; be sure to configure
+# first if measuring coverage.
from sqlalchemy import *
from sqlalchemy import util
from testlib import *
def __init__(self, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
-
+
# TODO: add recursion checks to this
def __repr__(self):
return "%s(%s)" % (
- (self.__class__.__name__),
+ (self.__class__.__name__),
','.join(["%s=%s" % (key, repr(getattr(self, key))) for key in self.__dict__ if not key.startswith('_')])
)
-
+
def __ne__(self, other):
return not self.__eq__(other)
-
+
def __eq__(self, other):
"""'passively' compare this object to another.
-
+
only look at attributes that are present on the source object.
-
+
"""
if self in _recursion_stack:
else:
a = self
b = other
-
+
for attr in a.__dict__.keys():
if attr[0] == '_':
continue
return True
finally:
_recursion_stack.remove(self)
-
+
class User(Base):pass
class Order(Base):pass
class Item(Base):pass
Column('isopen', Integer)
)
-addresses = Table('addresses', metadata,
+addresses = Table('addresses', metadata,
Column('id', Integer, primary_key=True),
Column('user_id', None, ForeignKey('users.id')),
Column('email_address', String(50), nullable=False))
-dingalings = Table("dingalings", metadata,
+dingalings = Table("dingalings", metadata,
Column('id', Integer, primary_key=True),
Column('address_id', None, ForeignKey('addresses.id')),
Column('data', String(30))
)
-
-items = Table('items', metadata,
+
+items = Table('items', metadata,
Column('id', Integer, primary_key=True),
Column('description', String(30), nullable=False)
)
Column('item_id', None, ForeignKey('items.id')),
Column('order_id', None, ForeignKey('orders.id')))
-item_keywords = Table('item_keywords', metadata,
+item_keywords = Table('item_keywords', metadata,
Column('item_id', None, ForeignKey('items.id')),
Column('keyword_id', None, ForeignKey('keywords.id')))
-keywords = Table('keywords', metadata,
+keywords = Table('keywords', metadata,
Column('id', Integer, primary_key=True),
Column('name', String(30), nullable=False)
)
# this many-to-many table has the keywords inserted
# in primary key order, to appease the unit tests.
- # this is because postgres, oracle, and sqlite all support
+ # this is because postgres, oracle, and sqlite all support
# true insert-order row id, but of course our pal MySQL does not,
# so the best it can do is order by, well something, so there you go.
item_keywords.insert().execute(
class FixtureTest(ORMTest):
refresh_data = False
-
+
def setUpAll(self):
super(FixtureTest, self).setUpAll()
if self.keep_data:
install_fixture_data()
-
+
def setUp(self):
if self.refresh_data:
install_fixture_data()
-
+
def define_tables(self, meta):
pass
FixtureTest.metadata = metadata
-
+
class Fixtures(object):
@property
def user_address_result(self):
return [
User(id=7, addresses=[
Address(id=1)
- ]),
+ ]),
User(id=8, addresses=[
Address(id=2, email_address='ed@wood.com'),
Address(id=3, email_address='ed@bettyboop.com'),
Address(id=4, email_address='ed@lala.com'),
- ]),
+ ]),
User(id=9, addresses=[
Address(id=5)
- ]),
+ ]),
User(id=10, addresses=[])
]
Order(description='order 1', items=[Item(description='item 1'), Item(description='item 2'), Item(description='item 3')]),
Order(description='order 3'),
Order(description='order 5'),
- ]),
+ ]),
User(id=8, addresses=[
Address(id=2),
Address(id=3),
Address(id=4)
- ]),
+ ]),
User(id=9, addresses=[
Address(id=5)
], orders=[
Order(description='order 2', items=[Item(description='item 1'), Item(description='item 2'), Item(description='item 3')]),
Order(description='order 4', items=[Item(description='item 1'), Item(description='item 5')]),
- ]),
+ ]),
User(id=10, addresses=[])
]
Order(id=4, items=[Item(id=1), Item(id=5)]),
]),
User(id=10)
- ]
-
+ ]
+
@property
def item_keyword_result(self):
return [
-import testbase
-from testlib import config
import inspect, re
+from testlib import config
orm = None
__all__ = 'mapper',
"""Profiling support for unit and performance tests."""
-import testbase
import os, sys
from testlib.config import parser, post_configure
import testlib.config
-import testbase
from testlib import testing
schema = None
kw.update(table_options)
- if testbase.db.name == 'mysql':
+ if testing.against('mysql'):
if 'mysql_engine' not in kw and 'mysql_type' not in kw:
if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
kw['mysql_engine'] = 'InnoDB'
-import testbase
+# can't be imported until the path is set up; be sure to configure
+# first if measuring coverage.
from sqlalchemy import *
+from testlib import testing
from testlib.schema import Table, Column
-# these are older test fixtures, used primarily by test/orm/mapper.py and test/orm/unitofwork.py.
-# newer unit tests make usage of test/orm/fixtures.py.
+# These are older test fixtures, used primarily by test/orm/mapper.py and
+# test/orm/unitofwork.py.  Newer unit tests make use of
+# test/orm/fixtures.py.
metadata = MetaData()
Column('name', VARCHAR(50)),
)
-userkeywords = Table('userkeywords', metadata,
+userkeywords = Table('userkeywords', metadata,
Column('user_id', INT, ForeignKey("users")),
Column('keyword_id', INT, ForeignKey("keywords")),
)
def create():
if not metadata.bind:
- metadata.bind = testbase.db
+ metadata.bind = testing.db
metadata.create_all()
def drop():
if not metadata.bind:
- metadata.bind = testbase.db
+ metadata.bind = testing.db
metadata.drop_all()
def delete():
for t in metadata.table_iterator(reverse=True):
t.delete().execute()
def user_data():
if not metadata.bind:
- metadata.bind = testbase.db
+ metadata.bind = testing.db
users.insert().execute(
dict(user_id = 7, user_name = 'jack'),
dict(user_id = 8, user_name = 'ed'),
)
def delete_user_data():
users.delete().execute()
-
+
def data():
delete()
-
+
# with SQLITE, the OID column of a table defaults to the primary key, if it has one.
# so to database-neutrally get rows back in "insert order" based on OID, we
# have to also put the primary keys in order for the purpose of these tests
dict(keyword_id=6, name='round'),
dict(keyword_id=7, name='square')
)
-
+
# this many-to-many table has the keywords inserted
# in primary key order, to appease the unit tests.
- # this is because postgres, oracle, and sqlite all support
+ # this is because postgres, oracle, and sqlite all support
# true insert-order row id, but of course our pal MySQL does not,
# so the best it can do is order by, well something, so there you go.
itemkeywords.insert().execute(
class BaseObject(object):
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, ",".join("%s=%s" % (k, repr(v)) for k, v in self.__dict__.iteritems() if k[0] != '_'))
-
+
class User(BaseObject):
def __init__(self):
self.user_id = None
class Item(BaseObject):
pass
-
+
class Keyword(BaseObject):
pass
{'user_id' : 9, 'addresses' : (Address, [])}
]
-user_address_orders_result = [{'user_id' : 7,
+user_address_orders_result = [{'user_id' : 7,
'addresses' : (Address, [{'address_id' : 1}]),
'orders' : (Order, [{'order_id' : 1}, {'order_id' : 3},{'order_id' : 5},])
},
- {'user_id' : 8,
+ {'user_id' : 8,
'addresses' : (Address, [{'address_id' : 2}, {'address_id' : 3}, {'address_id' : 4}]),
'orders' : (Order, [])
},
- {'user_id' : 9,
+ {'user_id' : 9,
'addresses' : (Address, []),
'orders' : (Order, [{'order_id' : 2},{'order_id' : 4}])
}]
user_all_result = [
-{'user_id' : 7,
+{'user_id' : 7,
'addresses' : (Address, [{'address_id' : 1}]),
'orders' : (Order, [
- {'order_id' : 1, 'items': (Item, [])},
+ {'order_id' : 1, 'items': (Item, [])},
{'order_id' : 3, 'items': (Item, [{'item_id':3, 'item_name':'item 3'}, {'item_id':4, 'item_name':'item 4'}, {'item_id':5, 'item_name':'item 5'}])},
{'order_id' : 5, 'items': (Item, [])},
])
},
-{'user_id' : 8,
+{'user_id' : 8,
'addresses' : (Address, [{'address_id' : 2}, {'address_id' : 3}, {'address_id' : 4}]),
'orders' : (Order, [])
},
-{'user_id' : 9,
+{'user_id' : 9,
'addresses' : (Address, []),
'orders' : (Order, [
{'order_id' : 2, 'items': (Item, [{'item_id':1, 'item_name':'item 1'}, {'item_id':2, 'item_name':'item 2'}])},
{'order_id' : 4, 'items':(Item, [])},
{'order_id' : 5, 'items':(Item, [])},
]
-
# monkeypatches unittest.TestLoader.suiteClass at import time
-import testbase
import itertools, unittest, re, sys, os, operator, warnings
from cStringIO import StringIO
import testlib.config as config
'between': lambda val, pair: val >= pair[0] and val <= pair[1],
}
+# sugar ('testing.db'); assigned by testlib.config when the test engine
+# is created at configure time
+db = None
+
def fails_on(*dbs):
"""Mark a test as expected to fail on one or more database implementations.
-import testbase
+import testenv; testenv.configure_for_tests()
import unittest
def suite():
if __name__ == '__main__':
- testbase.main(suite())
+ testenv.main(suite())
class Blog(object):
def __init__(self, owner=None):
self.owner = owner
-
+
class Post(object):
topics = set
def __init__(self, user=None, headline=None, summary=None):
self.summary = summary
self.comments = []
self.comment_count = 0
-
+
class Topic(object):
def __init__(self, keyword=None, description=None):
self.keyword = keyword
self.post = post
self.topic = topic
self.is_primary = is_primary
-
+
class Comment(object):
def __init__(self, subject=None, body=None):
self.subject = subject
self.datetime = datetime.datetime.today()
self.body = body
-
-
-import testbase
-
+import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from sqlalchemy.orm import *
from testlib import *
class ZBlogTest(AssertMixin):
def create_tables(self):
- tables.metadata.drop_all(bind=testbase.db)
- tables.metadata.create_all(bind=testbase.db)
+ tables.metadata.drop_all(bind=testing.db)
+ tables.metadata.create_all(bind=testing.db)
def drop_tables(self):
- tables.metadata.drop_all(bind=testbase.db)
-
+ tables.metadata.drop_all(bind=testing.db)
+
def setUpAll(self):
self.create_tables()
def tearDownAll(self):
super(SavePostTest, self).setUpAll()
mappers.zblog_mappers()
global blog_id, user_id
- s = create_session(bind=testbase.db)
+ s = create_session(bind=testing.db)
user = User('zbloguser', "Zblog User", "hello", group=administrator)
blog = Blog(owner=user)
blog.name = "this is a blog"
def tearDownAll(self):
clear_mappers()
super(SavePostTest, self).tearDownAll()
-
+
def testattach(self):
"""test that a transient/pending instance has proper bi-directional behavior.
-
+
this requires that lazy loaders do not fire off for a transient/pending instance."""
- s = create_session(bind=testbase.db)
+ s = create_session(bind=testing.db)
s.begin()
try:
assert post in blog.posts
finally:
s.rollback()
-
+
def testoptimisticorphans(self):
- """test that instances in the session with un-loaded parents will not
+ """test that instances in the session with un-loaded parents will not
get marked as "orphans" and then deleted """
- s = create_session(bind=testbase.db)
-
+ s = create_session(bind=testing.db)
+
s.begin()
try:
blog = s.query(Blog).get(blog_id)
comment.user = user
s.flush()
s.clear()
-
+
assert s.query(Post).get(post.id) is not None
-
+
finally:
s.rollback()
-
-
-if __name__ == "__main__":
- testbase.main()
-
+
+if __name__ == "__main__":
+ testenv.main()
def cryptpw(password, salt=None):
if salt is None:
- salt = string.join([chr(random.randint(ord('a'), ord('z'))), chr(random.randint(ord('a'), ord('z')))],'')
+ salt = string.join([chr(random.randint(ord('a'), ord('z'))),
+ chr(random.randint(ord('a'), ord('z')))],'')
return sha(password + salt).hexdigest()
-
+
def checkpw(password, dbpw):
return cryptpw(password, dbpw[:2]) == dbpw
password = property(lambda s: None, _set_password)
def checkpw(self, password):
- return checkpw(password, self.crypt_password)
\ No newline at end of file
+ return checkpw(password, self.crypt_password)