/mapping_setup.py
/test.py
/.cache/
+*.sw[o,p]
# execute with a non-unicode object. a warning is emitted,
# this warning shouldn't clog up memory.
- self.engine.execute(table1.select().where(table1.c.col2
- == 'foo%d' % i[0]))
+ self.engine.execute(table1.select().where(
+ table1.c.col2 == 'foo%d' % i[0]))
i[0] += 1
try:
go()
startswith('L')))) == 2
assert len(fulltable(Animal.select(Animal.c.Species.
endswith('pede')))) == 2
- assert len(fulltable(Animal.select(Animal.c.LastEscape
- != None))) == 1
+ assert len(fulltable(
+ Animal.select(Animal.c.LastEscape != None))) == 1 # noqa
assert len(
- fulltable(
- Animal.select(
- None == Animal.c.LastEscape))) == ITERATIONS + 11
+ fulltable(Animal.select(
+ None == Animal.c.LastEscape))) == ITERATIONS + 11 # noqa
# In operator (containedby)
fulltable(
Zoo.select(
and_(
- Zoo.c.Founded != None,
+ Zoo.c.Founded != None, # noqa
Zoo.c.Founded < func.current_timestamp(
_type=Date))))) == 3
assert len(
assert lifespan == expected[species]
expected = ['Montr\xe9al Biod\xf4me', 'Wild Animal Park']
e = select([Zoo.c.Name],
- and_(Zoo.c.Founded != None,
+ and_(Zoo.c.Founded != None, # noqa
Zoo.c.Founded <= func.current_timestamp(),
Zoo.c.Founded >= datetime.date(1990,
1,
self.session.add(Animal(Species='Ostrich', Legs=2, Lifespan=103.2))
self.session.add(Animal(Species='Centipede', Legs=100))
self.session.add(Animal(Species='Emperor Penguin', Legs=2,
- ZooID=seaworld.ID))
+ ZooID=seaworld.ID))
self.session.add(Animal(Species='Adelie Penguin', Legs=2,
- ZooID=seaworld.ID))
+ ZooID=seaworld.ID))
self.session.add(Animal(Species='Millipede', Legs=1000000,
- ZooID=sdz.ID))
+ ZooID=sdz.ID))
# Add a mother and child to test relationships
bai_yun = Animal(Species='Ape', Nameu='Bai Yun', Legs=2)
self.session.add(bai_yun)
self.session.add(Animal(Species='Ape', Name='Hua Mei', Legs=2,
- MotherID=bai_yun.ID))
+ MotherID=bai_yun.ID))
self.session.commit()
def _baseline_2_insert(self):
for x in range(ITERATIONS):
self.session.add(Animal(Species='Tick', Name='Tick %d' % x,
- Legs=8))
+ Legs=8))
self.session.flush()
def _baseline_3_properties(self):
for x in range(ITERATIONS):
assert len(list(self.session.query(Zoo))) == 5
assert len(list(self.session.query(Animal))) == ITERATIONS + 12
- assert len(list(self.session.query(Animal).filter(Animal.Legs
- == 4))) == 4
- assert len(list(self.session.query(Animal).filter(Animal.Legs
- == 2))) == 5
+ assert len(list(self.session.query(Animal)
+ .filter(Animal.Legs == 4))) == 4
+ assert len(list(self.session.query(Animal)
+ .filter(Animal.Legs == 2))) == 5
assert len(
list(
self.session.query(Animal).filter(
and_(
Animal.Legs >= 2,
Animal.Legs < 20)))) == ITERATIONS + 9
- assert len(list(self.session.query(Animal).filter(Animal.Legs
- > 10))) == 2
- assert len(list(self.session.query(Animal).filter(Animal.Lifespan
- > 70))) == 2
+ assert len(list(self.session.query(Animal)
+ .filter(Animal.Legs > 10))) == 2
+ assert len(list(self.session.query(Animal)
+ .filter(Animal.Lifespan > 70))) == 2
assert len(list(self.session.query(Animal).
filter(Animal.Species.like('L%')))) == 2
assert len(list(self.session.query(Animal).
filter(Animal.Species.like('%pede')))) == 2
- assert len(list(self.session.query(Animal).filter(Animal.LastEscape
- != None))) == 1
+ assert len(list(self.session.query(Animal)
+ .filter(Animal.LastEscape != None))) == 1 # noqa
assert len(
list(
self.session.query(Animal).filter(
- Animal.LastEscape == None))) == ITERATIONS + 11
+ Animal.LastEscape == None))) == ITERATIONS + 11 # noqa
# In operator (containedby)
list(
self.session.query(Zoo).filter(
and_(
- Zoo.Founded != None,
+ Zoo.Founded != None, # noqa
Zoo.Founded < func.now())))) == 3
- assert len(list(self.session.query(Animal).filter(Animal.LastEscape
- == func.now()))) == 0
+ assert len(list(self.session.query(Animal)
+ .filter(Animal.LastEscape == func.now()))) == 0
assert len(list(self.session.query(Animal).filter(
func.date_part('year', Animal.LastEscape) == 2004))) == 1
assert len(
assert lifespan == expected[species]
expected = ['Montr\xe9al Biod\xf4me', 'Wild Animal Park']
e = select([Zoo.c.Name],
- and_(Zoo.c.Founded != None,
+ and_(Zoo.c.Founded != None, # noqa
Zoo.c.Founded <= func.current_timestamp(),
Zoo.c.Founded >= datetime.date(1990,
1,
# Edit
- SDZ = self.session.query(Zoo).filter(Zoo.Name == 'San Diego Zoo'
- ).one()
+ SDZ = self.session.query(Zoo).filter(Zoo.Name == 'San Diego Zoo') \
+ .one()
SDZ.Name = 'The San Diego Zoo'
SDZ.Founded = datetime.date(1900, 1, 1)
SDZ.Opens = datetime.time(7, 30, 0)
# Test edits
- SDZ = self.session.query(Zoo).filter(Zoo.Name
- == 'The San Diego Zoo').one()
+ SDZ = self.session.query(Zoo) \
+ .filter(Zoo.Name == 'The San Diego Zoo').one()
assert SDZ.Founded == datetime.date(1900, 1, 1), SDZ.Founded
# Change it back
# Test re-edits
- SDZ = self.session.query(Zoo).filter(Zoo.Name == 'San Diego Zoo'
- ).one()
+ SDZ = self.session.query(Zoo).filter(Zoo.Name == 'San Diego Zoo') \
+ .one()
assert SDZ.Founded == datetime.date(1835, 9, 13), \
SDZ.Founded
from sqlalchemy import exc
from sqlalchemy.testing import fixtures
+
class DependencySortTest(fixtures.TestBase):
def assert_sort(self, tuples, allitems=None):
assert conforms_partial_ordering(tuples, result)
def assert_sort_deterministic(self, tuples, allitems, expected):
- result = list(topological.sort(tuples, allitems, deterministic_order=True))
+ result = list(topological.sort(tuples,
+ allitems,
+ deterministic_order=True))
assert conforms_partial_ordering(tuples, result)
assert result == expected
'node4']))
eq_(err.edges, set([('node3', 'node1'), ('node4', 'node1'),
('node2', 'node3'), ('node1', 'node2'),
- ('node4','node5'), ('node5', 'node4')]))
+ ('node4', 'node5'), ('node5', 'node4')]))
def test_raise_on_cycle_two(self):
eq_(err.cycles, set(['node1', 'node3', 'node2']))
eq_(err.edges, set([('node3', 'node1'), ('node2', 'node3'),
('node3', 'node2'), ('node1', 'node2'),
- ('node2','node4')]))
+ ('node2', 'node4')]))
def test_raise_on_cycle_three(self):
question, issue, providerservice, answer, provider = \
tuples = [(node1, node2), (node3, node1), (node2, node4),
(node3, node2), (node2, node3)]
eq_(topological.find_cycles(tuples,
- self._nodes_from_tuples(tuples)), set([node1, node2,
- node3]))
+ self._nodes_from_tuples(tuples)), set([node1, node2, node3]))
def test_find_multiple_cycles_one(self):
node1 = 'node1'
# node6 only became present here once [ticket:2282] was addressed.
eq_(
topological.find_cycles(tuples, allnodes),
- set(['node1','node2', 'node4', 'node6'])
+ set(['node1', 'node2', 'node4', 'node6'])
)
def test_find_multiple_cycles_three(self):
('node17', 'node11'), ('node1', 'node19'), ('node15', 'node8'),
('node6', 'node20'), ('node14', 'node11'), ('node6', 'node14'),
('node11', 'node2'), ('node10', 'node20'), ('node1', 'node11'),
- ('node20', 'node19'), ('node4', 'node20'), ('node15', 'node20'),
- ('node9', 'node19'), ('node11', 'node10'), ('node11', 'node19'),
- ('node13', 'node6'), ('node3', 'node15'), ('node9', 'node11'),
- ('node4', 'node17'), ('node2', 'node20'), ('node19', 'node10'),
- ('node8', 'node4'), ('node11', 'node3'), ('node6', 'node1')
+ ('node20', 'node19'), ('node4', 'node20'), ('node15', 'node20'),
+ ('node9', 'node19'), ('node11', 'node10'), ('node11', 'node19'),
+ ('node13', 'node6'), ('node3', 'node15'), ('node9', 'node11'),
+ ('node4', 'node17'), ('node2', 'node20'), ('node19', 'node10'),
+ ('node8', 'node4'), ('node11', 'node3'), ('node6', 'node1')
]
allnodes = ['node%d' % i for i in range(1, 21)]
eq_(
topological.find_cycles(tuples, allnodes),
set(['node11', 'node10', 'node13', 'node15', 'node14', 'node17',
- 'node19', 'node20', 'node8', 'node1', 'node3',
- 'node2', 'node4', 'node6'])
+ 'node19', 'node20', 'node8', 'node1', 'node3', 'node2',
+ 'node4', 'node6'])
)
# if _JoinedListener fixes .listeners
# at construction time, then we don't get
# the new listeners.
- #eq_(l1.mock_calls, [])
+ # eq_(l1.mock_calls, [])
# alternatively, if _JoinedListener shares the list
# using a @property, then we get them, at the arguable
class WrongNameError(DatabaseError):
pass
+
# but they're going to call it their "IntegrityError"
IntegrityError = WrongNameError
from sqlalchemy import inspection, inspect
from sqlalchemy.testing import fixtures
+
class TestFixture(object):
pass
+
class TestInspection(fixtures.TestBase):
def tearDown(self):
@inspection._inspects(SomeFoo)
def insp_somefoo(subject):
- return {"insp":subject}
+ return {"insp": subject}
somefoo = SomeFoo()
insp = inspect(somefoo)
eq_(list(o.values()), [1, 2, 3, 4, 5, 6])
def test_odict_constructor(self):
- o = util.OrderedDict([('name', 'jbe'), ('fullname', 'jonathan'
- ), ('password', '')])
+ o = util.OrderedDict([('name', 'jbe'),
+ ('fullname', 'jonathan'), ('password', '')])
eq_(list(o.keys()), ['name', 'fullname', 'password'])
def test_odict_copy(self):
def test_dupes_add(self):
cc = sql.ColumnCollection()
- c1, c2a, c3, c2b = column('c1'), column('c2'), column('c3'), column('c2')
+ c1, c2a, c3, c2b = (column('c1'),
+ column('c2'),
+ column('c3'),
+ column('c2'))
cc.add(c1)
cc.add(c2a)
def test_replace(self):
cc = sql.ColumnCollection()
- c1, c2a, c3, c2b = column('c1'), column('c2'), column('c3'), column('c2')
+ c1, c2a, c3, c2b = (column('c1'),
+ column('c2'),
+ column('c3'),
+ column('c2'))
cc.add(c1)
cc.add(c2a)
def test_replace_key_matches(self):
cc = sql.ColumnCollection()
- c1, c2a, c3, c2b = column('c1'), column('c2'), column('c3'), column('X')
+ c1, c2a, c3, c2b = (column('c1'),
+ column('c2'),
+ column('c3'),
+ column('X'))
c2b.key = 'c2'
cc.add(c1)
def test_replace_name_matches(self):
cc = sql.ColumnCollection()
- c1, c2a, c3, c2b = column('c1'), column('c2'), column('c3'), column('c2')
+ c1, c2a, c3, c2b = (column('c1'),
+ column('c2'),
+ column('c3'),
+ column('c2'))
c2b.key = 'X'
cc.add(c1)
def test_dupes_extend(self):
cc = sql.ColumnCollection()
- c1, c2a, c3, c2b = column('c1'), column('c2'), column('c3'), column('c2')
+ c1, c2a, c3, c2b = (column('c1'),
+ column('c2'),
+ column('c3'),
+ column('c2'))
cc.add(c1)
cc.add(c2a)
def test_dupes_update(self):
cc = sql.ColumnCollection()
- c1, c2a, c3, c2b = column('c1'), column('c2'), column('c3'), column('c2')
+ c1, c2a, c3, c2b = (column('c1'),
+ column('c2'),
+ column('c3'),
+ column('c2'))
cc.add(c1)
cc.add(c2a)
def test_extend_existing(self):
cc = sql.ColumnCollection()
- c1, c2, c3, c4, c5 = column('c1'), column('c2'), column('c3'), column('c4'), column('c5')
+ c1, c2, c3, c4, c5 = (column('c1'),
+ column('c2'),
+ column('c3'),
+ column('c4'),
+ column('c5'))
cc.extend([c1, c2])
eq_(cc._all_columns, [c1, c2])
def test_update_existing(self):
cc = sql.ColumnCollection()
- c1, c2, c3, c4, c5 = column('c1'), column('c2'), column('c3'), column('c4'), column('c5')
+ c1, c2, c3, c4, c5 = (column('c1'),
+ column('c2'),
+ column('c3'),
+ column('c4'),
+ column('c5'))
cc.update([('c1', c1), ('c2', c2)])
eq_(cc._all_columns, [c1, c2])
eq_(cc._all_columns, [c1, c2, c3, c4, c5])
-
class LRUTest(fixtures.TestBase):
def test_lru(self):
def __str__(self):
return "item id %d" % self.id
- l = util.LRUCache(10, threshold=.2)
+ lru = util.LRUCache(10, threshold=.2)
for id in range(1, 20):
- l[id] = item(id)
+ lru[id] = item(id)
# first couple of items should be gone
- assert 1 not in l
- assert 2 not in l
+ assert 1 not in lru
+ assert 2 not in lru
# next batch over the threshold of 10 should be present
for id_ in range(11, 20):
- assert id_ in l
+ assert id_ in lru
- l[12]
- l[15]
- l[23] = item(23)
- l[24] = item(24)
- l[25] = item(25)
- l[26] = item(26)
- l[27] = item(27)
+ lru[12]
+ lru[15]
+ lru[23] = item(23)
+ lru[24] = item(24)
+ lru[25] = item(25)
+ lru[26] = item(26)
+ lru[27] = item(27)
- assert 11 not in l
- assert 13 not in l
+ assert 11 not in lru
+ assert 13 not in lru
for id_ in (25, 24, 23, 14, 12, 19, 18, 17, 16, 15):
- assert id_ in l
+ assert id_ in lru
- i1 = l[25]
+ i1 = lru[25]
i2 = item(25)
- l[25] = i2
- assert 25 in l
- assert l[25] is i2
+ lru[25] = i2
+ assert 25 in lru
+ assert lru[25] is i2
class ImmutableSubclass(str):
def __iter__(self):
return iter(self + '')
- assert list(util.flatten_iterator([IterString('asdf'),
- [IterString('x'), IterString('y')]])) == ['asdf',
- 'x', 'y']
+ iter_list = [IterString('asdf'), [IterString('x'), IterString('y')]]
+
+ assert list(util.flatten_iterator(iter_list)) == ['asdf', 'x', 'y']
class HashOverride(object):
eq_(ids.copy(), ids)
# explicit __eq__ and __ne__ tests
- assert ids != None
- assert not(ids == None)
+ assert ids != None # noqa
+ assert not(ids == None) # noqa
ne_(ids, IdentitySet([o1, o2, o3]))
ids.clear()
eq_ = self.assert_eq
a, b, c, d, e, f, g = \
- elem(), elem(), elem(), elem(), elem(), elem(), elem()
+ elem(), elem(), elem(), elem(), elem(), elem(), elem()
s1 = util.OrderedIdentitySet([a, b, c])
s2 = util.OrderedIdentitySet([d, e, f])
def _notok(self, instance):
assert_raises(TypeError,
- util.dictlike_iteritems,
- instance)
+ util.dictlike_iteritems,
+ instance)
def test_dict(self):
d = dict(a=1, b=2, c=3)
@fails_if(lambda: util.pypy, "pypy returns plain *arg, **kw")
def test_callable_argspec_partial(self):
from functools import partial
+
def foo(x, y, z, **kw):
pass
bar = partial(foo, 5)
get_callable_argspec, bar
)
+
class SymbolTest(fixtures.TestBase):
def test_basic(self):
eq_(parsed, wanted)
test(lambda: None,
- {'args': '()', 'self_arg': None,
- 'apply_kw': '()', 'apply_pos': '()'})
+ {'args': '()', 'self_arg': None,
+ 'apply_kw': '()', 'apply_pos': '()'})
test(lambda: None,
- {'args': '', 'self_arg': None,
- 'apply_kw': '', 'apply_pos': ''},
- grouped=False)
+ {'args': '', 'self_arg': None,
+ 'apply_kw': '', 'apply_pos': ''},
+ grouped=False)
test(lambda self: None,
- {'args': '(self)', 'self_arg': 'self',
- 'apply_kw': '(self)', 'apply_pos': '(self)'})
+ {'args': '(self)', 'self_arg': 'self',
+ 'apply_kw': '(self)', 'apply_pos': '(self)'})
test(lambda self: None,
- {'args': 'self', 'self_arg': 'self',
- 'apply_kw': 'self', 'apply_pos': 'self'},
- grouped=False)
+ {'args': 'self', 'self_arg': 'self',
+ 'apply_kw': 'self', 'apply_pos': 'self'},
+ grouped=False)
test(lambda *a: None,
- {'args': '(*a)', 'self_arg': 'a[0]',
- 'apply_kw': '(*a)', 'apply_pos': '(*a)'})
+ {'args': '(*a)', 'self_arg': 'a[0]',
+ 'apply_kw': '(*a)', 'apply_pos': '(*a)'})
test(lambda **kw: None,
- {'args': '(**kw)', 'self_arg': None,
- 'apply_kw': '(**kw)', 'apply_pos': '(**kw)'})
+ {'args': '(**kw)', 'self_arg': None,
+ 'apply_kw': '(**kw)', 'apply_pos': '(**kw)'})
test(lambda *a, **kw: None,
- {'args': '(*a, **kw)', 'self_arg': 'a[0]',
- 'apply_kw': '(*a, **kw)', 'apply_pos': '(*a, **kw)'})
+ {'args': '(*a, **kw)', 'self_arg': 'a[0]',
+ 'apply_kw': '(*a, **kw)', 'apply_pos': '(*a, **kw)'})
test(lambda a, *b: None,
- {'args': '(a, *b)', 'self_arg': 'a',
- 'apply_kw': '(a, *b)', 'apply_pos': '(a, *b)'})
+ {'args': '(a, *b)', 'self_arg': 'a',
+ 'apply_kw': '(a, *b)', 'apply_pos': '(a, *b)'})
test(lambda a, **b: None,
- {'args': '(a, **b)', 'self_arg': 'a',
- 'apply_kw': '(a, **b)', 'apply_pos': '(a, **b)'})
+ {'args': '(a, **b)', 'self_arg': 'a',
+ 'apply_kw': '(a, **b)', 'apply_pos': '(a, **b)'})
test(lambda a, *b, **c: None,
- {'args': '(a, *b, **c)', 'self_arg': 'a',
- 'apply_kw': '(a, *b, **c)', 'apply_pos': '(a, *b, **c)'})
+ {'args': '(a, *b, **c)', 'self_arg': 'a',
+ 'apply_kw': '(a, *b, **c)', 'apply_pos': '(a, *b, **c)'})
test(lambda a, b=1, **c: None,
- {'args': '(a, b=1, **c)', 'self_arg': 'a',
- 'apply_kw': '(a, b=b, **c)', 'apply_pos': '(a, b, **c)'})
+ {'args': '(a, b=1, **c)', 'self_arg': 'a',
+ 'apply_kw': '(a, b=b, **c)', 'apply_pos': '(a, b, **c)'})
test(lambda a=1, b=2: None,
- {'args': '(a=1, b=2)', 'self_arg': 'a',
- 'apply_kw': '(a=a, b=b)', 'apply_pos': '(a, b)'})
+ {'args': '(a=1, b=2)', 'self_arg': 'a',
+ 'apply_kw': '(a=a, b=b)', 'apply_pos': '(a, b)'})
test(lambda a=1, b=2: None,
- {'args': 'a=1, b=2', 'self_arg': 'a',
- 'apply_kw': 'a=a, b=b', 'apply_pos': 'a, b'},
- grouped=False)
+ {'args': 'a=1, b=2', 'self_arg': 'a',
+ 'apply_kw': 'a=a, b=b', 'apply_pos': 'a, b'},
+ grouped=False)
@testing.fails_if(lambda: util.pypy,
- "pypy doesn't report O.__init__ as object.__init__")
+ "pypy doesn't report Obj.__init__ as object.__init__")
def test_init_grouped(self):
object_spec = {
'args': '(self)', 'self_arg': 'self',
self._test_init(True, object_spec, wrapper_spec, custom_spec)
@testing.fails_if(lambda: util.pypy,
- "pypy doesn't report O.__init__ as object.__init__")
+ "pypy doesn't report Obj.__init__ as object.__init__")
def test_init_bare(self):
object_spec = {
'args': 'self', 'self_arg': 'self',
parsed = util.format_argspec_init(fn, grouped=grouped)
eq_(parsed, wanted)
- class O(object):
+ class Obj(object):
pass
- test(O.__init__, object_spec)
+ test(Obj.__init__, object_spec)
- class O(object):
+ class Obj(object):
def __init__(self):
pass
- test(O.__init__, object_spec)
+ test(Obj.__init__, object_spec)
- class O(object):
+ class Obj(object):
def __init__(slef, a=123):
pass
- test(O.__init__, custom_spec)
+ test(Obj.__init__, custom_spec)
- class O(list):
+ class Obj(list):
pass
- test(O.__init__, wrapper_spec)
+ test(Obj.__init__, wrapper_spec)
- class O(list):
+ class Obj(list):
def __init__(self, *args, **kwargs):
pass
- test(O.__init__, wrapper_spec)
+ test(Obj.__init__, wrapper_spec)
- class O(list):
+ class Obj(list):
def __init__(self):
pass
- test(O.__init__, object_spec)
+ test(Obj.__init__, object_spec)
- class O(list):
+ class Obj(list):
def __init__(slef, a=123):
pass
- test(O.__init__, custom_spec)
+ test(Obj.__init__, custom_spec)
class GenericReprTest(fixtures.TestBase):
self.b = b
self.c = c
self.d = d
+
class Bar(Foo):
def __init__(self, e, f, g=5, **kw):
self.e = e
def __init__(self, a=1, b=2):
self.a = a
self.b = b
+
class Bar(Foo):
def __init__(self, b=3, c=4, **kw):
self.c = c
"Bar(b='b', c='c', a='a')"
)
-
def test_discard_vargs(self):
class Foo(object):
def __init__(self, a, b, *args):
def test_instance(self):
obj = object()
assert_raises(TypeError, util.as_interface, obj,
- cls=self.Something)
+ cls=self.Something)
assert_raises(TypeError, util.as_interface, obj,
- methods=('foo'))
+ methods=('foo'))
assert_raises(TypeError, util.as_interface, obj,
- cls=self.Something, required=('foo'))
+ cls=self.Something, required=('foo'))
obj = self.Something()
eq_(obj, util.as_interface(obj, cls=self.Something))
for obj in partial, slotted:
eq_(obj, util.as_interface(obj, cls=self.Something))
assert_raises(TypeError, util.as_interface, obj,
- methods=('foo'))
+ methods=('foo'))
eq_(obj, util.as_interface(obj, methods=('bar',)))
eq_(obj, util.as_interface(obj, cls=self.Something,
required=('bar',)))
assert_raises(TypeError, util.as_interface, obj,
- cls=self.Something, required=('foo',))
+ cls=self.Something, required=('foo',))
assert_raises(TypeError, util.as_interface, obj,
- cls=self.Something, required=self.Something)
+ cls=self.Something, required=self.Something)
def test_dict(self):
obj = {}
found.remove(method)
assert not found
- fn = lambda self: 123
+ def fn(self): return 123
obj = {'foo': fn, 'bar': fn}
res = util.as_interface(obj, cls=self.Something)
assertAdapted(res, 'foo', 'bar')
def go():
try:
raise m1
- except:
+ except Exception:
with util.safe_reraise():
go2()
to_bootstrap = "pytest"
exec(code, globals(), locals())
from pytestplugin import * # noqa
-
UniqueConstraint, Index, Sequence, literal
from sqlalchemy import testing
+
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = mssql.dialect()
def test_pyodbc_hostname(self):
dialect = pyodbc.dialect()
- u = url.make_url('mssql://username:password@hostspec/database?driver=SQL+Server')
+ u = url.make_url(
+ 'mssql://username:password@hostspec/database?driver=SQL+Server'
+ )
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password'], {}], connection)
eq_(connection[1], {})
eq_(connection[0][0]
in ('DRIVER={SQL Server};Server=hostspec;Database=database;'
- 'UID=username;PWD=password;foo=bar;LANGUAGE=us_english',
- 'DRIVER={SQL Server};Server=hostspec;Database=database;UID='
- 'username;PWD=password;LANGUAGE=us_english;foo=bar'), True)
+ 'UID=username;PWD=password;foo=bar;LANGUAGE=us_english',
+ 'DRIVER={SQL Server};Server=hostspec;Database=database;UID='
+ 'username;PWD=password;LANGUAGE=us_english;foo=bar'), True)
def test_pyodbc_odbc_connect(self):
dialect = pyodbc.dialect()
connection = dialect.create_connect_args(u)
eq_(
[[], {'host': 'somehost', 'password': 'tiger',
- 'user': 'scott', 'database': 'test'}], connection
+ 'user': 'scott', 'database': 'test'}], connection
)
u = \
connection = dialect.create_connect_args(u)
eq_(
[[], {'host': 'somehost:5000', 'password': 'tiger',
- 'user': 'scott', 'database': 'test'}], connection
+ 'user': 'scott', 'database': 'test'}], connection
)
def test_pymssql_disconnect(self):
'Error 10054',
'Not connected to any MS SQL server',
'Connection is closed'
- ]:
+ ]:
eq_(dialect.is_disconnect(error, None, None), True)
eq_(dialect.is_disconnect("not an error", None, None), False)
eq_(
dialect._get_server_version_info(conn),
(11, 0, 9216, 62)
- )
\ No newline at end of file
+ )
connection.execute('CREATE FULLTEXT CATALOG Catalog AS '
'DEFAULT')
return False
- except:
+ except Exception:
return True
finally:
connection.close()
from sqlalchemy import *
from sqlalchemy import types, schema, event
from sqlalchemy.databases import mssql
-from sqlalchemy.testing import fixtures, AssertsCompiledSQL, \
- ComparesTables
+from sqlalchemy.testing import (fixtures,
+ AssertsCompiledSQL,
+ ComparesTables)
from sqlalchemy import testing
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy import util
+from sqlalchemy.dialects.mssql.information_schema import CoerceUnicode, tables
+from sqlalchemy.dialects.mssql import base
class ReflectionTest(fixtures.TestBase, ComparesTables):
server_default='5'),
Column('test9', types.BINARY(100)),
Column('test_numeric', types.Numeric()),
- )
+ )
addresses = Table(
'engine_email_addresses',
autoload=True,
autoload_with=testing.db)
reflected_addresses = Table('engine_email_addresses',
- meta2, autoload=True, autoload_with=testing.db)
+ meta2,
+ autoload=True,
+ autoload_with=testing.db)
self.assert_tables_equal(users, reflected_users)
self.assert_tables_equal(addresses, reflected_addresses)
meta2 = MetaData(testing.db)
table2 = Table('identity_test', meta2, autoload=True)
sequence = isinstance(table2.c['col1'].default, schema.Sequence) \
- and table2.c['col1'].default
+ and table2.c['col1'].default
assert sequence.start == 2
assert sequence.increment == 3
assert isinstance(t1.c.id.type, Integer)
assert isinstance(t1.c.data.type, types.NullType)
-
@testing.provide_metadata
def test_db_qualified_items(self):
metadata = self.metadata
Table('foo', metadata, Column('id', Integer, primary_key=True))
- Table('bar', metadata,
- Column('id', Integer, primary_key=True),
- Column('foo_id', Integer, ForeignKey('foo.id', name="fkfoo"))
- )
+ Table('bar',
+ metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo_id', Integer, ForeignKey('foo.id', name="fkfoo")))
metadata.create_all()
dbname = testing.db.scalar("select db_name()")
inspector = inspect(testing.db)
bar_via_db = inspector.get_foreign_keys(
- "bar", schema="%s.%s" % (dbname, owner))
+ "bar", schema="%s.%s" % (dbname, owner))
eq_(
bar_via_db,
[{
autoload_with=testing.db)
eq_(m2.tables["test.dbo.foo"].schema, "test.dbo")
-
@testing.provide_metadata
def test_indexes_cols(self):
metadata = self.metadata
def test_indexes_cols_with_commas(self):
metadata = self.metadata
- t1 = Table('t', metadata,
- Column('x, col', Integer, key='x'),
- Column('y', Integer)
- )
+ t1 = Table('t',
+ metadata,
+ Column('x, col', Integer, key='x'),
+ Column('y', Integer))
Index('foo', t1.c.x, t1.c.y)
metadata.create_all()
def test_indexes_cols_with_spaces(self):
metadata = self.metadata
- t1 = Table('t', metadata, Column('x col', Integer, key='x'),
- Column('y', Integer))
+ t1 = Table('t',
+ metadata,
+ Column('x col', Integer, key='x'),
+ Column('y', Integer))
Index('foo', t1.c.x, t1.c.y)
metadata.create_all()
in_('max', str(col['type'].compile(dialect=testing.db.dialect)))
-from sqlalchemy.dialects.mssql.information_schema import CoerceUnicode, tables
-from sqlalchemy.dialects.mssql import base
-
class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
def test_info_unicode_coercion(self):
stmt = tables.c.table_name == 'somename'
self.assert_compile(
stmt,
- "[INFORMATION_SCHEMA].[TABLES].[TABLE_NAME] = CAST(:table_name_1 AS NVARCHAR(max))",
+ "[INFORMATION_SCHEMA].[TABLES].[TABLE_NAME] = "
+ "CAST(:table_name_1 AS NVARCHAR(max))",
dialect=dialect
)
+
class ReflectHugeViewTest(fixtures.TestBase):
__only_on__ = 'mssql'
__backend__ = True
self.metadata = MetaData(testing.db)
t = Table('base_table', self.metadata,
- *[
- Column("long_named_column_number_%d" % i, Integer)
+ *[Column("long_named_column_number_%d" % i, Integer)
for i in range(self.col_num)
- ]
- )
+ ]
+ )
self.view_str = view_str = \
"CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % (
- ",".join("long_named_column_number_%d" % i
- for i in range(self.col_num))
+ ",".join("long_named_column_number_%d" % i
+ for i in range(self.col_num))
)
assert len(view_str) > 4000
- event.listen(t, 'after_create', DDL(view_str) )
- event.listen(t, 'before_drop', DDL("DROP VIEW huge_named_view") )
+ event.listen(t, 'after_create', DDL(view_str))
+ event.listen(t, 'before_drop', DDL("DROP VIEW huge_named_view"))
self.metadata.create_all()
inspector = Inspector.from_engine(testing.db)
view_def = inspector.get_view_definition("huge_named_view")
eq_(view_def, self.view_str)
-
% (col.name, columns[index][3]))
self.assert_(repr(col))
+
metadata = None
result = module.Binary(input)
eq_(result, expected_result)
+
binary_table = None
MyPickleType = None
mypickle=MyPickleType),
bind=testing.db):
with engine.connect() as conn:
- l = conn.execute(stmt).fetchall()
- eq_(list(stream1), list(l[0]['data']))
+ result = conn.execute(stmt).fetchall()
+ eq_(list(stream1), list(result[0]['data']))
paddedstream = list(stream1[0:100])
paddedstream.extend(['\x00'] * (100 - len(paddedstream)))
- eq_(paddedstream, list(l[0]['data_slice']))
- eq_(list(stream2), list(l[1]['data']))
- eq_(list(stream2), list(l[1]['data_image']))
- eq_(testobj1, l[0]['pickled'])
- eq_(testobj2, l[1]['pickled'])
- eq_(testobj3.moredata, l[0]['mypickle'].moredata)
- eq_(l[0]['mypickle'].stuff, 'this is the right stuff')
+ eq_(paddedstream, list(result[0]['data_slice']))
+ eq_(list(stream2), list(result[1]['data']))
+ eq_(list(stream2), list(result[1]['data_image']))
+ eq_(testobj1, result[0]['pickled'])
+ eq_(testobj2, result[1]['pickled'])
+ eq_(testobj3.moredata, result[0]['mypickle'].moredata)
+ eq_(result[0]['mypickle'].stuff, 'this is the right stuff')
def _test_binary_none(self, deprecate_large_types):
engine = engines.testing_engine(
from sqlalchemy.sql import table, column
import re
+
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = mysql.dialect()
def test_create_index_with_prefix(self):
m = MetaData()
tbl = Table('testtbl', m, Column('data', String(255)))
- idx = Index('test_idx1', tbl.c.data, mysql_length=10, mysql_prefix='FULLTEXT')
+ idx = Index('test_idx1', tbl.c.data, mysql_length=10,
+ mysql_prefix='FULLTEXT')
self.assert_compile(schema.CreateIndex(idx),
- 'CREATE FULLTEXT INDEX test_idx1 ON testtbl (data(10))')
+ 'CREATE FULLTEXT INDEX test_idx1 '
+ 'ON testtbl (data(10))')
def test_create_index_with_length(self):
m = MetaData()
self.assert_compile(
t1.outerjoin(t2, t1.c.x == t2.c.y, full=True),
"t1 FULL OUTER JOIN t2 ON t1.x = t2.y"
- )
\ No newline at end of file
+ )
def _test_ssl_arguments(self, dialect):
kwarg = dialect.create_connect_args(
make_url("mysql://scott:tiger@localhost:3306/test"
- "?ssl_ca=/ca.pem&ssl_cert=/cert.pem&ssl_key=/key.pem")
+ "?ssl_ca=/ca.pem&ssl_cert=/cert.pem&ssl_key=/key.pem")
)[1]
# args that differ among mysqldb and oursql
for k in ('use_unicode', 'found_rows', 'client_flag'):
from sqlalchemy.dialects.mysql import mysqlconnector
dialect = mysqlconnector.dialect()
kw = dialect.create_connect_args(
- make_url("mysql+mysqlconnector://u:p@host/db?raise_on_warnings=true")
- )[1]
+ make_url(
+ "mysql+mysqlconnector://u:p@host/db?raise_on_warnings=true"
+ )
+ )[1]
eq_(kw['raise_on_warnings'], True)
kw = dialect.create_connect_args(
- make_url("mysql+mysqlconnector://u:p@host/db?raise_on_warnings=false")
- )[1]
+ make_url(
+ "mysql+mysqlconnector://u:p@host/db?raise_on_warnings=false"
+ )
+ )[1]
eq_(kw['raise_on_warnings'], False)
-
kw = dialect.create_connect_args(
make_url("mysql+mysqlconnector://u:p@host/db")
)[1]
)
assert c.execute('SELECT @@tx_isolation;').scalar() == mysql_value
+
class SQLModeDetectionTest(fixtures.TestBase):
__only_on__ = 'mysql'
__backend__ = True
cursor = con.cursor()
cursor.execute("set sql_mode='%s'" % (",".join(modes)))
e = engines.testing_engine(options={
- 'pool_events':[
+ 'pool_events': [
(connect, 'first_connect'),
(connect, 'connect')
]
def test_sysdate(self):
d = testing.db.scalar(func.sysdate())
assert isinstance(d, datetime.datetime)
-
metadata = MetaData(testing.db)
cattable = Table('cattable', metadata,
- Column('id', Integer, primary_key=True),
- Column('description', String(50)),
- mysql_engine='MyISAM'
- )
+ Column('id', Integer, primary_key=True),
+ Column('description', String(50)),
+ mysql_engine='MyISAM')
matchtable = Table('matchtable', metadata,
- Column('id', Integer, primary_key=True),
- Column('title', String(200)),
- Column('category_id', Integer, ForeignKey('cattable.id')),
- mysql_engine='MyISAM'
- )
+ Column('id', Integer, primary_key=True),
+ Column('title', String(200)),
+ Column('category_id',
+ Integer,
+ ForeignKey('cattable.id')),
+ mysql_engine='MyISAM')
metadata.create_all()
cattable.insert().execute([
# test [ticket:3263]
result = testing.db.execute(
select([
- matchtable.c.title.match('Agile Ruby Programming').label('ruby'),
+ matchtable.c.title.match('Agile Ruby Programming')
+ .label('ruby'),
matchtable.c.title.match('Dive Python').label('python'),
matchtable.c.title
]).order_by(matchtable.c.id)
def test_match_across_joins(self):
results = (matchtable.select().
- where(and_(cattable.c.id==matchtable.c.category_id,
+ where(and_(cattable.c.id == matchtable.c.category_id,
or_(cattable.c.description.match('Ruby'),
matchtable.c.title.match('nutshell')))).
order_by(matchtable.c.id).
is_(
testing.db.execute(stmt).scalar(), True
)
-
def test_legacy_enum_types(self):
specs = [
- (mysql.ENUM("''","'fleem'"), mysql.ENUM("''","'fleem'")), # noqa
+ (mysql.ENUM("''", "'fleem'"), mysql.ENUM("''", "'fleem'")),
]
self._run_test(specs, ['enums'])
@testing.provide_metadata
def test_view_reflection(self):
- Table('x', self.metadata, Column('a', Integer), Column('b', String(50)))
+ Table('x',
+ self.metadata,
+ Column('a', Integer),
+ Column('b', String(50)))
self.metadata.create_all()
with testing.db.connect() as conn:
[('a', mysql.INTEGER), ('b', mysql.VARCHAR)]
)
-
@testing.exclude('mysql', '<', (5, 0, 0), 'no information_schema support')
def test_system_views(self):
dialect = testing.db.dialect
'REFERENCES `users` (`id`) '
'ON DELETE CASCADE ON UPDATE CASCADE')
eq_(m.groups(), ('addresses_user_id_fkey', '`user_id`',
- '`users`', '`id`', None, 'CASCADE', 'CASCADE'))
-
+ '`users`', '`id`', None, 'CASCADE', 'CASCADE'))
m = regex.match(' CONSTRAINT `addresses_user_id_fkey` '
'FOREIGN KEY (`user_id`) '
'REFERENCES `users` (`id`) '
'ON DELETE CASCADE ON UPDATE SET NULL')
eq_(m.groups(), ('addresses_user_id_fkey', '`user_id`',
- '`users`', '`id`', None, 'CASCADE', 'SET NULL'))
-
-
+ '`users`', '`id`', None, 'CASCADE', 'SET NULL'))
from sqlalchemy.util import u
from sqlalchemy import util
from sqlalchemy.dialects.mysql import base as mysql
-from sqlalchemy.testing import fixtures, AssertsCompiledSQL, AssertsExecutionResults
+from sqlalchemy.testing import (fixtures,
+ AssertsCompiledSQL,
+ AssertsExecutionResults)
from sqlalchemy import testing
import datetime
import decimal
from sqlalchemy import types as sqltypes
-class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
+class TypesTest(fixtures.TestBase,
+ AssertsExecutionResults,
+ AssertsCompiledSQL):
"Test MySQL column types"
__dialect__ = mysql.dialect()
'NUMERIC'),
(mysql.MSNumeric, [12], {},
'NUMERIC(12)'),
- (mysql.MSNumeric, [12, 4], {'unsigned':True},
+ (mysql.MSNumeric, [12, 4], {'unsigned': True},
'NUMERIC(12, 4) UNSIGNED'),
- (mysql.MSNumeric, [12, 4], {'zerofill':True},
+ (mysql.MSNumeric, [12, 4], {'zerofill': True},
'NUMERIC(12, 4) ZEROFILL'),
- (mysql.MSNumeric, [12, 4], {'zerofill':True, 'unsigned':True},
+ (mysql.MSNumeric, [12, 4], {'zerofill': True, 'unsigned': True},
'NUMERIC(12, 4) UNSIGNED ZEROFILL'),
(mysql.MSDecimal, [], {},
'DECIMAL(12)'),
(mysql.MSDecimal, [12, None], {},
'DECIMAL(12)'),
- (mysql.MSDecimal, [12, 4], {'unsigned':True},
+ (mysql.MSDecimal, [12, 4], {'unsigned': True},
'DECIMAL(12, 4) UNSIGNED'),
- (mysql.MSDecimal, [12, 4], {'zerofill':True},
+ (mysql.MSDecimal, [12, 4], {'zerofill': True},
'DECIMAL(12, 4) ZEROFILL'),
- (mysql.MSDecimal, [12, 4], {'zerofill':True, 'unsigned':True},
+ (mysql.MSDecimal, [12, 4], {'zerofill': True, 'unsigned': True},
'DECIMAL(12, 4) UNSIGNED ZEROFILL'),
(mysql.MSDouble, [None, None], {},
'DOUBLE'),
- (mysql.MSDouble, [12, 4], {'unsigned':True},
+ (mysql.MSDouble, [12, 4], {'unsigned': True},
'DOUBLE(12, 4) UNSIGNED'),
- (mysql.MSDouble, [12, 4], {'zerofill':True},
+ (mysql.MSDouble, [12, 4], {'zerofill': True},
'DOUBLE(12, 4) ZEROFILL'),
- (mysql.MSDouble, [12, 4], {'zerofill':True, 'unsigned':True},
+ (mysql.MSDouble, [12, 4], {'zerofill': True, 'unsigned': True},
'DOUBLE(12, 4) UNSIGNED ZEROFILL'),
(mysql.MSReal, [None, None], {},
'REAL'),
- (mysql.MSReal, [12, 4], {'unsigned':True},
+ (mysql.MSReal, [12, 4], {'unsigned': True},
'REAL(12, 4) UNSIGNED'),
- (mysql.MSReal, [12, 4], {'zerofill':True},
+ (mysql.MSReal, [12, 4], {'zerofill': True},
'REAL(12, 4) ZEROFILL'),
- (mysql.MSReal, [12, 4], {'zerofill':True, 'unsigned':True},
+ (mysql.MSReal, [12, 4], {'zerofill': True, 'unsigned': True},
'REAL(12, 4) UNSIGNED ZEROFILL'),
(mysql.MSFloat, [], {},
'FLOAT(12)'),
(mysql.MSFloat, [12, 4], {},
'FLOAT(12, 4)'),
- (mysql.MSFloat, [12, 4], {'unsigned':True},
+ (mysql.MSFloat, [12, 4], {'unsigned': True},
'FLOAT(12, 4) UNSIGNED'),
- (mysql.MSFloat, [12, 4], {'zerofill':True},
+ (mysql.MSFloat, [12, 4], {'zerofill': True},
'FLOAT(12, 4) ZEROFILL'),
- (mysql.MSFloat, [12, 4], {'zerofill':True, 'unsigned':True},
+ (mysql.MSFloat, [12, 4], {'zerofill': True, 'unsigned': True},
'FLOAT(12, 4) UNSIGNED ZEROFILL'),
(mysql.MSInteger, [], {},
'INTEGER'),
(mysql.MSInteger, [4], {},
'INTEGER(4)'),
- (mysql.MSInteger, [4], {'unsigned':True},
+ (mysql.MSInteger, [4], {'unsigned': True},
'INTEGER(4) UNSIGNED'),
- (mysql.MSInteger, [4], {'zerofill':True},
+ (mysql.MSInteger, [4], {'zerofill': True},
'INTEGER(4) ZEROFILL'),
- (mysql.MSInteger, [4], {'zerofill':True, 'unsigned':True},
+ (mysql.MSInteger, [4], {'zerofill': True, 'unsigned': True},
'INTEGER(4) UNSIGNED ZEROFILL'),
(mysql.MSBigInteger, [], {},
'BIGINT'),
(mysql.MSBigInteger, [4], {},
'BIGINT(4)'),
- (mysql.MSBigInteger, [4], {'unsigned':True},
+ (mysql.MSBigInteger, [4], {'unsigned': True},
'BIGINT(4) UNSIGNED'),
- (mysql.MSBigInteger, [4], {'zerofill':True},
+ (mysql.MSBigInteger, [4], {'zerofill': True},
'BIGINT(4) ZEROFILL'),
- (mysql.MSBigInteger, [4], {'zerofill':True, 'unsigned':True},
+ (mysql.MSBigInteger, [4], {'zerofill': True, 'unsigned': True},
'BIGINT(4) UNSIGNED ZEROFILL'),
- (mysql.MSMediumInteger, [], {},
- 'MEDIUMINT'),
- (mysql.MSMediumInteger, [4], {},
- 'MEDIUMINT(4)'),
- (mysql.MSMediumInteger, [4], {'unsigned':True},
- 'MEDIUMINT(4) UNSIGNED'),
- (mysql.MSMediumInteger, [4], {'zerofill':True},
- 'MEDIUMINT(4) ZEROFILL'),
- (mysql.MSMediumInteger, [4], {'zerofill':True, 'unsigned':True},
- 'MEDIUMINT(4) UNSIGNED ZEROFILL'),
+ (mysql.MSMediumInteger, [], {},
+ 'MEDIUMINT'),
+ (mysql.MSMediumInteger, [4], {},
+ 'MEDIUMINT(4)'),
+ (mysql.MSMediumInteger, [4], {'unsigned': True},
+ 'MEDIUMINT(4) UNSIGNED'),
+ (mysql.MSMediumInteger, [4], {'zerofill': True},
+ 'MEDIUMINT(4) ZEROFILL'),
+ (mysql.MSMediumInteger, [4], {'zerofill': True, 'unsigned': True},
+ 'MEDIUMINT(4) UNSIGNED ZEROFILL'),
(mysql.MSTinyInteger, [], {},
'TINYINT'),
(mysql.MSTinyInteger, [1], {},
'TINYINT(1)'),
- (mysql.MSTinyInteger, [1], {'unsigned':True},
+ (mysql.MSTinyInteger, [1], {'unsigned': True},
'TINYINT(1) UNSIGNED'),
- (mysql.MSTinyInteger, [1], {'zerofill':True},
+ (mysql.MSTinyInteger, [1], {'zerofill': True},
'TINYINT(1) ZEROFILL'),
- (mysql.MSTinyInteger, [1], {'zerofill':True, 'unsigned':True},
+ (mysql.MSTinyInteger, [1], {'zerofill': True, 'unsigned': True},
'TINYINT(1) UNSIGNED ZEROFILL'),
(mysql.MSSmallInteger, [], {},
'SMALLINT'),
(mysql.MSSmallInteger, [4], {},
'SMALLINT(4)'),
- (mysql.MSSmallInteger, [4], {'unsigned':True},
+ (mysql.MSSmallInteger, [4], {'unsigned': True},
'SMALLINT(4) UNSIGNED'),
- (mysql.MSSmallInteger, [4], {'zerofill':True},
+ (mysql.MSSmallInteger, [4], {'zerofill': True},
'SMALLINT(4) ZEROFILL'),
- (mysql.MSSmallInteger, [4], {'zerofill':True, 'unsigned':True},
+ (mysql.MSSmallInteger, [4], {'zerofill': True, 'unsigned': True},
'SMALLINT(4) UNSIGNED ZEROFILL'),
- ]
+ ]
for type_, args, kw, res in columns:
type_inst = type_(*args, **kw)
@testing.provide_metadata
def test_precision_float_roundtrip(self):
t = Table('t', self.metadata,
- Column('scale_value', mysql.DOUBLE(
- precision=15, scale=12, asdecimal=True)),
- Column('unscale_value', mysql.DOUBLE(
- decimal_return_scale=12, asdecimal=True))
- )
+ Column('scale_value', mysql.DOUBLE(
+ precision=15, scale=12, asdecimal=True)),
+ Column('unscale_value', mysql.DOUBLE(
+ decimal_return_scale=12, asdecimal=True)))
t.create(testing.db)
testing.db.execute(
t.insert(), scale_value=45.768392065789,
columns = [
(mysql.MSChar, [1], {},
'CHAR(1)'),
- (mysql.NCHAR, [1], {},
- 'NATIONAL CHAR(1)'),
- (mysql.MSChar, [1], {'binary':True},
+ (mysql.NCHAR, [1], {},
+ 'NATIONAL CHAR(1)'),
+ (mysql.MSChar, [1], {'binary': True},
'CHAR(1) BINARY'),
- (mysql.MSChar, [1], {'ascii':True},
+ (mysql.MSChar, [1], {'ascii': True},
'CHAR(1) ASCII'),
- (mysql.MSChar, [1], {'unicode':True},
+ (mysql.MSChar, [1], {'unicode': True},
'CHAR(1) UNICODE'),
- (mysql.MSChar, [1], {'ascii':True, 'binary':True},
+ (mysql.MSChar, [1], {'ascii': True, 'binary': True},
'CHAR(1) ASCII BINARY'),
- (mysql.MSChar, [1], {'unicode':True, 'binary':True},
+ (mysql.MSChar, [1], {'unicode': True, 'binary': True},
'CHAR(1) UNICODE BINARY'),
- (mysql.MSChar, [1], {'charset':'utf8'},
+ (mysql.MSChar, [1], {'charset': 'utf8'},
'CHAR(1) CHARACTER SET utf8'),
- (mysql.MSChar, [1], {'charset':'utf8', 'binary':True},
+ (mysql.MSChar, [1], {'charset': 'utf8', 'binary': True},
'CHAR(1) CHARACTER SET utf8 BINARY'),
- (mysql.MSChar, [1], {'charset':'utf8', 'unicode':True},
+ (mysql.MSChar, [1], {'charset': 'utf8', 'unicode': True},
'CHAR(1) CHARACTER SET utf8'),
- (mysql.MSChar, [1], {'charset':'utf8', 'ascii':True},
+ (mysql.MSChar, [1], {'charset': 'utf8', 'ascii': True},
'CHAR(1) CHARACTER SET utf8'),
(mysql.MSChar, [1], {'collation': 'utf8_bin'},
'CHAR(1) COLLATE utf8_bin'),
(mysql.MSChar, [1], {'charset': 'utf8', 'binary': True},
'CHAR(1) CHARACTER SET utf8 BINARY'),
(mysql.MSChar, [1], {'charset': 'utf8', 'collation': 'utf8_bin',
- 'binary': True},
+ 'binary': True},
'CHAR(1) CHARACTER SET utf8 COLLATE utf8_bin'),
- (mysql.MSChar, [1], {'national':True},
+ (mysql.MSChar, [1], {'national': True},
'NATIONAL CHAR(1)'),
- (mysql.MSChar, [1], {'national':True, 'charset':'utf8'},
+ (mysql.MSChar, [1], {'national': True, 'charset': 'utf8'},
'NATIONAL CHAR(1)'),
- (mysql.MSChar, [1], {'national':True, 'charset':'utf8',
- 'binary':True},
+ (mysql.MSChar, [1], {'national': True, 'charset': 'utf8',
+ 'binary': True},
'NATIONAL CHAR(1) BINARY'),
- (mysql.MSChar, [1], {'national':True, 'binary':True,
- 'unicode':True},
+ (mysql.MSChar, [1], {'national': True, 'binary': True,
+ 'unicode': True},
'NATIONAL CHAR(1) BINARY'),
- (mysql.MSChar, [1], {'national':True, 'collation':'utf8_bin'},
+ (mysql.MSChar, [1], {'national': True, 'collation': 'utf8_bin'},
'NATIONAL CHAR(1) COLLATE utf8_bin'),
- (mysql.MSString, [1], {'charset':'utf8', 'collation':'utf8_bin'},
+ (mysql.MSString, [1], {'charset': 'utf8', 'collation': 'utf8_bin'},
'VARCHAR(1) CHARACTER SET utf8 COLLATE utf8_bin'),
- (mysql.MSString, [1], {'national':True, 'collation':'utf8_bin'},
+ (mysql.MSString, [1], {'national': True, 'collation': 'utf8_bin'},
'NATIONAL VARCHAR(1) COLLATE utf8_bin'),
- (mysql.MSTinyText, [], {'charset':'utf8', 'collation':'utf8_bin'},
+ (mysql.MSTinyText,
+ [],
+ {'charset': 'utf8', 'collation': 'utf8_bin'},
'TINYTEXT CHARACTER SET utf8 COLLATE utf8_bin'),
- (mysql.MSMediumText, [], {'charset':'utf8', 'binary':True},
+ (mysql.MSMediumText, [], {'charset': 'utf8', 'binary': True},
'MEDIUMTEXT CHARACTER SET utf8 BINARY'),
- (mysql.MSLongText, [], {'ascii':True},
+ (mysql.MSLongText, [], {'ascii': True},
'LONGTEXT ASCII'),
- (mysql.ENUM, ["foo", "bar"], {'unicode':True},
+ (mysql.ENUM, ["foo", "bar"], {'unicode': True},
'''ENUM('foo','bar') UNICODE'''),
(String, [20], {"collation": "utf8"}, 'VARCHAR(20) COLLATE utf8')
-
-
- ]
+ ]
for type_, args, kw, res in columns:
type_inst = type_(*args, **kw)
# test that repr() copies out all arguments
self.assert_compile(
eval("mysql.%r" % type_inst)
- if type_ is not String
- else eval("%r" % type_inst),
+ if type_ is not String
+ else eval("%r" % type_inst),
res
)
@testing.provide_metadata
def test_charset_collate_table(self):
t = Table('foo', self.metadata,
- Column('id', Integer),
- Column('data', UnicodeText),
- mysql_default_charset='utf8',
- mysql_collate='utf8_bin'
- )
+ Column('id', Integer),
+ Column('data', UnicodeText),
+ mysql_default_charset='utf8',
+ mysql_collate='utf8_bin')
t.create()
m2 = MetaData(testing.db)
t2 = Table('foo', m2, autoload=True)
# MySQLdb 1.2.3 and also need to pass either use_unicode=1
# or charset=utf8 to the URL.
t.insert().execute(id=1, data=u('some text'))
- assert isinstance(testing.db.scalar(select([t.c.data])), util.text_type)
+ assert isinstance(testing.db.scalar(select([t.c.data])),
+ util.text_type)
def test_bit_50(self):
"""Exercise BIT types on 5.0+ (not valid for all engine types)"""
row = table.select().execute().first()
try:
self.assert_(list(row) == expected)
- except:
+ except Exception:
print("Storing %s" % store)
print("Expected %s" % expected)
print("Found %s" % list(row))
Column('b2', Boolean),
Column('b3', mysql.MSTinyInteger(1)),
Column('b4', mysql.MSTinyInteger(1, unsigned=True)),
- Column('b5', mysql.MSTinyInteger),
- )
+ Column('b5', mysql.MSTinyInteger))
self.metadata.create_all()
table = bool_table
Column('b2', Boolean),
Column('b3', BOOLEAN),
Column('b4', BOOLEAN),
- autoload=True,
- )
+ autoload=True)
eq_(colspec(table.c.b3), 'b3 BOOL')
eq_(colspec(table.c.b4), 'b4 BOOL')
roundtrip([None, None, None, None, None])
def test_timestamp_fsp(self):
self.assert_compile(
- mysql.TIMESTAMP(fsp=5),
- "TIMESTAMP(5)"
+ mysql.TIMESTAMP(fsp=5),
+ "TIMESTAMP(5)"
)
def test_timestamp_defaults(self):
]
)
-
def test_datetime_generic(self):
self.assert_compile(
- mysql.DATETIME(),
- "DATETIME"
+ mysql.DATETIME(),
+ "DATETIME"
)
def test_datetime_fsp(self):
self.assert_compile(
- mysql.DATETIME(fsp=4),
- "DATETIME(4)"
+ mysql.DATETIME(fsp=4),
+ "DATETIME(4)"
)
-
def test_time_generic(self):
""""Exercise TIME."""
self.assert_compile(
- mysql.TIME(),
- "TIME"
+ mysql.TIME(),
+ "TIME"
)
def test_time_fsp(self):
self.assert_compile(
- mysql.TIME(fsp=5),
- "TIME(5)"
+ mysql.TIME(fsp=5),
+ "TIME(5)"
)
def test_time_result_processor(self):
eq_(
mysql.TIME().result_processor(None, None)(
- datetime.timedelta(seconds=35, minutes=517,
- microseconds=450
- )),
+ datetime.timedelta(seconds=35, minutes=517,
+ microseconds=450)),
datetime.time(8, 37, 35, 450)
)
@testing.provide_metadata
def test_time_roundtrip(self):
t = Table('mysql_time', self.metadata,
- Column('t1', mysql.TIME())
- )
+ Column('t1', mysql.TIME()))
t.create()
t.insert().values(t1=datetime.time(8, 37, 35)).execute()
eq_(select([t.c.t1]).scalar(), datetime.time(8, 37, 35))
'mysql_enum', self.metadata,
Column('e1', e1),
Column('e2', e2, nullable=False),
- Column(
- 'e2generic',
- Enum("a", "b", validate_strings=True), nullable=False),
+ Column('e2generic',
+ Enum("a", "b", validate_strings=True), nullable=False),
Column('e3', e3),
Column('e4', e4,
nullable=False),
t.create()
with testing.db.connect() as conn:
- conn.execute(t.insert(), {"e1": "nonexistent", "e2": "nonexistent"})
+ conn.execute(t.insert(),
+ {"e1": "nonexistent", "e2": "nonexistent"})
conn.execute(t.insert(), {"e1": "", "e2": ""})
conn.execute(t.insert(), {"e1": "two", "e2": "two"})
conn.execute(t.insert(), {"e1": None, "e2": None})
def colspec(c):
return testing.db.dialect.ddl_compiler(
testing.db.dialect, None).get_column_specification(c)
-
from sqlalchemy.util import u, OrderedDict
from sqlalchemy.dialects.postgresql import aggregate_order_by, insert
+
class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
__prefer__ = 'postgresql'
m = MetaData()
tbl = Table('testtbl', m, Column('data', String))
- idx1 = Index('test_idx1', tbl.c.data)
- idx2 = Index('test_idx2', tbl.c.data, postgresql_tablespace='sometablespace')
- idx3 = Index('test_idx3', tbl.c.data, postgresql_tablespace='another table space')
+ idx1 = Index('test_idx1',
+ tbl.c.data)
+ idx2 = Index('test_idx2',
+ tbl.c.data,
+ postgresql_tablespace='sometablespace')
+ idx3 = Index('test_idx3',
+ tbl.c.data,
+ postgresql_tablespace='another table space')
self.assert_compile(schema.CreateIndex(idx1),
'CREATE INDEX test_idx1 ON testtbl '
m = MetaData()
tbl = Table('testtbl', m, Column('data', String))
- idx1 = Index(
- 'test_idx1',
- tbl.c.data,
- postgresql_using='btree',
- postgresql_tablespace='atablespace',
- postgresql_with={"fillfactor": 60},
- postgresql_where=and_(tbl.c.data > 5, tbl.c.data < 10))
+ idx1 = Index('test_idx1',
+ tbl.c.data,
+ postgresql_using='btree',
+ postgresql_tablespace='atablespace',
+ postgresql_with={"fillfactor": 60},
+ postgresql_where=and_(tbl.c.data > 5, tbl.c.data < 10))
self.assert_compile(schema.CreateIndex(idx1),
'CREATE INDEX test_idx1 ON testtbl '
postgresql.array([1, 2]) == [3, 4, 5],
"ARRAY[%(param_1)s, %(param_2)s] = "
"ARRAY[%(param_3)s, %(param_4)s, %(param_5)s]",
- checkparams={'param_5': 5, 'param_4': 4, 'param_1': 1,
- 'param_3': 3, 'param_2': 2}
+ checkparams={'param_5': 5,
+ 'param_4': 4,
+ 'param_1': 1,
+ 'param_3': 3,
+ 'param_2': 2}
)
stmt2 = sql_util.ClauseAdapter(a1).traverse(stmt)
self.assert_compile(
stmt2,
- "SELECT array_agg(foo.a ORDER BY foo.b DESC) AS array_agg_1 FROM table1 AS foo"
+ "SELECT array_agg(foo.a ORDER BY foo.b DESC) AS array_agg_1 "
+ "FROM table1 AS foo"
)
r = t.insert().execute(user_name='user',
user_password='lala')
assert r.inserted_primary_key == [1]
- l = t.select().execute().fetchall()
- assert l == [(1, 'user', 'lala')]
+ result = t.select().execute().fetchall()
+ assert result == [(1, 'user', 'lala')]
finally:
testing.db.execute('drop table speedy_users')
conn.scalar(sql.select([bind_targets.c.data])),
"new updated data processed"
)
-
tmp.sort()
r1, r2 = [idx[1] for idx in tmp]
assert r1.name == 'idx2'
- assert r1.unique == True
- assert r2.unique == False
+ assert r1.unique is True
+ assert r2.unique is False
assert [t2.c.id] == r1.columns
assert [t2.c.name] == r2.columns
psycopg2, psycopg2cffi, base
dialects = (pg8000.dialect(), pygresql.dialect(),
- psycopg2.dialect(), psycopg2cffi.dialect())
+ psycopg2.dialect(), psycopg2cffi.dialect())
for dialect in dialects:
typ = Numeric().dialect_impl(dialect)
for code in base._INT_TYPES + base._FLOAT_TYPES + \
def test_array_comparison(self):
arrtable = self.tables.arrtable
arrtable.insert().execute(id=5, intarr=[1, 2, 3],
- strarr=[util.u('abc'), util.u('def')])
- results = select([arrtable.c.id]).\
- where(arrtable.c.intarr < [4, 5, 6]).execute()\
- .fetchall()
+ strarr=[util.u('abc'), util.u('def')])
+ results = select([arrtable.c.id])\
+ .where(arrtable.c.intarr < [4, 5, 6])\
+ .execute()\
+ .fetchall()
eq_(len(results), 1)
eq_(results[0][0], 5)
def test_where_getitem(self):
self._test_where(
- self.hashcol['bar'] == None,
+ self.hashcol['bar'] == None, # noqa
"(test_table.hash -> %(hash_1)s) IS NULL"
)
def test_cols_against_is(self):
self._test_cols(
- self.hashcol['foo'] != None,
+ self.hashcol['foo'] != None, # noqa
"(test_table.hash -> %(hash_1)s) IS NOT NULL AS anon_1"
)
# do anything
def test_where_getitem(self):
self._test_where(
- self.jsoncol['bar'] == None,
+ self.jsoncol['bar'] == None, # noqa
"(test_table.test_column -> %(test_column_1)s) IS NULL"
)
def test_where_path(self):
self._test_where(
- self.jsoncol[("foo", 1)] == None,
+ self.jsoncol[("foo", 1)] == None, # noqa
"(test_table.test_column #> %(test_column_1)s) IS NULL"
)
def test_where_getitem_as_text(self):
self._test_where(
- self.jsoncol['bar'].astext == None,
+ self.jsoncol['bar'].astext == None, # noqa
"(test_table.test_column ->> %(test_column_1)s) IS NULL"
)
def test_where_path_as_text(self):
self._test_where(
- self.jsoncol[("foo", 1)].astext == None,
+ self.jsoncol[("foo", 1)].astext == None, # noqa
"(test_table.test_column #>> %(test_column_1)s) IS NULL"
)
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.sql import table, column
from sqlalchemy import types as sqltypes
-from sqlalchemy.testing import fixtures, AssertsExecutionResults, AssertsCompiledSQL
+from sqlalchemy.testing import (fixtures,
+ AssertsExecutionResults,
+ AssertsCompiledSQL)
from sqlalchemy import testing
from sqlalchemy.testing import engines
from sqlalchemy import String, VARCHAR, NVARCHAR, Unicode, Integer,\
func, insert, update, MetaData, select, Table, Column, text,\
Sequence, Float
from sqlalchemy import schema
+from sqlalchemy.testing.mock import Mock, call
+
class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
"Test Firebird domains"
con.execute('CREATE DOMAIN img_domain AS BLOB SUB_TYPE '
'BINARY')
except ProgrammingError as e:
- if not 'attempt to store duplicate value' in str(e):
+ if 'attempt to store duplicate value' not in str(e):
raise e
con.execute('''CREATE GENERATOR gen_testtable_id''')
con.execute('''CREATE TABLE testtable (question int_domain,
'dt',
'redundant',
]),
- "Columns of reflected table didn't equal expected "
- "columns")
+ "Columns of reflected table didn't equal expected "
+ "columns")
eq_(table.c.question.primary_key, True)
# disabled per http://www.sqlalchemy.org/trac/ticket/1660
__only_on__ = 'firebird'
# NB: spacing and newlines are *significant* here!
- # PS: this test is superfluous on recent FB, where the issue 356 is probably fixed...
+ # PS: this test is superfluous on recent FB, where the issue 356 is
+ # probably fixed...
AUTOINC_DM = """\
CREATE DOMAIN AUTOINC_DM
exc.CompileError,
"VARCHAR requires a length on dialect firebird",
type_.compile,
- dialect=firebird.dialect())
+ dialect=firebird.dialect())
t1 = Table('sometable', MetaData(),
- Column('somecolumn', type_)
- )
+ Column('somecolumn', type_))
assert_raises_message(
exc.CompileError,
r"\(in table 'sometable', column 'somecolumn'\)\: "
self.assert_compile(func.current_time(), 'CURRENT_TIME')
self.assert_compile(func.foo(), 'foo')
m = MetaData()
- t = Table('sometable', m, Column('col1', Integer), Column('col2'
- , Integer))
+ t = Table('sometable',
+ m,
+ Column('col1', Integer),
+ Column('col2', Integer))
self.assert_compile(select([func.max(t.c.col1)]),
'SELECT max(sometable.col1) AS max_1 FROM '
'sometable')
'SUBSTRING(:substring_1 FROM :substring_2)')
def test_update_returning(self):
- table1 = table('mytable', column('myid', Integer), column('name'
- , String(128)), column('description',
- String(128)))
- u = update(table1, values=dict(name='foo'
- )).returning(table1.c.myid, table1.c.name)
+ table1 = table('mytable',
+ column('myid', Integer),
+ column('name', String(128)),
+ column('description', String(128)))
+ u = update(table1, values=dict(name='foo'))\
+ .returning(table1.c.myid, table1.c.name)
self.assert_compile(u,
'UPDATE mytable SET name=:name RETURNING '
'mytable.myid, mytable.name')
'UPDATE mytable SET name=:name RETURNING '
'mytable.myid, mytable.name, '
'mytable.description')
- u = update(table1, values=dict(name='foo'
- )).returning(func.length(table1.c.name))
+ u = update(table1, values=dict(name='foo')) \
+ .returning(func.length(table1.c.name))
self.assert_compile(u,
'UPDATE mytable SET name=:name RETURNING '
'char_length(mytable.name) AS length_1')
def test_insert_returning(self):
- table1 = table('mytable', column('myid', Integer), column('name'
- , String(128)), column('description',
- String(128)))
- i = insert(table1, values=dict(name='foo'
- )).returning(table1.c.myid, table1.c.name)
+ table1 = table('mytable',
+ column('myid', Integer),
+ column('name', String(128)),
+ column('description', String(128)))
+ i = insert(table1, values=dict(name='foo'))\
+ .returning(table1.c.myid, table1.c.name)
self.assert_compile(i,
'INSERT INTO mytable (name) VALUES (:name) '
'RETURNING mytable.myid, mytable.name')
'INSERT INTO mytable (name) VALUES (:name) '
'RETURNING mytable.myid, mytable.name, '
'mytable.description')
- i = insert(table1, values=dict(name='foo'
- )).returning(func.length(table1.c.name))
+ i = insert(table1, values=dict(name='foo'))\
+ .returning(func.length(table1.c.name))
self.assert_compile(i,
'INSERT INTO mytable (name) VALUES (:name) '
'RETURNING char_length(mytable.name) AS '
column("$somecol"),
'"$somecol"'
)
+
+
class TypesTest(fixtures.TestBase):
__only_on__ = 'firebird'
def test_infinite_float(self):
metadata = self.metadata
t = Table('t', metadata,
- Column('data', Float)
- )
+ Column('data', Float))
metadata.create_all()
t.insert().execute(data=float('inf'))
eq_(t.select().execute().fetchall(),
- [(float('inf'),)]
- )
+ [(float('inf'),)])
+
class MiscTest(fixtures.TestBase):
# string length the UDF was declared to accept). This test
# checks that at least it works ok in other cases.
- t = Table('t1', metadata, Column('id', Integer,
- Sequence('t1idseq'), primary_key=True), Column('name'
- , String(10)))
+ t = Table('t1',
+ metadata,
+ Column('id', Integer, Sequence('t1idseq'), primary_key=True),
+ Column('name', String(10)))
metadata.create_all()
t.insert(values=dict(name='dante')).execute()
t.insert(values=dict(name='alighieri')).execute()
metadata.bind = engine
t = Table('t1', metadata, Column('data', String(10)))
metadata.create_all()
- r = t.insert().execute({'data': 'd1'}, {'data': 'd2'}, {'data'
- : 'd3'})
- r = t.update().where(t.c.data == 'd2').values(data='d3'
- ).execute()
+ r = t.insert().execute({'data': 'd1'}, {'data': 'd2'}, {'data': 'd3'})
+ r = t.update().where(t.c.data == 'd2').values(data='d3').execute()
eq_(r.rowcount, 1)
r = t.delete().where(t.c.data == 'd3').execute()
eq_(r.rowcount, 2)
t.delete().execution_options(enable_rowcount=False).execute()
eq_(r.rowcount, -1)
engine.dispose()
- engine = engines.testing_engine(options={'enable_rowcount'
- : False})
+ engine = engines.testing_engine(options={'enable_rowcount': False})
assert not engine.dialect.supports_sane_rowcount
metadata.bind = engine
- r = t.insert().execute({'data': 'd1'}, {'data': 'd2'}, {'data'
- : 'd3'})
- r = t.update().where(t.c.data == 'd2').values(data='d3'
- ).execute()
+ r = t.insert().execute({'data': 'd1'}, {'data': 'd2'}, {'data': 'd3'})
+ r = t.update().where(t.c.data == 'd2').values(data='d3').execute()
eq_(r.rowcount, -1)
r = t.delete().where(t.c.data == 'd3').execute()
eq_(r.rowcount, -1)
'hello % world'):
eq_(testing.db.scalar(expr), result)
-from sqlalchemy.testing.mock import Mock, call
-
class ArgumentTest(fixtures.TestBase):
def _dbapi(self):
_initialize=False
)
)
- engine = engines.testing_engine("firebird+%s://" % type_,
- options=kw)
+ engine = engines.testing_engine("firebird+%s://" % type_, options=kw)
return engine
def test_retaining_flag_default_kinterbasdb(self):
engine = self._engine("fdb", retaining=False)
self._assert_retaining(engine, False)
-
def _assert_retaining(self, engine, flag):
conn = engine.connect()
trans = conn.begin()
engine.dialect.dbapi.connect.return_value.rollback.mock_calls,
[call(flag)]
)
-
-
-
-
-
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.mock import Mock
+
def mock_dbapi():
return Mock(paramstyle='qmark',
connect=Mock(
- return_value=Mock(
- cursor=Mock(
- return_value=Mock(
- description=None,
- rowcount=None)
- )
- )
- )
- )
+ return_value=Mock(
+ cursor=Mock(return_value=Mock(description=None,
+ rowcount=None)))))
+
class MxODBCTest(fixtures.TestBase):
dbapi = mock_dbapi()
engine = engines.testing_engine('mssql+mxodbc://localhost',
- options={'module': dbapi, '_initialize': False})
+ options={'module': dbapi,
+ '_initialize': False})
conn = engine.connect()
# crud: uses execute
conn.execute(t1.select())
# manual flagging
- conn.execution_options(native_odbc_execute=True).\
- execute(t1.select())
- conn.execution_options(native_odbc_execute=False).\
- execute(t1.insert().values(c1='foo'))
+ conn.execution_options(native_odbc_execute=True).execute(t1.select())
+ conn.execution_options(native_odbc_execute=False)\
+ .execute(t1.insert().values(c1='foo'))
- eq_(
- [c[2] for c in
+ eq_([c[2] for c in
dbapi.connect.return_value.cursor.return_value.execute.mock_calls],
[{'direct': True}, {'direct': True}, {'direct': True},
- {'direct': True}, {'direct': False}, {'direct': True}]
- )
+ {'direct': True}, {'direct': False}, {'direct': True}])
from sqlalchemy import types as sqltypes, exc, schema
from sqlalchemy.sql import table, column
from sqlalchemy.sql.elements import quoted_name
-from sqlalchemy.testing import fixtures, AssertsExecutionResults, AssertsCompiledSQL
+from sqlalchemy.testing import (fixtures,
+ AssertsExecutionResults,
+ AssertsCompiledSQL)
from sqlalchemy import testing
from sqlalchemy.util import u, b
from sqlalchemy import util
from sqlalchemy import sql
from sqlalchemy.testing.mock import Mock
+
class OutParamTest(fixtures.TestBase, AssertsExecutionResults):
__only_on__ = 'oracle+cx_oracle'
__backend__ = True
def test_out_params(self):
result = testing.db.execute(text('begin foo(:x_in, :x_out, :y_out, '
- ':z_out); end;',
- bindparams=[bindparam('x_in', Float),
- outparam('x_out', Integer),
- outparam('y_out', Float),
- outparam('z_out', String)]), x_in=5)
+ ':z_out); end;',
+ bindparams=[bindparam('x_in', Float),
+ outparam('x_out', Integer),
+ outparam('y_out', Float),
+ outparam('z_out', String)]),
+ x_in=5)
eq_(result.out_parameters,
{'x_out': 10, 'y_out': 75, 'z_out': None})
assert isinstance(result.out_parameters['x_out'], int)
def teardown_class(cls):
testing.db.execute("DROP PROCEDURE foo")
+
class CXOracleArgsTest(fixtures.TestBase):
__only_on__ = 'oracle+cx_oracle'
__backend__ = True
def test_exclude_inputsizes_custom(self):
import cx_Oracle
dialect = cx_oracle.dialect(dbapi=cx_Oracle,
- exclude_setinputsizes=('NCLOB',))
+ exclude_setinputsizes=('NCLOB',))
eq_(dialect.exclude_setinputsizes, set([cx_Oracle.NCLOB]))
+
class QuotedBindRoundTripTest(fixtures.TestBase):
__only_on__ = 'oracle'
metadata = self.metadata
table = Table("t1", metadata,
- Column("option", Integer),
- Column("plain", Integer, quote=True),
- # test that quote works for a reserved word
- # that the dialect isn't aware of when quote
- # is set
- Column("union", Integer, quote=True)
- )
+ Column("option", Integer),
+ Column("plain", Integer, quote=True),
+ # test that quote works for a reserved word
+ # that the dialect isn't aware of when quote
+ # is set
+ Column("union", Integer, quote=True))
metadata.create_all()
table.insert().execute(
testing.db.scalar(t.select()), 10
)
+
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
- __dialect__ = "oracle" #oracle.dialect()
+ __dialect__ = "oracle" # oracle.dialect()
def test_true_false(self):
self.assert_compile(
s = select([s.c.col1, s.c.col2])
self.assert_compile(s, "SELECT col1, col2 FROM (SELECT "
- "sometable.col1 AS col1, sometable.col2 "
- "AS col2 FROM sometable)")
+ "sometable.col1 AS col1, sometable.col2 "
+ "AS col2 FROM sometable)")
def test_bindparam_quote(self):
"""test that bound parameters take on quoting for reserved words,
)
t = Table("s", MetaData(), Column('plain', Integer, quote=True))
self.assert_compile(
- t.insert().values(plain=5), 'INSERT INTO s ("plain") VALUES (:"plain")'
+ t.insert().values(plain=5),
+ 'INSERT INTO s ("plain") VALUES (:"plain")'
)
self.assert_compile(
t.update().values(plain=5), 'UPDATE s SET "plain"=:"plain"'
'FROM (SELECT col1, col2, ROWNUM AS ora_rn '
'FROM (SELECT sometable.col1 AS col1, '
'sometable.col2 AS col2 FROM sometable) '
- 'WHERE ROWNUM <= :param_1 + :param_2) WHERE ora_rn > '
- ':param_2)',
+ 'WHERE ROWNUM <= :param_1 + :param_2) '
+ 'WHERE ora_rn > :param_2)',
checkparams={'param_1': 10, 'param_2': 20})
self.assert_compile(s2,
'FROM (SELECT col1, col2, ROWNUM AS ora_rn '
'FROM (SELECT sometable.col1 AS col1, '
'sometable.col2 AS col2 FROM sometable) '
- 'WHERE ROWNUM <= :param_1 + :param_2) WHERE ora_rn > '
- ':param_2)')
+ 'WHERE ROWNUM <= :param_1 + :param_2) '
+ 'WHERE ora_rn > :param_2)')
c = s2.compile(dialect=oracle.OracleDialect())
eq_(len(c._result_columns), 2)
assert s.c.col1 in set(c._create_result_map()['col1'][1])
def test_for_update(self):
table1 = table('mytable',
- column('myid'), column('name'), column('description'))
+ column('myid'), column('name'), column('description'))
self.assert_compile(
table1.select(table1.c.myid == 7).with_for_update(),
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE")
self.assert_compile(
- table1.select(table1.c.myid == 7).with_for_update(of=table1.c.myid),
+ table1
+ .select(table1.c.myid == 7)
+ .with_for_update(of=table1.c.myid),
"SELECT mytable.myid, mytable.name, mytable.description "
- "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE OF mytable.myid")
+ "FROM mytable WHERE mytable.myid = :myid_1 "
+ "FOR UPDATE OF mytable.myid")
self.assert_compile(
table1.select(table1.c.myid == 7).with_for_update(nowait=True),
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE NOWAIT")
self.assert_compile(
- table1.select(table1.c.myid == 7).
- with_for_update(nowait=True, of=table1.c.myid),
+ table1
+ .select(table1.c.myid == 7)
+ .with_for_update(nowait=True, of=table1.c.myid),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 "
"FOR UPDATE OF mytable.myid NOWAIT")
self.assert_compile(
- table1.select(table1.c.myid == 7).
- with_for_update(nowait=True, of=[table1.c.myid, table1.c.name]),
+ table1
+ .select(table1.c.myid == 7)
+ .with_for_update(nowait=True, of=[table1.c.myid, table1.c.name]),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE OF "
"mytable.myid, mytable.name NOWAIT")
self.assert_compile(
- table1.select(table1.c.myid == 7).
- with_for_update(skip_locked=True, of=[table1.c.myid, table1.c.name]),
+ table1.select(table1.c.myid == 7)
+ .with_for_update(skip_locked=True,
+ of=[table1.c.myid, table1.c.name]),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE OF "
"mytable.myid, mytable.name SKIP LOCKED")
# key_share has no effect
self.assert_compile(
- table1.select(table1.c.myid == 7).
- with_for_update(key_share=True),
+ table1.select(table1.c.myid == 7).with_for_update(key_share=True),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE")
# read has no effect
self.assert_compile(
- table1.select(table1.c.myid == 7).
- with_for_update(read=True, key_share=True),
+ table1
+ .select(table1.c.myid == 7)
+ .with_for_update(read=True, key_share=True),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE")
ta = table1.alias()
self.assert_compile(
- ta.select(ta.c.myid == 7).
- with_for_update(of=[ta.c.myid, ta.c.name]),
+ ta
+ .select(ta.c.myid == 7)
+ .with_for_update(of=[ta.c.myid, ta.c.name]),
"SELECT mytable_1.myid, mytable_1.name, mytable_1.description "
"FROM mytable mytable_1 "
"WHERE mytable_1.myid = :myid_1 FOR UPDATE OF "
t = table('sometable', column('col1'), column('col2'))
dialect = oracle.OracleDialect(use_binds_for_limits=False)
- self.assert_compile(select([t]).limit(10),
- "SELECT col1, col2 FROM (SELECT sometable.col1 AS col1, "
- "sometable.col2 AS col2 FROM sometable) WHERE ROWNUM <= 10",
- dialect=dialect)
+ self.assert_compile(
+ select([t]).limit(10),
+ "SELECT col1, col2 FROM (SELECT sometable.col1 AS col1, "
+ "sometable.col2 AS col2 FROM sometable) WHERE ROWNUM <= 10",
+ dialect=dialect)
- self.assert_compile(select([t]).offset(10),
- "SELECT col1, col2 FROM (SELECT col1, col2, ROWNUM AS ora_rn "
- "FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
- "FROM sometable)) WHERE ora_rn > 10",
- dialect=dialect)
+ self.assert_compile(
+ select([t]).offset(10),
+ "SELECT col1, col2 FROM (SELECT col1, col2, ROWNUM AS ora_rn "
+ "FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
+ "FROM sometable)) WHERE ora_rn > 10",
+ dialect=dialect)
- self.assert_compile(select([t]).limit(10).offset(10),
- "SELECT col1, col2 FROM (SELECT col1, col2, ROWNUM AS ora_rn "
- "FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
- "FROM sometable) WHERE ROWNUM <= 20) WHERE ora_rn > 10",
- dialect=dialect)
+ self.assert_compile(
+ select([t]).limit(10).offset(10),
+ "SELECT col1, col2 FROM (SELECT col1, col2, ROWNUM AS ora_rn "
+ "FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
+ "FROM sometable) WHERE ROWNUM <= 20) WHERE ora_rn > 10",
+ dialect=dialect)
def test_use_binds_for_limits_enabled(self):
t = table('sometable', column('col1'), column('col2'))
dialect = oracle.OracleDialect(use_binds_for_limits=True)
- self.assert_compile(select([t]).limit(10),
- "SELECT col1, col2 FROM (SELECT sometable.col1 AS col1, "
- "sometable.col2 AS col2 FROM sometable) WHERE ROWNUM "
- "<= :param_1",
- dialect=dialect)
-
- self.assert_compile(select([t]).offset(10),
- "SELECT col1, col2 FROM (SELECT col1, col2, ROWNUM AS ora_rn "
- "FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
- "FROM sometable)) WHERE ora_rn > :param_1",
- dialect=dialect)
-
- self.assert_compile(select([t]).limit(10).offset(10),
- "SELECT col1, col2 FROM (SELECT col1, col2, ROWNUM AS ora_rn "
- "FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
- "FROM sometable) WHERE ROWNUM <= :param_1 + :param_2) "
- "WHERE ora_rn > :param_2",
- dialect=dialect,
- checkparams={'param_1': 10, 'param_2': 10})
+ self.assert_compile(
+ select([t]).limit(10),
+ "SELECT col1, col2 FROM (SELECT sometable.col1 AS col1, "
+ "sometable.col2 AS col2 FROM sometable) WHERE ROWNUM "
+ "<= :param_1",
+ dialect=dialect)
+
+ self.assert_compile(
+ select([t]).offset(10),
+ "SELECT col1, col2 FROM (SELECT col1, col2, ROWNUM AS ora_rn "
+ "FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
+ "FROM sometable)) WHERE ora_rn > :param_1",
+ dialect=dialect)
+
+ self.assert_compile(
+ select([t]).limit(10).offset(10),
+ "SELECT col1, col2 FROM (SELECT col1, col2, ROWNUM AS ora_rn "
+ "FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
+ "FROM sometable) WHERE ROWNUM <= :param_1 + :param_2) "
+ "WHERE ora_rn > :param_2",
+ dialect=dialect,
+ checkparams={'param_1': 10, 'param_2': 10})
def test_long_labels(self):
dialect = default.DefaultDialect()
m,
Column('id', Integer, primary_key=True),
Column('thirty_characters_table_id',
- Integer,
- ForeignKey('thirty_characters_table_xxxxxx.id'),
- primary_key=True
- )
- )
+ Integer,
+ ForeignKey('thirty_characters_table_xxxxxx.id'),
+ primary_key=True))
anon = a_table.alias()
self.assert_compile(select([other_table,
def test_outer_join(self):
table1 = table('mytable',
- column('myid', Integer),
- column('name', String),
- column('description', String),
- )
+ column('myid', Integer),
+ column('name', String),
+ column('description', String))
table2 = table(
'myothertable',
column('otherstuff', String),
)
- query = select([table1, table2], or_(table1.c.name == 'fred',
- table1.c.myid == 10, table2.c.othername != 'jack',
- text('EXISTS (select yay from foo where boo = lar)')
- ), from_obj=[outerjoin(table1, table2,
- table1.c.myid == table2.c.otherid)])
+ query = select([table1, table2],
+ or_(table1.c.name == 'fred',
+ table1.c.myid == 10, table2.c.othername != 'jack',
+ text('EXISTS (select yay from foo where boo = lar)')
+ ),
+ from_obj=[outerjoin(table1,
+ table2,
+ table1.c.myid == table2.c.otherid)])
self.assert_compile(query,
'SELECT mytable.myid, mytable.name, '
'mytable.description, myothertable.otherid,'
'lar)) AND mytable.myid = '
'myothertable.otherid(+)',
dialect=oracle.OracleDialect(use_ansi=False))
- query = table1.outerjoin(table2, table1.c.myid
- == table2.c.otherid).outerjoin(table3,
- table3.c.userid == table2.c.otherid)
+ query = table1.outerjoin(table2,
+ table1.c.myid == table2.c.otherid) \
+ .outerjoin(table3, table3.c.userid == table2.c.otherid)
self.assert_compile(query.select(),
'SELECT mytable.myid, mytable.name, '
'mytable.description, myothertable.otherid,'
'myothertable.otherid AND mytable.myid = '
'myothertable.otherid(+)',
dialect=oracle.dialect(use_ansi=False))
- query = table1.join(table2, table1.c.myid
- == table2.c.otherid).join(table3,
- table3.c.userid == table2.c.otherid)
+ query = table1.join(table2,
+ table1.c.myid == table2.c.otherid) \
+ .join(table3, table3.c.userid == table2.c.otherid)
self.assert_compile(query.select(),
'SELECT mytable.myid, mytable.name, '
'mytable.description, myothertable.otherid,'
'myothertable.otherid AND mytable.myid = '
'myothertable.otherid',
dialect=oracle.dialect(use_ansi=False))
- query = table1.join(table2, table1.c.myid
- == table2.c.otherid).outerjoin(table3,
- table3.c.userid == table2.c.otherid)
+ query = table1.join(table2,
+ table1.c.myid == table2.c.otherid) \
+ .outerjoin(table3, table3.c.userid == table2.c.otherid)
self.assert_compile(query.select().order_by(table1.c.name).
- limit(10).offset(5),
+ limit(10).offset(5),
'SELECT myid, name, description, otherid, '
'othername, userid, otherstuff FROM '
'(SELECT myid, name, description, otherid, '
'mytable, myothertable, thirdtable WHERE '
'thirdtable.userid(+) = '
'myothertable.otherid AND mytable.myid = '
- 'myothertable.otherid ORDER BY '
- 'mytable.name) WHERE ROWNUM <= :param_1 + :param_2) '
+ 'myothertable.otherid ORDER BY mytable.name) '
+ 'WHERE ROWNUM <= :param_1 + :param_2) '
'WHERE ora_rn > :param_2',
checkparams={'param_1': 10, 'param_2': 5},
dialect=oracle.dialect(use_ansi=False))
- subq = select([table1]).select_from(table1.outerjoin(table2,
- table1.c.myid == table2.c.otherid)).alias()
- q = select([table3]).select_from(table3.outerjoin(subq,
- table3.c.userid == subq.c.myid))
+ subq = select([table1]).select_from(
+ table1.outerjoin(table2, table1.c.myid == table2.c.otherid)) \
+ .alias()
+ q = select([table3]).select_from(
+ table3.outerjoin(subq, table3.c.userid == subq.c.myid))
self.assert_compile(q,
'SELECT thirdtable.userid, '
'SELECT mytable.name FROM mytable WHERE '
'mytable.name = :name_1',
dialect=oracle.dialect(use_ansi=False))
- subq = select([table3.c.otherstuff]).where(table3.c.otherstuff
- == table1.c.name).label('bar')
+ subq = select([table3.c.otherstuff]) \
+ .where(table3.c.otherstuff == table1.c.name).label('bar')
q = select([table1.c.name, subq])
self.assert_compile(q,
'SELECT mytable.name, (SELECT '
dialect=oracle.OracleDialect(use_ansi=False)
)
-
j = a.outerjoin(b.join(c, b.c.b == c.c.c), a.c.a == b.c.b)
self.assert_compile(
dialect=oracle.OracleDialect(use_ansi=False)
)
-
def test_alias_outer_join(self):
address_types = table('address_types', column('id'),
column('name'))
column('address_type_id'),
column('email_address'))
at_alias = address_types.alias()
- s = select([at_alias,
- addresses]).select_from(addresses.outerjoin(at_alias,
- addresses.c.address_type_id
- == at_alias.c.id)).where(addresses.c.user_id
- == 7).order_by(addresses.c.id, address_types.c.id)
+ s = select([at_alias, addresses]) \
+ .select_from(
+ addresses.outerjoin(
+ at_alias,
+ addresses.c.address_type_id == at_alias.c.id)) \
+ .where(addresses.c.user_id == 7) \
+ .order_by(addresses.c.id, address_types.c.id)
self.assert_compile(s,
'SELECT address_types_1.id, '
'address_types_1.name, addresses.id, '
self.assert_compile(
t1.insert().values(c1=1).returning(t1.c.c2, t1.c.c3),
"INSERT INTO t1 (c1) VALUES (:c1) RETURNING "
- "t1.c2, t1.c3 INTO :ret_0, :ret_1"
- )
+ "t1.c2, t1.c3 INTO :ret_0, :ret_1")
def test_returning_insert_functional(self):
- t1 = table('t1', column('c1'), column('c2', String()), column('c3', String()))
+ t1 = table('t1',
+ column('c1'),
+ column('c2', String()),
+ column('c3', String()))
fn = func.lower(t1.c.c2, type_=String())
stmt = t1.insert().values(c1=1).returning(fn, t1.c.c3)
compiled = stmt.compile(dialect=oracle.dialect())
- eq_(
- compiled._create_result_map(),
+ eq_(compiled._create_result_map(),
{'ret_1': ('ret_1', (t1.c.c3, 'c3', 'c3'), t1.c.c3.type),
- 'ret_0': ('ret_0', (fn, 'lower', None), fn.type)}
-
- )
+ 'ret_0': ('ret_0', (fn, 'lower', None), fn.type)})
self.assert_compile(
stmt,
"INSERT INTO t1 (c1) VALUES (:c1) RETURNING "
- "lower(t1.c2), t1.c3 INTO :ret_0, :ret_1"
- )
+ "lower(t1.c2), t1.c3 INTO :ret_0, :ret_1")
def test_returning_insert_labeled(self):
t1 = table('t1', column('c1'), column('c2'), column('c3'))
t1.insert().values(c1=1).returning(
t1.c.c2.label('c2_l'), t1.c.c3.label('c3_l')),
"INSERT INTO t1 (c1) VALUES (:c1) RETURNING "
- "t1.c2, t1.c3 INTO :ret_0, :ret_1"
- )
+ "t1.c2, t1.c3 INTO :ret_0, :ret_1")
def test_compound(self):
t1 = table('t1', column('c1'), column('c2'), column('c3'))
def test_create_index_alt_schema(self):
m = MetaData()
t1 = Table('foo', m,
- Column('x', Integer),
- schema="alt_schema"
- )
+ Column('x', Integer),
+ schema="alt_schema")
self.assert_compile(
schema.CreateIndex(Index("bar", t1.c.x)),
"CREATE INDEX alt_schema.bar ON alt_schema.foo (x)"
def test_create_index_expr(self):
m = MetaData()
t1 = Table('foo', m,
- Column('x', Integer)
- )
+ Column('x', Integer))
self.assert_compile(
schema.CreateIndex(Index("bar", t1.c.x > 5)),
"CREATE INDEX bar ON foo (x > 5)"
"foo (x INTEGER) ON COMMIT PRESERVE ROWS"
)
-
def test_create_table_compress(self):
m = MetaData()
tbl1 = Table('testtbl1', m, Column('data', Integer),
dialect._get_default_schema_name = Mock()
return dialect
-
def test_ora8_flags(self):
dialect = self._dialect((8, 2, 5))
self.assert_compile(Unicode(50), "VARCHAR2(50)", dialect=dialect)
self.assert_compile(UnicodeText(), "CLOB", dialect=dialect)
-
dialect = self._dialect((8, 2, 5), implicit_returning=True)
dialect.initialize(testing.db.connect())
assert dialect.implicit_returning
-
def test_default_flags(self):
"""test with no initialization or server version info"""
schema = testing.db.dialect.default_schema_name
meta = self.metadata
parent = Table('parent', meta,
- Column('pid', Integer, primary_key=True),
- schema=schema
- )
+ Column('pid', Integer, primary_key=True),
+ schema=schema)
child = Table('child', meta,
- Column('cid', Integer, primary_key=True),
- Column('pid', Integer, ForeignKey('%s.parent.pid' % schema)),
- schema=schema
- )
+ Column('cid', Integer, primary_key=True),
+ Column('pid',
+ Integer,
+ ForeignKey('%s.parent.pid' % schema)),
+ schema=schema)
meta.create_all()
parent.insert().execute({'pid': 1})
child.insert().execute({'cid': 1, 'pid': 1})
def test_reflect_alt_table_owner_local_synonym(self):
meta = MetaData(testing.db)
- parent = Table('%s_pt' % testing.config.test_schema, meta, autoload=True,
- oracle_resolve_synonyms=True)
+ parent = Table('%s_pt' % testing.config.test_schema,
+ meta,
+ autoload=True,
+ oracle_resolve_synonyms=True)
self.assert_compile(parent.select(),
- "SELECT %(test_schema)s_pt.id, "
- "%(test_schema)s_pt.data FROM %(test_schema)s_pt"
- % {"test_schema": testing.config.test_schema})
+ "SELECT %(test_schema)s_pt.id, "
+ "%(test_schema)s_pt.data FROM %(test_schema)s_pt"
+ % {"test_schema": testing.config.test_schema})
select([parent]).execute().fetchall()
def test_reflect_alt_synonym_owner_local_table(self):
@testing.provide_metadata
def test_create_same_names_implicit_schema(self):
meta = self.metadata
- parent = Table('parent', meta,
- Column('pid', Integer, primary_key=True),
- )
+ parent = Table('parent',
+ meta,
+ Column('pid', Integer, primary_key=True))
child = Table('child', meta,
- Column('cid', Integer, primary_key=True),
- Column('pid', Integer, ForeignKey('parent.pid')),
- )
+ Column('cid', Integer, primary_key=True),
+ Column('pid', Integer, ForeignKey('parent.pid')))
meta.create_all()
parent.insert().execute({'pid': 1})
child.insert().execute({'cid': 1, 'pid': 1})
def test_oracle_has_no_on_update_cascade(self):
bar = Table('bar', self.metadata,
- Column('id', Integer, primary_key=True),
- Column('foo_id', Integer,
- ForeignKey('foo.id', onupdate='CASCADE')))
+ Column('id', Integer, primary_key=True),
+ Column('foo_id',
+ Integer,
+ ForeignKey('foo.id', onupdate='CASCADE')))
assert_raises(exc.SAWarning, bar.create)
bat = Table('bat', self.metadata,
- Column('id', Integer, primary_key=True),
- Column('foo_id', Integer),
+ Column('id', Integer, primary_key=True),
+ Column('foo_id', Integer),
ForeignKeyConstraint(['foo_id'], ['foo.id'],
- onupdate='CASCADE'))
+ onupdate='CASCADE'))
assert_raises(exc.SAWarning, bat.create)
@classmethod
def define_tables(cls, metadata):
Table('datatable', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)))
def _connection(self):
conn = testing.db.connect()
testing.db.scalar("select count(*) from datatable"),
rows
)
+
def test_twophase_prepare_false(self):
conn = self._connection()
for i in range(2):
for i in range(2):
trans = conn.begin_twophase()
conn.execute("insert into datatable (id, data) "
- "values (%s, 'somedata')" % i)
+ "values (%s, 'somedata')" % i)
trans.prepare()
trans.commit()
conn.close()
conn = self._connection()
trans = conn.begin_twophase()
conn.execute("insert into datatable (id, data) "
- "values (%s, 'somedata')" % 1)
+ "values (%s, 'somedata')" % 1)
trans.rollback()
trans = conn.begin_twophase()
conn.execute("insert into datatable (id, data) "
- "values (%s, 'somedata')" % 1)
+ "values (%s, 'somedata')" % 1)
trans.prepare()
trans.commit()
conn = self._connection()
trans = conn.begin_twophase()
conn.execute("insert into datatable (id, data) "
- "values (%s, 'somedata')" % 1)
+ "values (%s, 'somedata')" % 1)
trans.commit()
conn.close()
self._assert_data(1)
+
class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = oracle.OracleDialect()
'STRING'
)
-
def test_long(self):
self.assert_compile(oracle.LONG(), "LONG")
(oracle.INTERVAL(second_precision=5),
'INTERVAL DAY TO SECOND(5)'),
(oracle.INTERVAL(day_precision=2,
- second_precision=5),
+ second_precision=5),
'INTERVAL DAY(2) TO SECOND(5)')]:
self.assert_compile(type_, expected)
__dialect__ = oracle.OracleDialect()
__backend__ = True
-
@testing.fails_on('+zxjdbc', 'zxjdbc lacks the FIXED_CHAR dbapi type')
def test_fixed_char(self):
m = MetaData(testing.db)
t = Table('t1', m,
- Column('id', Integer, primary_key=True),
- Column('data', CHAR(30), nullable=False)
- )
+ Column('id', Integer, primary_key=True),
+ Column('data', CHAR(30), nullable=False))
t.create()
try:
@testing.provide_metadata
def test_rowid(self):
metadata = self.metadata
- t = Table('t1', metadata,
- Column('x', Integer)
- )
+ t = Table('t1', metadata, Column('x', Integer))
t.create()
t.insert().execute(x=5)
s1 = select([t])
# as cx_oracle just treats it as a string,
# but we want to make sure the ROWID works...
rowid_col = column('rowid', oracle.ROWID)
- s3 = select([t.c.x, rowid_col]).\
- where(rowid_col == cast(rowid, oracle.ROWID))
- eq_(s3.select().execute().fetchall(),
- [(5, rowid)]
- )
+ s3 = select([t.c.x, rowid_col]) \
+ .where(rowid_col == cast(rowid, oracle.ROWID))
+ eq_(s3.select().execute().fetchall(), [(5, rowid)])
@testing.fails_on('+zxjdbc',
'Not yet known how to pass values of the '
oracle.INTERVAL(day_precision=3)))
metadata.create_all()
interval_table.insert().\
- execute(day_interval=datetime.timedelta(days=35,
- seconds=5743))
+ execute(day_interval=datetime.timedelta(days=35, seconds=5743))
row = interval_table.select().execute().first()
eq_(row['day_interval'], datetime.timedelta(days=35,
seconds=5743))
def test_numerics(self):
m = self.metadata
t1 = Table('t1', m,
- Column('intcol', Integer),
- Column('numericcol', Numeric(precision=9, scale=2)),
- Column('floatcol1', Float()),
- Column('floatcol2', FLOAT()),
- Column('doubleprec', oracle.DOUBLE_PRECISION),
- Column('numbercol1', oracle.NUMBER(9)),
- Column('numbercol2', oracle.NUMBER(9, 3)),
- Column('numbercol3', oracle.NUMBER),
-
- )
+ Column('intcol', Integer),
+ Column('numericcol', Numeric(precision=9, scale=2)),
+ Column('floatcol1', Float()),
+ Column('floatcol2', FLOAT()),
+ Column('doubleprec', oracle.DOUBLE_PRECISION),
+ Column('numbercol1', oracle.NUMBER(9)),
+ Column('numbercol2', oracle.NUMBER(9, 3)),
+ Column('numbercol3', oracle.NUMBER))
t1.create()
t1.insert().execute(
intcol=1,
# this test requires cx_oracle 5
foo = Table('foo', metadata,
- Column('idata', Integer),
- Column('ndata', Numeric(20, 2)),
- Column('ndata2', Numeric(20, 2)),
- Column('nidata', Numeric(5, 0)),
- Column('fdata', Float()),
- )
+ Column('idata', Integer),
+ Column('ndata', Numeric(20, 2)),
+ Column('ndata2', Numeric(20, 2)),
+ Column('nidata', Numeric(5, 0)),
+ Column('fdata', Float()))
foo.create()
foo.insert().execute({
stmt = "SELECT idata, ndata, ndata2, nidata, fdata FROM foo"
-
row = testing.db.execute(stmt).fetchall()[0]
eq_(
[type(x) for x in row],
SELECT
(SELECT (SELECT idata FROM foo) FROM DUAL) AS idata,
(SELECT CAST((SELECT ndata FROM foo) AS NUMERIC(20, 2)) FROM DUAL)
- AS ndata,
- (SELECT CAST((SELECT ndata2 FROM foo) AS NUMERIC(20, 2)) FROM DUAL)
- AS ndata2,
- (SELECT CAST((SELECT nidata FROM foo) AS NUMERIC(5, 0)) FROM DUAL)
- AS nidata,
+ AS ndata,
+ (SELECT CAST((SELECT ndata2 FROM foo) AS NUMERIC(20, 2)) FROM DUAL)
+ AS ndata2,
+ (SELECT CAST((SELECT nidata FROM foo) AS NUMERIC(5, 0)) FROM DUAL)
+ AS nidata,
(SELECT CAST((SELECT fdata FROM foo) AS FLOAT) FROM DUAL) AS fdata
FROM dual
"""
)
row = testing.db.execute(text(stmt,
- typemap={
- 'idata': Integer(),
- 'ndata': Numeric(20, 2),
- 'ndata2': Numeric(20, 2),
- 'nidata': Numeric(5, 0),
- 'fdata': Float()
- })).fetchall()[0]
+ typemap={
+ 'idata': Integer(),
+ 'ndata': Numeric(20, 2),
+ 'ndata2': Numeric(20, 2),
+ 'nidata': Numeric(5, 0),
+ 'fdata': Float()})).fetchall()[0]
eq_(
[type(x) for x in row],
[int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float]
)
row = testing.db.execute(text(stmt,
- typemap={
- 'anon_1_idata': Integer(),
- 'anon_1_ndata': Numeric(20, 2),
- 'anon_1_ndata2': Numeric(20, 2),
- 'anon_1_nidata': Numeric(5, 0),
- 'anon_1_fdata': Float()
- })).fetchall()[0]
+ typemap={
+ 'anon_1_idata': Integer(),
+ 'anon_1_ndata': Numeric(20, 2),
+ 'anon_1_ndata2': Numeric(20, 2),
+ 'anon_1_nidata': Numeric(5, 0),
+ 'anon_1_fdata': Float()
+ })).fetchall()[0]
eq_(
[type(x) for x in row],
[int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float]
decimal.Decimal('53'), 45.683920000000001)
)
- row = testing.db.execute(text(stmt,
- typemap={
- 'anon_1_idata': Integer(),
- 'anon_1_ndata': Numeric(20, 2, asdecimal=False),
- 'anon_1_ndata2': Numeric(20, 2, asdecimal=False),
- 'anon_1_nidata': Numeric(5, 0, asdecimal=False),
- 'anon_1_fdata': Float(asdecimal=True)
- })).fetchall()[0]
+ row = testing.db.execute(text(
+ stmt,
+ typemap={
+ 'anon_1_idata': Integer(),
+ 'anon_1_ndata': Numeric(20, 2, asdecimal=False),
+ 'anon_1_ndata2': Numeric(20, 2, asdecimal=False),
+ 'anon_1_nidata': Numeric(5, 0, asdecimal=False),
+ 'anon_1_fdata': Float(asdecimal=True)
+ })).fetchall()[0]
eq_(
[type(x) for x in row],
[int, float, float, float, decimal.Decimal]
(5, 45.6, 45, 53, decimal.Decimal('45.68392'))
)
-
@testing.provide_metadata
def test_reflect_dates(self):
metadata = self.metadata
def test_reflect_all_types_schema(self):
types_table = Table('all_types', MetaData(testing.db),
- Column('owner', String(30), primary_key=True),
- Column('type_name', String(30), primary_key=True),
- autoload=True, oracle_resolve_synonyms=True
- )
+ Column('owner', String(30), primary_key=True),
+ Column('type_name', String(30), primary_key=True),
+ autoload=True, oracle_resolve_synonyms=True)
for row in types_table.select().execute().fetchall():
[row[k] for k in row.keys()]
def test_raw_roundtrip(self):
metadata = self.metadata
raw_table = Table('raw', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', oracle.RAW(35))
- )
+ Column('id', Integer, primary_key=True),
+ Column('data', oracle.RAW(35)))
metadata.create_all()
testing.db.execute(raw_table.insert(), id=1, data=b("ABCDEF"))
eq_(
@testing.provide_metadata
def test_reflect_nvarchar(self):
metadata = self.metadata
- Table('t', metadata,
- Column('data', sqltypes.NVARCHAR(255))
- )
+ Table('t', metadata, Column('data', sqltypes.NVARCHAR(255)))
metadata.create_all()
m2 = MetaData(testing.db)
t2 = Table('t', m2, autoload=True)
eq_(res, data)
assert isinstance(res, util.text_type)
-
@testing.provide_metadata
def test_char_length(self):
metadata = self.metadata
t1 = Table('t1', metadata,
- Column("c1", VARCHAR(50)),
- Column("c2", NVARCHAR(250)),
- Column("c3", CHAR(200))
- )
+ Column("c1", VARCHAR(50)),
+ Column("c2", NVARCHAR(250)),
+ Column("c3", CHAR(200)))
t1.create()
m2 = MetaData(testing.db)
t2 = Table('t1', m2, autoload=True)
def test_long_type(self):
metadata = self.metadata
- t = Table('t', metadata,
- Column('data', oracle.LONG)
- )
+ t = Table('t', metadata, Column('data', oracle.LONG))
metadata.create_all(testing.db)
testing.db.execute(t.insert(), data='xyz')
eq_(
def test_lobs_without_convert(self):
engine = testing_engine(options=dict(auto_convert_lobs=False))
metadata = MetaData()
- t = Table("z_test", metadata, Column('id', Integer, primary_key=True),
- Column('data', Text), Column('bindata', LargeBinary))
+ t = Table("z_test",
+ metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', Text),
+ Column('bindata', LargeBinary))
t.create(engine)
try:
- engine.execute(t.insert(), id=1,
- data='this is text',
- bindata=b('this is binary'))
+ engine.execute(t.insert(),
+ id=1,
+ data='this is text',
+ bindata=b('this is binary'))
row = engine.execute(t.select()).first()
eq_(row['data'].read(), 'this is text')
eq_(row['bindata'].read(), b('this is binary'))
finally:
t.drop(engine)
+
class EuroNumericTest(fixtures.TestBase):
- """test the numeric output_type_handler when using non-US locale for NLS_LANG."""
+ """
+ test the numeric output_type_handler when using non-US locale for NLS_LANG.
+ """
__only_on__ = 'oracle+cx_oracle'
__backend__ = True
("SELECT 0.1 FROM DUAL", decimal.Decimal("0.1"), {}),
("SELECT 15 FROM DUAL", 15, {}),
("SELECT CAST(15 AS NUMERIC(3, 1)) FROM DUAL",
- decimal.Decimal("15"), {}),
+ decimal.Decimal("15"), {}),
("SELECT CAST(0.1 AS NUMERIC(5, 2)) FROM DUAL",
- decimal.Decimal("0.1"), {}),
+ decimal.Decimal("0.1"), {}),
("SELECT :num FROM DUAL", decimal.Decimal("2.5"),
- {'num': decimal.Decimal("2.5")})
+ {'num': decimal.Decimal("2.5")})
]:
test_exp = self.engine.scalar(stmt, **kw)
eq_(
def setup(self):
testing.db.execute("create table my_table (id integer)")
- testing.db.execute("create global temporary table my_temp_table (id integer)")
- testing.db.execute("create table foo_table (id integer) tablespace SYSTEM")
+ testing.db.execute(
+ "create global temporary table my_temp_table (id integer)"
+ )
+ testing.db.execute(
+ "create table foo_table (id integer) tablespace SYSTEM"
+ )
def teardown(self):
testing.db.execute("drop table my_temp_table")
engine = testing_engine(options={"exclude_tablespaces": ["FOO"]})
insp = inspect(engine)
eq_(
- set(insp.get_table_names()).intersection(["my_table", "foo_table"]),
+ set(insp.get_table_names()).intersection(["my_table",
+ "foo_table"]),
set(["my_table", "foo_table"])
)
set(['admin_docindex'])
)
+
class BufferedColumnTest(fixtures.TestBase, AssertsCompiledSQL):
__only_on__ = 'oracle'
__backend__ = True
global binary_table, stream, meta
meta = MetaData(testing.db)
binary_table = Table('binary_table', meta,
- Column('id', Integer, primary_key=True),
- Column('data', LargeBinary)
- )
+ Column('id', Integer, primary_key=True),
+ Column('data', LargeBinary))
meta.create_all()
stream = os.path.join(
os.path.dirname(__file__), "..",
def test_fetch_single_arraysize(self):
eng = testing_engine(options={'arraysize': 1})
result = eng.execute(binary_table.select().
- order_by(binary_table.c.id)).fetchall()
+ order_by(binary_table.c.id)).fetchall()
eq_(result, [(i, stream) for i in range(1, 11)])
+
class UnsupportedIndexReflectTest(fixtures.TestBase):
__only_on__ = 'oracle'
__backend__ = True
def test_reflect_functional_index(self):
metadata = self.metadata
Table('test_index_reflect', metadata,
- Column('data', String(20), primary_key=True)
- )
+ Column('data', String(20), primary_key=True))
metadata.create_all()
testing.db.execute('CREATE INDEX DATA_IDX ON '
"select * from v$version"):
return True
return False
- except:
+ except Exception:
return True
"select * from v$version"):
return True
return False
- except:
+ except Exception:
return True
assert tbl.dialect_options['oracle']['compress'] == "OLTP"
-
-
class RoundTripIndexTest(fixtures.TestBase):
__only_on__ = 'oracle'
__backend__ = True
metadata = self.metadata
table = Table("sometable", metadata,
- Column("id_a", Unicode(255), primary_key=True),
- Column("id_b", Unicode(255), primary_key=True, unique=True),
- Column("group", Unicode(255), primary_key=True),
- Column("col", Unicode(255)),
- UniqueConstraint('col', 'group'),
- )
+ Column("id_a", Unicode(255), primary_key=True),
+ Column("id_b",
+ Unicode(255),
+ primary_key=True,
+ unique=True),
+ Column("group", Unicode(255), primary_key=True),
+ Column("col", Unicode(255)),
+ UniqueConstraint('col', 'group'))
# "group" is a keyword, so lower case
normalind = Index('tableind', table.c.id_b, table.c.group)
inspect.reflect()
def obj_definition(obj):
- return obj.__class__, tuple([c.name for c in
- obj.columns]), getattr(obj, 'unique', None)
+ return (obj.__class__,
+ tuple([c.name for c in obj.columns]),
+ getattr(obj, 'unique', None))
# find what the primary k constraint name should be
primaryconsname = testing.db.scalar(
eq_(len(reflectedtable.constraints), 1)
eq_(len(reflectedtable.indexes), 5)
+
class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
def test_basic(self):
# oracle can't actually do the ROWNUM thing with FOR UPDATE
# very well.
- t = Table('t1', metadata, Column('id', Integer, primary_key=True),
- Column('data', Integer)
- )
+ t = Table('t1',
+ metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', Integer))
metadata.create_all()
t.insert().execute(
def test_quoted_column_non_unicode(self):
metadata = self.metadata
table = Table("atable", metadata,
- Column("_underscorecolumn", Unicode(255), primary_key=True),
- )
+ Column("_underscorecolumn",
+ Unicode(255),
+ primary_key=True))
metadata.create_all()
table.insert().execute(
def test_quoted_column_unicode(self):
metadata = self.metadata
table = Table("atable", metadata,
- Column(u("méil"), Unicode(255), primary_key=True),
- )
+ Column(u("méil"), Unicode(255), primary_key=True))
metadata.create_all()
table.insert().execute(
cls.dblink = config.file_config.get('sqla_testing', 'oracle_db_link')
# note that the synonym here is still not totally functional
- # when accessing via a different username as we do with the multiprocess
- # test suite, so testing here is minimal
+ # when accessing via a different username as we do with the
+ # multiprocess test suite, so testing here is minimal
with testing.db.connect() as conn:
- conn.execute(
- "create table test_table "
- "(id integer primary key, data varchar2(50))")
+ conn.execute("create table test_table "
+ "(id integer primary key, data varchar2(50))")
conn.execute("create synonym test_table_syn "
- "for test_table@%s" % cls.dblink)
+ "for test_table@%s" % cls.dblink)
@classmethod
def teardown_class(cls):
m = MetaData()
t = Table('test_table_syn', m, autoload=True,
- autoload_with=testing.db, oracle_resolve_synonyms=True)
+ autoload_with=testing.db, oracle_resolve_synonyms=True)
eq_(list(t.c.keys()), ['id', 'data'])
eq_(list(t.primary_key), [t.c.id])
from sqlalchemy.connectors import pyodbc
from sqlalchemy.testing import fixtures
+
class PyODBCTest(fixtures.TestBase):
def test_pyodbc_version(self):
connector = pyodbc.PyODBCConnector()
eq_(
connector._parse_dbapi_version(vers),
expected
- )
\ No newline at end of file
+ )
# """id""" integer NOT NULL PRIMARY KEY,
# """aid""" integer NULL
# REFERENCES """a""" ("""id""")
- #)
- #''')
+ # )
+ # ''')
table1 = Table(r'"a"', metadata, autoload=True)
assert '"id"' in table1.c
- #table2 = Table(r'"b"', metadata, autoload=True)
- #j = table1.join(table2)
+ # table2 = Table(r'"b"', metadata, autoload=True)
+ # j = table1.join(table2)
# assert j.onclause.compare(table1.c['"id"']
# == table2.c['"aid"'])
# test quoting and all that
idx2 = Index('test_idx2', tbl.c.data,
- sqlite_where=and_(tbl.c.data > 'a', tbl.c.data
- < "b's"))
+ sqlite_where=and_(tbl.c.data > 'a', tbl.c.data < "b's"))
self.assert_compile(schema.CreateIndex(idx),
'CREATE INDEX test_idx1 ON testtbl (data) '
'WHERE data > 5 AND data < 10',
testing.db.execute('CREATE VIRTUAL TABLE t using FTS3;')
testing.db.execute('DROP TABLE t;')
return False
- except:
+ except Exception:
return True
+
metadata = cattable = matchtable = None
def test_expression(self):
self.assert_compile(matchtable.c.title.match('somstr'),
- 'matchtable.title MATCH ?', dialect=sqlite.dialect())
+ 'matchtable.title MATCH ?',
+ dialect=sqlite.dialect())
def test_simple_match(self):
results = \
"d", "d1", "d2", "c", "b", "a1", "a2"]:
try:
conn.execute("drop table %s" % name)
- except:
+ except Exception:
pass
def test_legacy_quoted_identifiers_unit(self):
from sqlalchemy.testing.suite import *
-
-
"Sybase ASE does not support OFFSET",
stmt.compile, dialect=self.__dialect__
)
-
-
def test_create_drop_explicit(self):
metadata = MetaData()
- table = Table('test_table', metadata,
- Column('foo', Integer))
+ table = Table('test_table', metadata, Column('foo', Integer))
for bind in (
testing.db,
testing.db.connect()
def test_create_drop_err_table(self):
metadata = MetaData()
- table = Table('test_table', metadata,
- Column('foo', Integer))
+ table = Table('test_table', metadata, Column('foo', Integer))
for meth in [
table.exists,
testing.db.connect()
):
metadata = meta()
- table = Table('test_table', metadata,
- Column('foo', Integer))
+ table = Table('test_table', metadata, Column('foo', Integer))
metadata.bind = bind
assert metadata.bind is table.bind is bind
metadata.create_all()
assert not table.exists()
metadata = meta()
- table = Table('test_table', metadata,
- Column('foo', Integer))
+ table = Table('test_table', metadata, Column('foo', Integer))
metadata.bind = bind
):
metadata = MetaData(*args[0], **args[1])
table = Table('test_table', metadata,
- Column('foo', Integer))
+ Column('foo', Integer))
assert metadata.bind is table.bind is bind
metadata.create_all()
assert table.exists()
def test_implicit_execution(self):
metadata = MetaData()
table = Table('test_table', metadata,
- Column('foo', Integer),
- test_needs_acid=True,
- )
+ Column('foo', Integer),
+ test_needs_acid=True)
conn = testing.db.connect()
metadata.create_all(bind=conn)
try:
def test_clauseelement(self):
metadata = MetaData()
- table = Table('test_table', metadata,
- Column('foo', Integer))
+ table = Table('test_table', metadata, Column('foo', Integer))
metadata.create_all(bind=testing.db)
try:
for elem in [
table.select,
lambda **kwargs: sa.func.current_timestamp(**kwargs).select(),
- # func.current_timestamp().select,
+ # func.current_timestamp().select,
lambda **kwargs:text("select * from test_table", **kwargs)
]:
for bind in (
def test_append_listener(self):
metadata, table, bind = self.metadata, self.table, self.bind
- fn = lambda *a: None
+ def fn(*a): return None
table.append_ddl_listener('before-create', fn)
assert_raises(exc.InvalidRequestError, table.append_ddl_listener,
- 'blah', fn)
+ 'blah', fn)
metadata.append_ddl_listener('before-create', fn)
assert_raises(exc.InvalidRequestError, metadata.append_ddl_listener,
- 'blah', fn)
+ 'blah', fn)
class DDLExecutionTest(fixtures.TestBase):
metadata, users, engine = self.metadata, self.users, self.engine
canary = []
users.append_ddl_listener('before-create',
- lambda e, t, b: canary.append('mxyzptlk')
- )
+ lambda e, t, b: canary.append('mxyzptlk'))
users.append_ddl_listener('after-create',
- lambda e, t, b: canary.append('klptzyxm')
- )
+ lambda e, t, b: canary.append('klptzyxm'))
users.append_ddl_listener('before-drop',
- lambda e, t, b: canary.append('xyzzy')
- )
+ lambda e, t, b: canary.append('xyzzy'))
users.append_ddl_listener('after-drop',
- lambda e, t, b: canary.append('fnord')
- )
+ lambda e, t, b: canary.append('fnord'))
metadata.create_all()
assert 'mxyzptlk' in canary
def test_deprecated_append_ddl_listener_metadata(self):
metadata, users, engine = self.metadata, self.users, self.engine
canary = []
- metadata.append_ddl_listener('before-create',
- lambda e, t, b, tables=None: canary.append('mxyzptlk')
- )
- metadata.append_ddl_listener('after-create',
- lambda e, t, b, tables=None: canary.append('klptzyxm')
- )
- metadata.append_ddl_listener('before-drop',
- lambda e, t, b, tables=None: canary.append('xyzzy')
- )
- metadata.append_ddl_listener('after-drop',
- lambda e, t, b, tables=None: canary.append('fnord')
- )
+ metadata.append_ddl_listener(
+ 'before-create',
+ lambda e, t, b, tables=None: canary.append('mxyzptlk')
+ )
+ metadata.append_ddl_listener(
+ 'after-create',
+ lambda e, t, b, tables=None: canary.append('klptzyxm')
+ )
+ metadata.append_ddl_listener(
+ 'before-drop',
+ lambda e, t, b, tables=None: canary.append('xyzzy')
+ )
+ metadata.append_ddl_listener(
+ 'after-drop',
+ lambda e, t, b, tables=None: canary.append('fnord')
+ )
metadata.create_all()
assert 'mxyzptlk' in canary
nonpg_mock = engines.mock_engine(dialect_name='sqlite')
pg_mock = engines.mock_engine(dialect_name='postgresql')
constraint = CheckConstraint('a < b', name='my_test_constraint',
- table=users)
+ table=users)
# by placing the constraint in an Add/Drop construct, the
# 'inline_ddl' flag is set to False
nonpg_mock = engines.mock_engine(dialect_name='sqlite')
pg_mock = engines.mock_engine(dialect_name='postgresql')
constraint = CheckConstraint('a < b', name='my_test_constraint',
- table=users)
+ table=users)
# by placing the constraint in an Add/Drop construct, the
# 'inline_ddl' flag is set to False
class DDLTest(fixtures.TestBase, AssertsCompiledSQL):
def mock_engine(self):
- executor = lambda *a, **kw: None
+ def executor(*a, **kw): return None
engine = create_engine(testing.db.name + '://',
strategy='mock', executor=executor)
engine.dialect.identifier_preparer = \
- tsa.sql.compiler.IdentifierPreparer(engine.dialect)
+ tsa.sql.compiler.IdentifierPreparer(engine.dialect)
return engine
def test_tokens(self):
# overrides are used piece-meal and verbatim.
ddl = DDL('%(schema)s-%(table)s-%(fullname)s-%(bonus)s',
- context={'schema': 'S S', 'table': 'T T', 'bonus': 'b'
- })
+ context={'schema': 'S S', 'table': 'T T', 'bonus': 'b'})
self.assert_compile(ddl.against(sane_alone), 'S S-T T-t-b',
dialect=dialect)
self.assert_compile(ddl.against(sane_schema), 'S S-T T-s.t-b',
assert DDL('')._should_execute(tbl, cx)
assert DDL('').execute_if(dialect=target)._should_execute(tbl, cx)
assert not DDL('').execute_if(dialect='bogus').\
- _should_execute(tbl, cx)
+ _should_execute(tbl, cx)
assert DDL('').execute_if(callable_=lambda d, y, z, **kw: True).\
- _should_execute(tbl, cx)
+ _should_execute(tbl, cx)
assert(DDL('').execute_if(
callable_=lambda d, y, z, **kw: z.engine.name
!= 'bogus').
assert DDL('')._should_execute_deprecated('x', tbl, cx)
assert DDL('', on=target)._should_execute_deprecated('x', tbl, cx)
assert not DDL('', on='bogus').\
- _should_execute_deprecated('x', tbl, cx)
+ _should_execute_deprecated('x', tbl, cx)
assert DDL('', on=lambda d, x, y, z: True).\
- _should_execute_deprecated('x', tbl, cx)
+ _should_execute_deprecated('x', tbl, cx)
assert(DDL('', on=lambda d, x, y, z: z.engine.name != 'bogus').
_should_execute_deprecated('x', tbl, cx))
conn.close()
eng.dispose()
-
conn = eng.connect()
conn.close()
engine, buf = self._engine_fixture()
metadata = MetaData()
t = Table('testtable', metadata,
- Column(
- 'pk', Integer, Sequence('testtable_pk_seq'), primary_key=True)
- )
+ Column('pk',
+ Integer,
+ Sequence('testtable_pk_seq'),
+ primary_key=True))
t.create(engine)
t.drop(engine)
engines.testing_engine(options=dict(implicit_returning=False,
strategy='threadlocal')),
engines.testing_engine(options=dict(implicit_returning=False)).
- connect()
+ connect()
]:
event.listen(engine, 'before_execute', execute)
event.listen(engine, 'before_cursor_execute', cursor_execute)
def handle_error(ctx):
assert ctx.engine is eng
assert ctx.connection is conn
- assert isinstance(ctx.sqlalchemy_exception, tsa.exc.ProgrammingError)
+ assert isinstance(ctx.sqlalchemy_exception,
+ tsa.exc.ProgrammingError)
raise MySpecialException("failed operation")
conn = eng.connect()
('INSERT INTO t1 (c1, c2)', {'c1': 6}, None),
('select * from t1', {}, None),
('DROP TABLE t1', {}, None)]
- if not testing.against('oracle+zxjdbc'): # or engine.dialect.pr
- # eexecute_pk_sequence
- # s:
+ # or engine.dialect.preexecute_pk_sequences:
+ # (original inline comment moved here for the pep8 fix)
+ if not testing.against('oracle+zxjdbc'):
cursor = [
('CREATE TABLE t1', {}, ()),
('INSERT INTO t1 (c1, c2)', {
conn.connection.invalidate()
conn = e.connect()
eq_(conn.info['boom'], "one")
-
-
r"\[{'data': '0'}, {'data': '1'}, {'data': '2'}, "
r"{'data': '3'}, {'data': '4'}, {'data': '5'}, "
r"{'data': '6'}, {'data': '7'} ... displaying 10 of "
- r"100 total bound parameter sets ... {'data': '98'}, {'data': '99'}\]",
+ r"100 total bound parameter sets ... {'data': '98'}, "
+ r"{'data': '99'}\]",
lambda: self.eng.execute(
"INSERT INTO nonexistent (data) values (:data)",
[{"data": str(i)} for i in range(100)]
assert_raises_message(
tsa.exc.DBAPIError,
r".*INSERT INTO nonexistent \(data\) values "
- r"\(\?\)'\] \[parameters: \[\('0',\), \('1',\), \('2',\), \('3',\), "
- r"\('4',\), \('5',\), \('6',\), \('7',\) "
+ r"\(\?\)'\] \[parameters: \[\('0',\), \('1',\), \('2',\), "
+ r"\('3',\), \('4',\), \('5',\), \('6',\), \('7',\) "
r"... displaying "
r"10 of 100 total bound parameter sets ... "
r"\('98',\), \('99',\)\]",
'sqlite',
'oracle',
'mssql'):
- exec ('from sqlalchemy.dialects import %s\ndialect = '
- '%s.dialect()' % (name, name), globals())
+ exec('from sqlalchemy.dialects import %s\ndialect = '
+ '%s.dialect()' % (name, name), globals())
eq_(dialect.name, name)
connect=Mock(side_effect=connect)
)
+
mock_dbapi = MockDBAPI()
mock_sqlite_dbapi = msd = MockDBAPI()
)
-
class PyBooleanProcessorTest(_BooleanProcessorTest):
@classmethod
def setup_class(cls):
@classmethod
def setup_class(cls):
from sqlalchemy import processors
- cls.module = type("util", (object,),
- dict(
- (k, staticmethod(v))
- for k, v in list(processors.py_fallback().items())
- )
- )
+ cls.module = type(
+ "util",
+ (object,),
+ dict((k, staticmethod(v))
+ for k, v in list(processors.py_fallback().items()))
+ )
class CDateProcessorTest(_DateProcessorTest):
@classmethod
def setup_class(cls):
from sqlalchemy.engine import util
- cls.module = type("util", (object,),
- dict(
- (k, staticmethod(v))
- for k, v in list(util.py_fallback().items())
- )
+ cls.module = type(
+ "util",
+ (object,),
+ dict((k, staticmethod(v))
+ for k, v in list(util.py_fallback().items()))
)
meta = self.metadata
users = Table('engine_users', meta,
- Column('user_id', sa.INT, primary_key=True),
- Column('user_name', sa.VARCHAR(20), nullable=False),
- Column('test1', sa.CHAR(5), nullable=False),
- Column('test2', sa.Float(5), nullable=False),
- Column('test3', sa.Text),
- Column('test4', sa.Numeric(10, 2), nullable=False),
- Column('test5', sa.Date),
- Column('parent_user_id', sa.Integer,
- sa.ForeignKey('engine_users.user_id')),
- Column('test6', sa.Date, nullable=False),
- Column('test7', sa.Text),
- Column('test8', sa.LargeBinary),
- Column('test_passivedefault2', sa.Integer, server_default='5'),
- Column('test9', sa.LargeBinary(100)),
- Column('test10', sa.Numeric(10, 2)),
- test_needs_fk=True,
- )
+ Column('user_id', sa.INT, primary_key=True),
+ Column('user_name', sa.VARCHAR(20), nullable=False),
+ Column('test1', sa.CHAR(5), nullable=False),
+ Column('test2', sa.Float(5), nullable=False),
+ Column('test3', sa.Text),
+ Column('test4', sa.Numeric(10, 2), nullable=False),
+ Column('test5', sa.Date),
+ Column('parent_user_id', sa.Integer,
+ sa.ForeignKey('engine_users.user_id')),
+ Column('test6', sa.Date, nullable=False),
+ Column('test7', sa.Text),
+ Column('test8', sa.LargeBinary),
+ Column('test_passivedefault2',
+ sa.Integer, server_default='5'),
+ Column('test9', sa.LargeBinary(100)),
+ Column('test10', sa.Numeric(10, 2)),
+ test_needs_fk=True)
addresses = Table(
'engine_email_addresses',
autoload=True,
autoload_with=testing.db)
reflected_addresses = Table('engine_email_addresses',
- meta2, autoload=True, autoload_with=testing.db)
+ meta2,
+ autoload=True,
+ autoload_with=testing.db)
self.assert_tables_equal(users, reflected_users)
self.assert_tables_equal(addresses, reflected_addresses)
meta.create_all()
meta2 = MetaData()
- reflected_t = Table('t', meta2,
- autoload_with=testing.db)
+ reflected_t = Table('t', meta2, autoload_with=testing.db)
self.assert_tables_equal(t, reflected_t)
@testing.provide_metadata
Column('t3id', sa.Integer, sa.ForeignKey('t3.id')),
test_needs_fk=True,
)
- Table('t2', meta,
- Column('id', sa.Integer, primary_key=True),
- test_needs_fk=True)
- Table('t3', meta,
- Column('id', sa.Integer, primary_key=True),
- test_needs_fk=True)
+ Table('t2',
+ meta,
+ Column('id', sa.Integer, primary_key=True),
+ test_needs_fk=True)
+ Table('t3',
+ meta,
+ Column('id', sa.Integer, primary_key=True),
+ test_needs_fk=True)
meta.create_all()
meta2 = MetaData()
t1r, t2r, t3r = [Table(x, meta2, autoload=True,
meta = self.metadata
Table('t', meta,
- Column('id', Integer, primary_key=True),
- Column('x', Integer),
- Column('y', Integer),
- Column('z', Integer, server_default="5"),
- )
+ Column('id', Integer, primary_key=True),
+ Column('x', Integer),
+ Column('y', Integer),
+ Column('z', Integer, server_default="5"))
meta.create_all()
m2 = MetaData()
t2 = Table('t', m2, old_z, old_q)
eq_(t2.primary_key.columns, (t2.c.z, ))
t2 = Table('t', m2, old_y,
- extend_existing=True,
- autoload=True,
- autoload_with=testing.db)
+ extend_existing=True,
+ autoload=True,
+ autoload_with=testing.db)
eq_(
set(t2.columns.keys()),
set(['x', 'y', 'z', 'q', 'id'])
m3 = MetaData()
t3 = Table('t', m3, Column('z', Integer))
t3 = Table('t', m3, extend_existing=False,
- autoload=True,
- autoload_with=testing.db)
+ autoload=True,
+ autoload_with=testing.db)
eq_(
set(t3.columns.keys()),
set(['z'])
t4 = Table('t', m4, old_z, old_q)
eq_(t4.primary_key.columns, (t4.c.z, ))
t4 = Table('t', m4, old_y,
- extend_existing=True,
- autoload=True,
- autoload_replace=False,
- autoload_with=testing.db)
+ extend_existing=True,
+ autoload=True,
+ autoload_replace=False,
+ autoload_with=testing.db)
eq_(
set(t4.columns.keys()),
set(['x', 'y', 'z', 'q', 'id'])
"""
Table('a', self.metadata, Column('id', Integer, primary_key=True))
Table('b', self.metadata, Column('id', Integer, primary_key=True),
- Column('a_id', Integer))
+ Column('a_id', Integer))
self.metadata.create_all()
m2 = MetaData()
b2 = Table('b', m2, Column('a_id', Integer, sa.ForeignKey('a.id')))
a2 = Table('a', m2, autoload=True, autoload_with=testing.db)
b2 = Table('b', m2, extend_existing=True, autoload=True,
- autoload_with=testing.db,
- autoload_replace=False)
+ autoload_with=testing.db,
+ autoload_replace=False)
assert b2.c.id is not None
assert b2.c.a_id.references(a2.c.id)
"""
Table('a', self.metadata, Column('id', Integer, primary_key=True))
Table('b', self.metadata, Column('id', Integer, primary_key=True),
- Column('a_id', Integer, sa.ForeignKey('a.id')))
+ Column('a_id', Integer, sa.ForeignKey('a.id')))
self.metadata.create_all()
m2 = MetaData()
b2 = Table('b', m2, Column('a_id', Integer, sa.ForeignKey('a.id')))
a2 = Table('a', m2, autoload=True, autoload_with=testing.db)
b2 = Table('b', m2, extend_existing=True, autoload=True,
- autoload_with=testing.db,
- autoload_replace=False)
+ autoload_with=testing.db,
+ autoload_replace=False)
assert b2.c.id is not None
assert b2.c.a_id.references(a2.c.id)
"""
Table('a', self.metadata, Column('id', Integer, primary_key=True))
Table('b', self.metadata, Column('id', Integer, primary_key=True),
- Column('a_id', Integer, sa.ForeignKey('a.id')))
+ Column('a_id', Integer, sa.ForeignKey('a.id')))
self.metadata.create_all()
m2 = MetaData()
b2 = Table('b', m2, Column('a_id', Integer))
a2 = Table('a', m2, autoload=True, autoload_with=testing.db)
b2 = Table('b', m2, extend_existing=True, autoload=True,
- autoload_with=testing.db,
- autoload_replace=False)
+ autoload_with=testing.db,
+ autoload_replace=False)
assert b2.c.id is not None
assert not b2.c.a_id.references(a2.c.id)
a2 = Table('a', m2, Column('id', Integer, primary_key=True))
Table('a', m2, autoload=True, autoload_with=testing.db,
- autoload_replace=False, extend_existing=True)
+ autoload_replace=False, extend_existing=True)
eq_(list(a2.primary_key), [a2.c.id])
def test_autoload_replace_arg(self):
t2a = Table('test2', m2, autoload=True)
assert t2a._autoincrement_column is None
-
@skip('sqlite')
@testing.provide_metadata
def test_unknown_types(self):
"""
meta = self.metadata
t = Table("test", meta,
- Column('foo', sa.DateTime))
+ Column('foo', sa.DateTime))
ischema_names = testing.db.dialect.ischema_names
t.create()
meta = self.metadata
Table('users', meta,
- Column('id', sa.Integer, primary_key=True),
- Column('name', sa.String(30)))
+ Column('id', sa.Integer, primary_key=True),
+ Column('name', sa.String(30)))
Table('addresses', meta,
- Column('id', sa.Integer, primary_key=True),
- Column('street', sa.String(30)))
+ Column('id', sa.Integer, primary_key=True),
+ Column('street', sa.String(30)))
meta.create_all()
meta2 = MetaData(testing.db)
a2 = Table('addresses', meta2,
- Column('id', sa.Integer,
- sa.ForeignKey('users.id'), primary_key=True),
- autoload=True)
+ Column('id', sa.Integer,
+ sa.ForeignKey('users.id'), primary_key=True),
+ autoload=True)
u2 = Table('users', meta2, autoload=True)
assert list(a2.primary_key) == [a2.c.id]
meta3 = MetaData(testing.db)
u3 = Table('users', meta3, autoload=True)
a3 = Table('addresses', meta3,
- Column('id', sa.Integer, sa.ForeignKey('users.id'),
- primary_key=True),
- autoload=True)
+ Column('id', sa.Integer, sa.ForeignKey('users.id'),
+ primary_key=True),
+ autoload=True)
assert list(a3.primary_key) == [a3.c.id]
assert list(u3.primary_key) == [u3.c.id]
meta = self.metadata
Table('users', meta,
- Column('id', sa.Integer, primary_key=True),
- Column('name', sa.String(30)))
+ Column('id', sa.Integer, primary_key=True),
+ Column('name', sa.String(30)))
Table('addresses', meta,
- Column('id', sa.Integer, primary_key=True),
- Column('street', sa.String(30)),
- Column('user_id', sa.Integer))
+ Column('id', sa.Integer, primary_key=True),
+ Column('street', sa.String(30)),
+ Column('user_id', sa.Integer))
meta.create_all()
meta2 = MetaData(testing.db)
a2 = Table('addresses', meta2,
- Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
- autoload=True)
+ Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
+ autoload=True)
u2 = Table('users', meta2, autoload=True)
assert len(a2.c.user_id.foreign_keys) == 1
assert len(a2.foreign_keys) == 1
u3 = Table('users', meta3, autoload=True)
- a3 = Table('addresses', meta3, Column('user_id',
- sa.Integer, sa.ForeignKey('users.id')),
+ a3 = Table('addresses', meta3,
+ Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
autoload=True)
assert u3.join(a3).onclause.compare(u3.c.id == a3.c.user_id)
meta4 = MetaData(testing.db)
u4 = Table('users', meta4,
- Column('id', sa.Integer, key='u_id', primary_key=True),
- autoload=True)
+ Column('id', sa.Integer, key='u_id', primary_key=True),
+ autoload=True)
a4 = Table(
'addresses',
Column('id', sa.Integer, key='street',
primary_key=True),
Column('street', sa.String(30), key='user_id'),
- Column('user_id', sa.Integer, sa.ForeignKey('users.u_id'
- ), key='id'),
- autoload=True,
- )
+ Column('user_id', sa.Integer, sa.ForeignKey('users.u_id'),
+ key='id'),
+ autoload=True)
assert u4.join(a4).onclause.compare(u4.c.u_id == a4.c.id)
assert list(u4.primary_key) == [u4.c.u_id]
assert len(u4.columns) == 2
metadata = self.metadata
Table('a',
- metadata,
- Column('x', sa.Integer, primary_key=True),
- Column('y', sa.Integer, primary_key=True),
- )
+ metadata,
+ Column('x', sa.Integer, primary_key=True),
+ Column('y', sa.Integer, primary_key=True))
Table('b',
- metadata,
- Column('x', sa.Integer, primary_key=True),
- Column('y', sa.Integer, primary_key=True),
- sa.ForeignKeyConstraint(['x', 'y'], ['a.x', 'a.y'])
- )
+ metadata,
+ Column('x', sa.Integer, primary_key=True),
+ Column('y', sa.Integer, primary_key=True),
+ sa.ForeignKeyConstraint(['x', 'y'], ['a.x', 'a.y']))
metadata.create_all()
c2 = Column('y', sa.Integer, primary_key=True)
f1 = sa.ForeignKeyConstraint(['x', 'y'], ['a.x', 'a.y'])
b1 = Table('b',
- meta2, c1, c2, f1,
- autoload=True,
- autoload_with=testing.db
- )
+ meta2, c1, c2, f1,
+ autoload=True,
+ autoload_with=testing.db)
assert b1.c.x is c1
assert b1.c.y is c2
meta = self.metadata
Table('a', meta,
- Column('x', sa.Integer, primary_key=True),
- Column('z', sa.Integer),
- test_needs_fk=True
- )
+ Column('x', sa.Integer, primary_key=True),
+ Column('z', sa.Integer),
+ test_needs_fk=True)
Table('b', meta,
- Column('y', sa.Integer, sa.ForeignKey('a.x')),
- test_needs_fk=True
- )
+ Column('y', sa.Integer, sa.ForeignKey('a.x')),
+ test_needs_fk=True)
meta.create_all()
m2 = MetaData(testing.db)
a2 = Table('a', m2,
- Column('x', sa.Integer, primary_key=True, key='x1'),
- autoload=True)
+ Column('x', sa.Integer, primary_key=True, key='x1'),
+ autoload=True)
b2 = Table('b', m2, autoload=True)
assert a2.join(b2).onclause.compare(a2.c.x1 == b2.c.y)
assert b2.c.y.references(a2.c.x1)
meta = self.metadata
Table('a', meta,
- Column('x', sa.Integer, primary_key=True),
- Column('z', sa.Integer),
- test_needs_fk=True
- )
+ Column('x', sa.Integer, primary_key=True),
+ Column('z', sa.Integer),
+ test_needs_fk=True)
Table('b', meta,
- Column('y', sa.Integer, sa.ForeignKey('a.x')),
- test_needs_fk=True
- )
+ Column('y', sa.Integer, sa.ForeignKey('a.x')),
+ test_needs_fk=True)
meta.create_all()
m2 = MetaData(testing.db)
a2 = Table('a', m2, include_columns=['z'], autoload=True)
meta = self.metadata
Table('users', meta,
- Column('id', sa.Integer, primary_key=True),
- Column('name', sa.String(30)),
- test_needs_fk=True)
+ Column('id', sa.Integer, primary_key=True),
+ Column('name', sa.String(30)),
+ test_needs_fk=True)
Table('addresses', meta,
- Column('id', sa.Integer, primary_key=True),
- Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
- test_needs_fk=True)
+ Column('id', sa.Integer, primary_key=True),
+ Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
+ test_needs_fk=True)
meta.create_all()
meta2 = MetaData(testing.db)
a2 = Table('addresses', meta2,
- Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
- autoload=True)
+ Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
+ autoload=True)
u2 = Table('users', meta2, autoload=True)
s = sa.select([a2])
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id)
meta2 = MetaData(testing.db)
- u2 = Table('users', meta2, Column('id', sa.Integer,
- primary_key=True), autoload=True)
- a2 = Table('addresses', meta2, Column('id', sa.Integer,
- primary_key=True), Column('user_id', sa.Integer,
- sa.ForeignKey('users.id')), autoload=True)
+ u2 = Table('users', meta2, Column('id', sa.Integer, primary_key=True),
+ autoload=True)
+ a2 = Table('addresses', meta2,
+ Column('id', sa.Integer, primary_key=True),
+ Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
+ autoload=True)
s = sa.select([a2])
assert s.c.user_id is not None
if testing.against('postgresql'):
test_attrs = ('match', 'onupdate', 'ondelete',
- 'deferrable', 'initially')
+ 'deferrable', 'initially')
addresses_user_id_fkey = sa.ForeignKey(
# Each option is specifically not a Postgres default, or
# it won't be returned by PG's inspection
meta = self.metadata
Table('users', meta,
- Column('id', sa.Integer, primary_key=True),
- Column('name', sa.String(30)),
- test_needs_fk=True)
+ Column('id', sa.Integer, primary_key=True),
+ Column('name', sa.String(30)),
+ test_needs_fk=True)
Table('addresses', meta,
- Column('id', sa.Integer, primary_key=True),
- Column('user_id', sa.Integer, addresses_user_id_fkey),
- test_needs_fk=True)
+ Column('id', sa.Integer, primary_key=True),
+ Column('user_id', sa.Integer, addresses_user_id_fkey),
+ test_needs_fk=True)
meta.create_all()
meta2 = MetaData()
def test_fk_error(self):
metadata = MetaData(testing.db)
Table('slots', metadata,
- Column('slot_id', sa.Integer, primary_key=True),
- Column('pkg_id', sa.Integer, sa.ForeignKey('pkgs.pkg_id')),
- Column('slot', sa.String(128)),
- )
+ Column('slot_id', sa.Integer, primary_key=True),
+ Column('pkg_id', sa.Integer, sa.ForeignKey('pkgs.pkg_id')),
+ Column('slot', sa.String(128)))
assert_raises_message(
sa.exc.InvalidRequestError,
test_needs_fk=True,
)
multi2 = Table('multi2', meta,
- Column('id', sa.Integer, primary_key=True),
- Column('foo', sa.Integer),
- Column('bar', sa.Integer),
- Column('lala', sa.Integer),
- Column('data', sa.String(50)),
- sa.ForeignKeyConstraint(['foo', 'bar', 'lala'],
- ['multi.multi_id', 'multi.multi_rev', 'multi.multi_hoho'
- ]),
- test_needs_fk=True,
- )
+ Column('id', sa.Integer, primary_key=True),
+ Column('foo', sa.Integer),
+ Column('bar', sa.Integer),
+ Column('lala', sa.Integer),
+ Column('data', sa.String(50)),
+ sa.ForeignKeyConstraint(['foo', 'bar', 'lala'],
+ ['multi.multi_id',
+ 'multi.multi_rev',
+ 'multi.multi_hoho']),
+ test_needs_fk=True,
+ )
meta.create_all()
meta2 = MetaData()
j = sa.join(table, table2)
self.assert_(sa.and_(table.c.multi_id == table2.c.foo,
- table.c.multi_rev == table2.c.bar,
- table.c.multi_hoho
- == table2.c.lala).compare(j.onclause))
+ table.c.multi_rev == table2.c.bar,
+ table.c.multi_hoho == table2.c.lala)
+ .compare(j.onclause))
@testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on')
@testing.requires.check_constraints
# error
meta = self.metadata
- table_a = Table('select', meta, Column('not', sa.Integer,
- primary_key=True), Column('from',
- sa.String(12), nullable=False),
+ table_a = Table('select', meta,
+ Column('not', sa.Integer, primary_key=True),
+ Column('from', sa.String(12), nullable=False),
sa.UniqueConstraint('from', name='when'))
sa.Index('where', table_a.c['from'])
quoter = meta.bind.dialect.identifier_preparer.quote_identifier
Table('false', meta,
- Column('create', sa.Integer, primary_key=True),
- Column('true', sa.Integer, sa.ForeignKey('select.not')),
- sa.CheckConstraint('%s <> 1'
- % quoter(check_col), name='limit')
- )
+ Column('create', sa.Integer, primary_key=True),
+ Column('true', sa.Integer, sa.ForeignKey('select.not')),
+ sa.CheckConstraint('%s <> 1' % quoter(check_col), name='limit'))
table_c = Table('is', meta,
- Column('or', sa.Integer, nullable=False, primary_key=True),
- Column('join', sa.Integer, nullable=False, primary_key=True),
- sa.PrimaryKeyConstraint('or', 'join', name='to')
- )
+ Column('or', sa.Integer, nullable=False,
+ primary_key=True),
+ Column('join', sa.Integer, nullable=False,
+ primary_key=True),
+ sa.PrimaryKeyConstraint('or', 'join', name='to'))
index_c = sa.Index('else', table_c.c.join)
meta.create_all()
index_c.drop()
@testing.uses_deprecated()
def test_reflect_uses_bind_constructor_conn(self):
self._test_reflect_uses_bind(lambda e: MetaData(e.connect(),
- reflect=True))
+ reflect=True))
@testing.uses_deprecated()
def test_reflect_uses_bind_constructor_engine(self):
def test_index_reflection(self):
m1 = self.metadata
t1 = Table('party', m1,
- Column('id', sa.Integer, nullable=False),
- Column('name', sa.String(20), index=True)
- )
+ Column('id', sa.Integer, nullable=False),
+ Column('name', sa.String(20), index=True))
sa.Index('idx1', t1.c.id, unique=True)
sa.Index('idx2', t1.c.name, t1.c.id, unique=False)
m1.create_all()
assert r1.name == 'idx1'
assert r2.name == 'idx2'
- assert r1.unique == True
- assert r2.unique == False
- assert r3.unique == False
+ assert r1.unique == True # noqa
+ assert r2.unique == False # noqa
+ assert r3.unique == False # noqa
assert set([t2.c.id]) == set(r1.columns)
assert set([t2.c.name, t2.c.id]) == set(r2.columns)
assert set([t2.c.name]) == set(r3.columns)
eq_(
set(m2.tables),
set(['email_addresses_v', 'users_v',
- 'users', 'dingalings', 'email_addresses'])
+ 'users', 'dingalings', 'email_addresses'])
)
finally:
_drop_views(metadata.bind)
global metadata, users
metadata = MetaData()
users = Table('users', metadata,
- Column('user_id', sa.Integer,
- sa.Sequence('user_id_seq', optional=True),
- primary_key=True),
- Column('user_name', sa.String(40)))
+ Column('user_id', sa.Integer,
+ sa.Sequence('user_id_seq', optional=True),
+ primary_key=True),
+ Column('user_name', sa.String(40)))
Table('email_addresses', metadata,
- Column('address_id', sa.Integer,
- sa.Sequence('address_id_seq', optional=True),
- primary_key=True),
- Column('user_id',
- sa.Integer, sa.ForeignKey(users.c.user_id)),
- Column('email_address', sa.String(40)))
+ Column('address_id', sa.Integer,
+ sa.Sequence('address_id_seq', optional=True),
+ primary_key=True),
+ Column('user_id',
+ sa.Integer, sa.ForeignKey(users.c.user_id)),
+ Column('email_address', sa.String(40)))
Table(
'orders',
metadata,
- Column('order_id', sa.Integer, sa.Sequence('order_id_seq',
- optional=True), primary_key=True),
+ Column('order_id',
+ sa.Integer,
+ sa.Sequence('order_id_seq', optional=True),
+ primary_key=True),
Column('user_id', sa.Integer,
sa.ForeignKey(users.c.user_id)),
Column('description', sa.String(50)),
Column('isopen', sa.Integer),
- )
+ )
Table('items', metadata,
- Column('item_id', sa.INT,
- sa.Sequence('items_id_seq', optional=True),
- primary_key=True),
- Column('order_id',
- sa.INT, sa.ForeignKey('orders')),
- Column('item_name', sa.VARCHAR(50)))
+ Column('item_id', sa.INT,
+ sa.Sequence('items_id_seq', optional=True),
+ primary_key=True),
+ Column('order_id',
+ sa.INT, sa.ForeignKey('orders')),
+ Column('item_name', sa.VARCHAR(50)))
def test_sorter(self):
tables = metadata.sorted_tables
users = Table('users', meta, Column('id', sa.Integer))
addresses = Table('addresses', meta,
- Column('id', sa.Integer),
- Column('user_id', sa.Integer))
+ Column('id', sa.Integer),
+ Column('user_id', sa.Integer))
fk = sa.ForeignKeyConstraint(['user_id'], [users.c.id])
for tname, cname, ixname in names:
t = Table(tname, metadata,
- Column('id', sa.Integer,
- sa.Sequence(cname + '_id_seq'),
- primary_key=True),
- Column(cname, Integer)
- )
+ Column('id', sa.Integer,
+ sa.Sequence(cname + '_id_seq'),
+ primary_key=True),
+ Column(cname, Integer))
schema.Index(ixname, t.c[cname])
metadata.create_all(testing.db)
@testing.requires.cross_schema_fk_reflection
def test_has_schema(self):
eq_(testing.db.dialect.has_schema(testing.db,
- testing.config.test_schema), True)
+ testing.config.test_schema), True)
eq_(testing.db.dialect.has_schema(testing.db,
- 'sa_fake_schema_123'), False)
+ 'sa_fake_schema_123'), False)
@testing.requires.schemas
@testing.requires.cross_schema_fk_reflection
metadata = MetaData(engine)
Table('table1', metadata,
- Column('col1', sa.Integer, primary_key=True),
- test_needs_fk=True,
- schema=schema)
+ Column('col1', sa.Integer, primary_key=True),
+ test_needs_fk=True,
+ schema=schema)
Table('table2', metadata,
- Column('col1', sa.Integer, primary_key=True),
- Column('col2', sa.Integer,
- sa.ForeignKey('%s.table1.col1' % schema)),
- test_needs_fk=True,
- schema=schema)
+ Column('col1', sa.Integer, primary_key=True),
+ Column('col2', sa.Integer,
+ sa.ForeignKey('%s.table1.col1' % schema)),
+ test_needs_fk=True,
+ schema=schema)
try:
metadata.create_all()
metadata.create_all(checkfirst=True)
@testing.requires.schemas
@testing.provide_metadata
def test_schema_translation(self):
- Table('foob', self.metadata, Column('q', Integer), schema=config.test_schema)
+ Table('foob', self.metadata, Column('q', Integer),
+ schema=config.test_schema)
self.metadata.create_all()
m = MetaData()
map_ = {"foob": config.test_schema}
- with config.db.connect().execution_options(schema_translate_map=map_) as conn:
+ with config.db.connect().execution_options(schema_translate_map=map_) \
+ as conn:
t = Table('foob', m, schema="foob", autoload_with=conn)
eq_(t.schema, "foob")
eq_(t.c.keys(), ['q'])
+
@testing.requires.schemas
@testing.fails_on('sybase', 'FIXME: unknown')
def test_explicit_default_schema_metadata(self):
metadata = MetaData(engine, schema=schema)
Table('table1', metadata,
- Column('col1', sa.Integer, primary_key=True),
- test_needs_fk=True)
+ Column('col1', sa.Integer, primary_key=True),
+ test_needs_fk=True)
Table('table2', metadata,
- Column('col1', sa.Integer, primary_key=True),
- Column('col2', sa.Integer,
- sa.ForeignKey('table1.col1')),
- test_needs_fk=True)
+ Column('col1', sa.Integer, primary_key=True),
+ Column('col2', sa.Integer, sa.ForeignKey('table1.col1')),
+ test_needs_fk=True)
try:
metadata.create_all()
metadata.create_all(checkfirst=True)
'%s.dingalings' % testing.config.test_schema,
'%s.users' % testing.config.test_schema,
'%s.email_addresses' % testing.config.test_schema
- ])
+ ])
)
@testing.requires.schemas
@testing.provide_metadata
def test_reflect_all_schemas_default_overlap(self):
t1 = Table('t', self.metadata,
- Column('id', Integer, primary_key=True))
+ Column('id', Integer, primary_key=True))
t2 = Table('t', self.metadata,
- Column('id1', sa.ForeignKey('t.id')),
- schema=testing.config.test_schema
- )
+ Column('id1', sa.ForeignKey('t.id')),
+ schema=testing.config.test_schema)
self.metadata.create_all()
m2 = MetaData()
schema_prefix = ""
users = Table('users', meta,
- Column('user_id', sa.INT, primary_key=True),
- Column('user_name', sa.VARCHAR(20), nullable=False),
- Column('test1', sa.CHAR(5), nullable=False),
- Column('test2', sa.Float(5), nullable=False),
- Column('test3', sa.Text),
- Column('test4', sa.Numeric(10, 2), nullable=False),
- Column('test5', sa.Date),
- Column('test5_1', sa.TIMESTAMP),
- Column('parent_user_id', sa.Integer,
- sa.ForeignKey('%susers.user_id' % schema_prefix)),
- Column('test6', sa.Date, nullable=False),
- Column('test7', sa.Text),
- Column('test8', sa.LargeBinary),
- Column('test_passivedefault2', sa.Integer, server_default='5'),
- Column('test9', sa.LargeBinary(100)),
- Column('test10', sa.Numeric(10, 2)),
- schema=schema,
- test_needs_fk=True,
- )
+ Column('user_id', sa.INT, primary_key=True),
+ Column('user_name', sa.VARCHAR(20), nullable=False),
+ Column('test1', sa.CHAR(5), nullable=False),
+ Column('test2', sa.Float(5), nullable=False),
+ Column('test3', sa.Text),
+ Column('test4', sa.Numeric(10, 2), nullable=False),
+ Column('test5', sa.Date),
+ Column('test5_1', sa.TIMESTAMP),
+ Column('parent_user_id', sa.Integer,
+ sa.ForeignKey('%susers.user_id' % schema_prefix)),
+ Column('test6', sa.Date, nullable=False),
+ Column('test7', sa.Text),
+ Column('test8', sa.LargeBinary),
+ Column('test_passivedefault2', sa.Integer,
+ server_default='5'),
+ Column('test9', sa.LargeBinary(100)),
+ Column('test10', sa.Numeric(10, 2)),
+ schema=schema,
+ test_needs_fk=True)
dingalings = Table("dingalings", meta,
- Column('dingaling_id', sa.Integer, primary_key=True),
- Column('address_id', sa.Integer,
- sa.ForeignKey(
- '%semail_addresses.address_id' % schema_prefix)),
- Column('data', sa.String(30)),
- schema=schema, test_needs_fk=True,
- )
+ Column('dingaling_id', sa.Integer, primary_key=True),
+ Column('address_id', sa.Integer,
+ sa.ForeignKey('%semail_addresses.address_id'
+ % schema_prefix)),
+ Column('data', sa.String(30)),
+ schema=schema, test_needs_fk=True)
addresses = Table('email_addresses', meta,
- Column('address_id', sa.Integer),
- Column('remote_user_id', sa.Integer,
- sa.ForeignKey(users.c.user_id)),
- Column('email_address', sa.String(20)),
- sa.PrimaryKeyConstraint('address_id', name='email_ad_pk'),
- schema=schema,
- test_needs_fk=True,
- )
+ Column('address_id', sa.Integer),
+ Column('remote_user_id', sa.Integer,
+ sa.ForeignKey(users.c.user_id)),
+ Column('email_address', sa.String(20)),
+ sa.PrimaryKeyConstraint('address_id',
+ name='email_ad_pk'),
+ schema=schema,
+ test_needs_fk=True)
return (users, addresses, dingalings)
@classmethod
def define_tables(cls, metadata):
Table('SomeTable', metadata,
- Column('x', Integer, primary_key=True),
- test_needs_fk=True
- )
+ Column('x', Integer, primary_key=True),
+ test_needs_fk=True)
Table('SomeOtherTable', metadata,
- Column('x', Integer, primary_key=True),
- Column('y', Integer, sa.ForeignKey("SomeTable.x")),
- test_needs_fk=True
- )
+ Column('x', Integer, primary_key=True),
+ Column('y', Integer, sa.ForeignKey("SomeTable.x")),
+ test_needs_fk=True)
@testing.fails_if(testing.requires._has_mysql_on_windows)
def test_table_names(self):
assert t1.c.x is not None
@testing.fails_if(lambda:
- testing.against(('mysql', '<', (5, 5))) and
- not testing.requires._has_mysql_fully_case_sensitive()
- )
+ testing.against(('mysql', '<', (5, 5))) and
+ not testing.requires._has_mysql_fully_case_sensitive()
+ )
def test_reflect_via_fk(self):
m = MetaData()
t2 = Table("SomeOtherTable", m, autoload=True,
- autoload_with=testing.db)
+ autoload_with=testing.db)
eq_(t2.name, "SomeOtherTable")
assert "SomeTable" in m.tables
to_reflect = Table("to_reflect", m, autoload=True, listeners=[
('column_reflect', column_reflect),
])
- related = Table("related", m, autoload=True, listeners=[
- ('column_reflect', column_reflect),
- ])
+ related = Table("related", m, autoload=True,
+ listeners=[('column_reflect', column_reflect)])
assert related.c.qyz.references(to_reflect.c.xyz)
global users, metadata
metadata = MetaData()
users = Table('query_users', metadata,
- Column('user_id', INT, primary_key=True),
- Column('user_name', VARCHAR(20)),
- test_needs_acid=True,
- )
+ Column('user_id', INT, primary_key=True),
+ Column('user_name', VARCHAR(20)),
+ test_needs_acid=True)
users.create(testing.db)
def teardown(self):
testing.db.transaction(go, users, [dict(user_id=1,
user_name='user1')])
- eq_(testing.db.execute(users.select()).fetchall(), [(1, 'user1'
- )])
+ eq_(testing.db.execute(users.select()).fetchall(), [(1, 'user1')])
assert_raises(exc.DBAPIError, testing.db.transaction, go,
users, [{'user_id': 2, 'user_name': 'user2'},
- {'user_id': 1, 'user_name': 'user3'}])
- eq_(testing.db.execute(users.select()).fetchall(), [(1, 'user1'
- )])
+ {'user_id': 1, 'user_name': 'user3'}])
+ eq_(testing.db.execute(users.select()).fetchall(), [(1, 'user1')])
def test_nested_rollback(self):
connection = testing.db.connect()
trans2 = connection.begin()
try:
connection.execute(users.insert(), user_id=4,
- user_name='user4')
+ user_name='user4')
connection.execute(users.insert(), user_id=5,
- user_name='user5')
+ user_name='user5')
raise Exception('uh oh')
trans2.commit()
- except:
+ except Exception:
trans2.rollback()
raise
transaction.rollback()
raise
except Exception as e:
try:
- assert str(e) == 'uh oh' # and not "This transaction is
- # inactive"
+ # and not "This transaction is inactive"
+ # comment moved here to fix pep8
+ assert str(e) == 'uh oh'
finally:
connection.close()
trans2.commit()
transaction.rollback()
self.assert_(connection.scalar('select count(*) from '
- 'query_users') == 0)
+ 'query_users') == 0)
result = connection.execute('select * from query_users')
assert len(result.fetchall()) == 0
connection.close()
assert not trans.is_active
self.assert_(connection.scalar('select count(*) from '
- 'query_users') == 0)
+ 'query_users') == 0)
trans = connection.begin()
connection.execute(users.insert(), user_id=1, user_name='user1')
trans.__exit__(None, None, None)
assert not trans.is_active
self.assert_(connection.scalar('select count(*) from '
- 'query_users') == 1)
+ 'query_users') == 1)
connection.close()
def test_close(self):
transaction.commit()
assert not connection.in_transaction()
self.assert_(connection.scalar('select count(*) from '
- 'query_users') == 5)
+ 'query_users') == 5)
result = connection.execute('select * from query_users')
assert len(result.fetchall()) == 5
connection.close()
transaction.close()
assert not connection.in_transaction()
self.assert_(connection.scalar('select count(*) from '
- 'query_users') == 0)
+ 'query_users') == 0)
result = connection.execute('select * from query_users')
assert len(result.fetchall()) == 0
connection.close()
connection.execute(users.insert(), user_id=3, user_name='user3')
transaction.commit()
eq_(connection.execute(select([users.c.user_id]).
- order_by(users.c.user_id)).fetchall(),
+ order_by(users.c.user_id)).fetchall(),
[(1, ), (3, )])
connection.close()
connection.execute(users.insert(), user_id=3, user_name='user3')
transaction.commit()
eq_(connection.execute(select([users.c.user_id]).
- order_by(users.c.user_id)).fetchall(),
+ order_by(users.c.user_id)).fetchall(),
[(1, ), (2, ), (3, )])
connection.close()
connection.execute(users.insert(), user_id=4, user_name='user4')
transaction.commit()
eq_(connection.execute(select([users.c.user_id]).
- order_by(users.c.user_id)).fetchall(),
+ order_by(users.c.user_id)).fetchall(),
[(1, ), (4, )])
connection.close()
transaction.rollback()
transaction.close()
eq_(connection.execute(select([users.c.user_id]).
- order_by(users.c.user_id)).fetchall(),
+ order_by(users.c.user_id)).fetchall(),
[(1, ), (2, )])
connection.close()
transaction.prepare()
transaction.commit()
eq_(connection.execute(select([users.c.user_id]).
- order_by(users.c.user_id)).fetchall(),
+ order_by(users.c.user_id)).fetchall(),
[(1, ), (2, ), (5, )])
connection.close()
eq_(
connection2.execution_options(autocommit=True).
execute(select([users.c.user_id]).
- order_by(users.c.user_id)).fetchall(), [])
+ order_by(users.c.user_id)).fetchall(), [])
recoverables = connection2.recover_twophase()
assert transaction.xid in recoverables
connection2.commit_prepared(transaction.xid, recover=True)
eq_(connection2.execute(select([users.c.user_id]).
- order_by(users.c.user_id)).fetchall(),
+ order_by(users.c.user_id)).fetchall(),
[(1, )])
connection2.close()
xa.commit()
result = \
conn.execute(select([users.c.user_name]).
- order_by(users.c.user_id))
+ order_by(users.c.user_id))
eq_(result.fetchall(), [('user1', ), ('user4', )])
conn.close()
with eng.connect() as conn:
result = \
conn.execute(select([users.c.user_name]).
- order_by(users.c.user_id))
+ order_by(users.c.user_id))
eq_(result.fetchall(), [])
conn1 = testing.db.connect()
conn2 = testing.db.connect()
- users = Table('deadlock_users', metadata, Column('user_id',
- INT, primary_key=True), Column('user_name',
- VARCHAR(20)), test_needs_acid=True)
+ users = Table('deadlock_users', metadata,
+ Column('user_id', INT, primary_key=True),
+ Column('user_name', VARCHAR(20)),
+ test_needs_acid=True)
users.create(conn1)
conn1.execute('select * from deadlock_users')
conn1.close()
def setup_class(cls):
global metadata, foo
metadata = MetaData(testing.db)
- foo = Table('foo', metadata, Column('id', Integer,
- primary_key=True), Column('data', String(100)))
+ foo = Table('foo', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(100)))
metadata.create_all()
testing.db.execute("create function insert_foo(varchar) "
"returns integer as 'insert into foo(data) "
def test_explicit_compiled(self):
conn1 = testing.db.connect()
conn2 = testing.db.connect()
- conn1.execute(select([func.insert_foo('data1'
- )]).execution_options(autocommit=True))
+ conn1.execute(select([func.insert_foo('data1')])
+ .execution_options(autocommit=True))
assert conn2.execute(select([foo.c.data])).fetchall() \
== [('data1', )]
conn1.close()
conn1 = testing.db.connect()
conn2 = testing.db.connect()
conn1.execution_options(autocommit=True).\
- execute(select([func.insert_foo('data1'
- )]))
- eq_(conn2.execute(select([foo.c.data])).fetchall(), [('data1',
- )])
+ execute(select([func.insert_foo('data1')]))
+ eq_(conn2.execute(select([foo.c.data])).fetchall(), [('data1',)])
# connection supersedes statement
conn1.execution_options(autocommit=False).\
- execute(select([func.insert_foo('data2'
- )]).execution_options(autocommit=True))
- eq_(conn2.execute(select([foo.c.data])).fetchall(), [('data1',
- )])
+ execute(select([func.insert_foo('data2')])
+ .execution_options(autocommit=True))
+ eq_(conn2.execute(select([foo.c.data])).fetchall(), [('data1',)])
# ditto
conn1.execution_options(autocommit=True).\
- execute(select([func.insert_foo('data3'
- )]).execution_options(autocommit=False))
- eq_(conn2.execute(select([foo.c.data])).fetchall(), [('data1',
- ), ('data2', ), ('data3', )])
+ execute(select([func.insert_foo('data3')])
+ .execution_options(autocommit=False))
+ eq_(conn2.execute(select([foo.c.data])).fetchall(),
+ [('data1',), ('data2', ), ('data3', )])
conn1.close()
conn2.close()
def test_explicit_text(self):
conn1 = testing.db.connect()
conn2 = testing.db.connect()
- conn1.execute(text("select insert_foo('moredata')"
- ).execution_options(autocommit=True))
+ conn1.execute(text("select insert_foo('moredata')")
+ .execution_options(autocommit=True))
assert conn2.execute(select([foo.c.data])).fetchall() \
== [('moredata', )]
conn1.close()
def test_explicit_compiled_deprecated(self):
conn1 = testing.db.connect()
conn2 = testing.db.connect()
- conn1.execute(select([func.insert_foo('data1')],
- autocommit=True))
+ conn1.execute(select([func.insert_foo('data1')], autocommit=True))
assert conn2.execute(select([foo.c.data])).fetchall() \
== [('data1', )]
conn1.execute(select([func.insert_foo('data2')]).autocommit())
def test_explicit_text_deprecated(self):
conn1 = testing.db.connect()
conn2 = testing.db.connect()
- conn1.execute(text("select insert_foo('moredata')",
- autocommit=True))
+ conn1.execute(text("select insert_foo('moredata')", autocommit=True))
assert conn2.execute(select([foo.c.data])).fetchall() \
== [('moredata', )]
conn1.close()
def test_implicit_text(self):
conn1 = testing.db.connect()
conn2 = testing.db.connect()
- conn1.execute(text("insert into foo (data) values "
- "('implicitdata')"))
+ conn1.execute(text("insert into foo (data) values ('implicitdata')"))
assert conn2.execute(select([foo.c.data])).fetchall() \
== [('implicitdata', )]
conn1.close()
global users, metadata, tlengine
tlengine = testing_engine(options=dict(strategy='threadlocal'))
metadata = MetaData()
- users = Table('query_users', metadata, Column('user_id', INT,
- Sequence('query_users_id_seq', optional=True),
- primary_key=True), Column('user_name',
- VARCHAR(20)), test_needs_acid=True)
+ users = Table('query_users', metadata,
+ Column('user_id',
+ INT,
+ Sequence('query_users_id_seq', optional=True),
+ primary_key=True),
+ Column('user_name', VARCHAR(20)), test_needs_acid=True)
metadata.create_all(tlengine)
def teardown(self):
tlengine.close()
- @testing.crashes('oracle', 'TNS error of unknown origin occurs on the buildbot.')
+ @testing.crashes('oracle',
+ 'TNS error of unknown origin occurs on the buildbot.')
def test_rollback_no_trans(self):
tlengine = testing_engine(options=dict(strategy="threadlocal"))
assert len(result.fetchall()) == 4
t.close()
external_connection = tlengine.connect()
- result = external_connection.execute('select * from query_users'
- )
+ result = external_connection.execute('select * from query_users')
try:
assert len(result.fetchall()) == 0
finally:
tlengine.execute(users.insert(), user_id=3, user_name='user3')
tlengine.rollback()
external_connection = tlengine.connect()
- result = external_connection.execute('select * from query_users'
- )
+ result = external_connection.execute('select * from query_users')
try:
assert len(result.fetchall()) == 0
finally:
tlengine.execute(users.insert(), user_id=3, user_name='user3')
tlengine.commit()
external_connection = tlengine.connect()
- result = external_connection.execute('select * from query_users'
- )
+ result = external_connection.execute('select * from query_users')
try:
assert len(result.fetchall()) == 3
finally:
tlengine.execute(users.insert(), user_id=4, user_name='user4')
trans.__exit__(None, None, None)
eq_(
- tlengine.execute(users.select().order_by(users.c.user_id)).fetchall(),
+ tlengine.execute(users.select().order_by(users.c.user_id))
+ .fetchall(),
[
(1, 'user1'),
(2, 'user2'),
transaction.commit()
transaction = connection.begin()
result = connection.execute('select * from query_users')
- l = result.fetchall()
- assert len(l) == 3, 'expected 3 got %d' % len(l)
+ rows = result.fetchall()
+ assert len(rows) == 3, 'expected 3 got %d' % len(rows)
transaction.commit()
connection.close()
conn.execute(users.insert(), user_id=3, user_name='user3')
trans.rollback()
external_connection = tlengine.connect()
- result = external_connection.execute('select * from query_users'
- )
+ result = external_connection.execute('select * from query_users')
try:
assert len(result.fetchall()) == 0
finally:
conn.execute(users.insert(), user_id=3, user_name='user3')
trans.rollback()
external_connection = tlengine.connect()
- result = external_connection.execute('select * from query_users'
- )
+ result = external_connection.execute('select * from query_users')
try:
assert len(result.fetchall()) == 0
finally:
conn.execute(users.insert(), user_id=3, user_name='user3')
trans.commit()
external_connection = tlengine.connect()
- result = external_connection.execute('select * from query_users'
- )
+ result = external_connection.execute('select * from query_users')
try:
assert len(result.fetchall()) == 3
finally:
tlengine.rollback()
try:
self.assert_(external_connection.scalar(
- 'select count(*) from query_users'
- ) == 0)
+ 'select count(*) from query_users') == 0)
finally:
external_connection.close()
tlengine.commit()
try:
self.assert_(external_connection.scalar(
- 'select count(*) from query_users'
- ) == 5)
+ 'select count(*) from query_users') == 5)
finally:
external_connection.close()
conn.close()
try:
self.assert_(external_connection.scalar(
- 'select count(*) from query_users'
- ) == 0)
+ 'select count(*) from query_users') == 0)
finally:
external_connection.close()
connection.close()
try:
self.assert_(external_connection.scalar(
- 'select count(*) from query_users'
- ) == 0)
+ 'select count(*) from query_users') == 0)
finally:
external_connection.close()
tlengine.commit()
tlengine.close()
eq_(tlengine.execute(select([users.c.user_id]).
- order_by(users.c.user_id)).fetchall(),
+ order_by(users.c.user_id)).fetchall(),
[(1, ), (3, )])
tlengine.close()
tlengine.commit()
tlengine.close()
eq_(tlengine.execute(select([users.c.user_id]).
- order_by(users.c.user_id)).fetchall(),
+ order_by(users.c.user_id)).fetchall(),
[(1, ), (2, ), (3, )])
tlengine.close()
tlengine.commit()
tlengine.close()
eq_(tlengine.execute(select([users.c.user_id]).
- order_by(users.c.user_id)).fetchall(),
+ order_by(users.c.user_id)).fetchall(),
[(1, ), (4, )])
tlengine.close()
assert r2.connection.closed
assert tlengine.closed
- @testing.crashes('oracle+cx_oracle', 'intermittent failures on the buildbot')
+ @testing.crashes('oracle+cx_oracle',
+ 'intermittent failures on the buildbot')
def test_dispose(self):
eng = testing_engine(options=dict(strategy='threadlocal'))
result = eng.execute(select([1]))
tlengine.prepare()
tlengine.rollback()
eq_(tlengine.execute(select([users.c.user_id]).
- order_by(users.c.user_id)).fetchall(),
+ order_by(users.c.user_id)).fetchall(),
[(1, ), (2, )])
eng = testing_engine()
c1 = eng.connect()
with expect_warnings(
- "Connection is already established with a Transaction; "
- "setting isolation_level may implicitly rollback or commit "
- "the existing transaction, or have no effect until next "
- "transaction"
+ "Connection is already established with a Transaction; "
+ "setting isolation_level may implicitly rollback or commit "
+ "the existing transaction, or have no effect until next "
+ "transaction"
):
with c1.begin():
c1 = c1.execution_options(
testing.db.url,
execution_options={
'isolation_level':
- self._non_default_isolation_level()}
+ self._non_default_isolation_level()}
)
conn = eng.connect()
eq_(
stringbased and 'String' or 'Literal')
return ExplicitJoinTest
+
for inline in True, False:
for stringbased in True, False:
testclass = _produce_test(inline, stringbased)
from sqlalchemy.ext.declarative import declared_attr, AbstractConcreteBase, \
ConcreteBase, has_inherited_table
from sqlalchemy.testing import fixtures, mock
+from test.orm.test_events import _RemoveListeners
Base = None
self._run_test(Engineer, "eid", "pid")
-from test.orm.test_events import _RemoveListeners
-
-
class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
def _roundtrip(self, Employee, Manager, Engineer, Boss,
super(DeferredReflectBase, self).teardown()
_DeferredMapperConfig._configs.clear()
+
Base = None
from sqlalchemy.testing.mock import Mock, call
from sqlalchemy.testing.assertions import expect_warnings
+
class DictCollection(dict):
@collection.appender
def append(self, obj):
self[obj.foo] = obj
+
@collection.remover
def remove(self, obj):
del self[obj.foo]
class ObjectCollection(object):
def __init__(self):
self.values = list()
+
@collection.appender
def append(self, obj):
self.values.append(obj)
+
@collection.remover
def remove(self, obj):
self.values.remove(obj)
+
def __iter__(self):
return iter(self.values)
mapper(Parent, parents_table, properties={
'_children': relationship(Child, lazy='joined',
- collection_class=collection_class)})
+ collection_class=collection_class)})
mapper(Child, children_table)
metadata.create_all()
except TypeError:
assert True
+
class DefaultTest(_CollectionOperations):
collection_class = None
assert_raises(TypeError, set, [p1.children])
-
def test_set_comparisons(self):
Parent, Child = self.Parent, self.Child
set()):
eq_(p1.children.union(other),
- control.union(other))
+ control.union(other))
eq_(p1.children.difference(other),
- control.difference(other))
+ control.difference(other))
eq_((p1.children - other),
- (control - other))
+ (control - other))
eq_(p1.children.intersection(other),
- control.intersection(other))
+ control.intersection(other))
eq_(p1.children.symmetric_difference(other),
- control.symmetric_difference(other))
+ control.symmetric_difference(other))
eq_(p1.children.issubset(other),
- control.issubset(other))
+ control.issubset(other))
eq_(p1.children.issuperset(other),
- control.issuperset(other))
+ control.issuperset(other))
self.assert_((p1.children == other) == (control == other))
self.assert_((p1.children != other) == (control != other))
getattr(control, op)(other)
try:
self.assert_(p.children == control)
- except:
+ except Exception:
print('Test %s.%s(%s):' % (set(base), op, other))
print('want', repr(control))
print('got', repr(p.children))
try:
self.assert_(p.children == control)
- except:
+ except Exception:
print('Test %s.%s(%s):' % (base, op, other))
print('want', repr(control))
print('got', repr(p.children))
try:
self.assert_(p.children == control)
- except:
+ except Exception:
print('Test %s %s %s:' % (set(base), op, other))
print('want', repr(control))
print('got', repr(p.children))
try:
self.assert_(p.children == control)
- except:
+ except Exception:
print('Test %s %s %s:' % (base, op, other))
print('want', repr(control))
print('got', repr(p.children))
class CustomSetTest(SetTest):
collection_class = SetCollection
+
class CustomObjectTest(_CollectionOperations):
collection_class = ObjectCollection
p.children.__getitem__, 1
)
+
class ProxyFactoryTest(ListTest):
def setup(self):
metadata = MetaData(testing.db)
Column('name', String(128)))
class CustomProxy(_AssociationList):
- def __init__(
- self,
- lazy_collection,
- creator,
- value_attr,
- parent,
- ):
+ def __init__(self,
+ lazy_collection,
+ creator,
+ value_attr,
+ parent):
getter, setter = parent._default_getset(lazy_collection)
_AssociationList.__init__(
self,
class Parent(object):
children = association_proxy('_children', 'name',
- proxy_factory=CustomProxy,
- proxy_bulk_set=CustomProxy.extend
- )
+ proxy_factory=CustomProxy,
+ proxy_bulk_set=CustomProxy.extend)
def __init__(self, name):
self.name = name
mapper(Parent, parents_table, properties={
'_children': relationship(Child, lazy='joined',
- collection_class=list)})
+ collection_class=list)})
mapper(Child, children_table)
metadata.create_all()
mapper(Parent, parents_table, properties={
'child': relationship(Child, lazy='joined',
- backref='parent', uselist=False)})
+ backref='parent', uselist=False)})
mapper(Child, children_table)
metadata.create_all()
metadata = self.metadata
a = Table('a', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('name', String(50)))
a2b = Table('a2b', metadata,
- Column('id', Integer, primary_key=True),
- Column('id_a', Integer, ForeignKey('a.id')),
- Column('id_b', Integer, ForeignKey('b.id')),
- Column('name', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('id_a', Integer, ForeignKey('a.id')),
+ Column('id_b', Integer, ForeignKey('b.id')),
+ Column('name', String(50)))
b = Table('b', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('name', String(50)))
+
class A(object):
a2b_name = association_proxy("a2b_single", "name")
b_single = association_proxy("a2b_single", "b")
def test_custom_getset(self):
metadata = MetaData()
p = Table('p', metadata,
- Column('id', Integer, primary_key=True),
- Column('cid', Integer, ForeignKey('c.id')))
+ Column('id', Integer, primary_key=True),
+ Column('cid', Integer, ForeignKey('c.id')))
c = Table('c', metadata,
- Column('id', Integer, primary_key=True),
- Column('foo', String(128)))
+ Column('id', Integer, primary_key=True),
+ Column('foo', String(128)))
get = Mock()
set_ = Mock()
+
class Parent(object):
foo = association_proxy('child', 'foo',
- getset_factory=lambda cc, parent: (get, set_))
+ getset_factory=lambda cc,
+ parent: (get, set_))
class Child(object):
def __init__(self, foo):
eq_(set_.mock_calls, [call(child, "y")])
-
class LazyLoadTest(fixtures.TestBase):
def setup(self):
metadata = MetaData(testing.db)
def __init__(self, name):
self.name = name
-
mapper(Child, children_table)
metadata.create_all()
mapper(Parent, self.table, properties={
'_children': relationship(Child, lazy='select',
- collection_class=list)})
+ collection_class=list)})
p = Parent('p')
p.children = ['a', 'b', 'c']
mapper(Parent, self.table, properties={
'_children': relationship(Child, lazy='joined',
- collection_class=list)})
+ collection_class=list)})
p = Parent('p')
p.children = ['a', 'b', 'c']
mapper(Parent, self.table, properties={
'_children': relationship(Child, lazy='select',
- collection_class=list)})
+ collection_class=list)})
p = Parent('p')
p.children = ['a', 'b', 'c']
mapper(Parent, self.table, properties={
'_children': relationship(Child, lazy='select', uselist=False)})
-
p = Parent('p')
p.children = 'value'
mapper(Parent, self.table, properties={
'_children': relationship(Child, lazy='joined', uselist=False)})
-
p = Parent('p')
p.children = 'value'
def __init__(self, name):
self.name = name
+
class Child(object):
def __init__(self, name):
self.name = name
+
class KVChild(object):
def __init__(self, name, value):
self.name = name
self.value = value
+
class ReconstitutionTest(fixtures.TestBase):
def setup(self):
assert r1.kids == ['c1', 'c2']
# can't do this without parent having a cycle
- #r2 = pickle.loads(pickle.dumps(p.kids))
- #assert r2 == ['c1', 'c2']
+ # r2 = pickle.loads(pickle.dumps(p.kids))
+ # assert r2 == ['c1', 'c2']
def test_pickle_set(self):
mapper(Parent, self.parents,
properties=dict(children=relationship(Child,
- collection_class=set)))
+ collection_class=set)))
mapper(Child, self.children)
p = Parent('p1')
p.kids.update(['c1', 'c2'])
assert r1.kids == set(['c1', 'c2'])
# can't do this without parent having a cycle
- #r2 = pickle.loads(pickle.dumps(p.kids))
- #assert r2 == set(['c1', 'c2'])
+ # r2 = pickle.loads(pickle.dumps(p.kids))
+ # assert r2 == set(['c1', 'c2'])
def test_pickle_dict(self):
mapper(Parent, self.parents,
- properties=dict(children=relationship(KVChild,
- collection_class=
- collections.mapped_collection(PickleKeyFunc('name')))))
+ properties=dict(
+ children=relationship(
+ KVChild,
+ collection_class=collections.mapped_collection(
+ PickleKeyFunc('name')))))
mapper(KVChild, self.children)
p = Parent('p1')
p.kids.update({'c1': 'v1', 'c2': 'v2'})
assert r1.kids == {'c1': 'c1', 'c2': 'c2'}
# can't do this without parent having a cycle
- #r2 = pickle.loads(pickle.dumps(p.kids))
- #assert r2 == {'c1': 'c1', 'c2': 'c2'}
+ # r2 = pickle.loads(pickle.dumps(p.kids))
+ # assert r2 == {'c1': 'c1', 'c2': 'c2'}
+
class PickleKeyFunc(object):
def __init__(self, name):
def __call__(self, obj):
return getattr(obj, self.name)
+
class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = 'default'
@classmethod
def define_tables(cls, metadata):
Table('userkeywords', metadata,
- Column('keyword_id', Integer, ForeignKey('keywords.id'), primary_key=True),
- Column('user_id', Integer, ForeignKey('users.id')),
- Column('value', String(50))
- )
+ Column('keyword_id', Integer, ForeignKey('keywords.id'),
+ primary_key=True),
+ Column('user_id', Integer, ForeignKey('users.id')),
+ Column('value', String(50)))
Table('users', metadata,
- Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
- Column('name', String(64)),
- Column('singular_id', Integer, ForeignKey('singular.id'))
- )
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
+ Column('name', String(64)),
+ Column('singular_id', Integer, ForeignKey('singular.id')))
Table('keywords', metadata,
- Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
- Column('keyword', String(64)),
- Column('singular_id', Integer, ForeignKey('singular.id'))
- )
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
+ Column('keyword', String(64)),
+ Column('singular_id', Integer, ForeignKey('singular.id')))
Table('singular', metadata,
- Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
- Column('value', String(50))
- )
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
+ Column('value', String(50)))
@classmethod
def setup_classes(cls):
# o2m -> m2o
# uselist -> nonuselist
- keywords = association_proxy('user_keywords', 'keyword',
- creator=lambda k: UserKeyword(keyword=k))
+ keywords = association_proxy(
+ 'user_keywords',
+ 'keyword',
+ creator=lambda k: UserKeyword(keyword=k))
# m2o -> o2m
# nonuselist -> uselist
def setup_mappers(cls):
users, Keyword, UserKeyword, singular, \
userkeywords, User, keywords, Singular = (cls.tables.users,
- cls.classes.Keyword,
- cls.classes.UserKeyword,
- cls.tables.singular,
- cls.tables.userkeywords,
- cls.classes.User,
- cls.tables.keywords,
- cls.classes.Singular)
+ cls.classes.Keyword,
+ cls.classes.UserKeyword,
+ cls.tables.singular,
+ cls.tables.userkeywords,
+ cls.classes.User,
+ cls.tables.keywords,
+ cls.classes.Singular)
mapper(User, users, properties={
'singular': relationship(Singular)
@classmethod
def insert_data(cls):
UserKeyword, User, Keyword, Singular = (cls.classes.UserKeyword,
- cls.classes.User,
- cls.classes.Keyword,
- cls.classes.Singular)
+ cls.classes.User,
+ cls.classes.Keyword,
+ cls.classes.Singular)
session = sessionmaker()()
words = (
if ii % 2 == 0:
user.singular = Singular(value=("singular%d" % ii)
- if ii % 4 == 0 else None)
+ if ii % 4 == 0 else None)
session.add(user)
for jj in words[(ii % len(words)):((ii + 3) % len(words))]:
k = Keyword(jj)
UserKeyword, User = self.classes.UserKeyword, self.classes.User
self._equivalent(self.session.query(User).
- filter(User.keywords.any(keyword='jumped'
- )),
+ filter(User.keywords.any(keyword='jumped')),
self.session.query(User).filter(
- User.user_keywords.any(
- UserKeyword.keyword.has(keyword='jumped'
- ))))
+ User.user_keywords.any(
+ UserKeyword.keyword.has(keyword='jumped'))))
def test_filter_has_kwarg_nul_nul(self):
UserKeyword, Keyword = self.classes.UserKeyword, self.classes.Keyword
self._equivalent(self.session.query(Keyword).
- filter(Keyword.user.has(name='user2'
- )),
+ filter(Keyword.user.has(name='user2')),
self.session.query(Keyword).
- filter(Keyword.user_keyword.has(
- UserKeyword.user.has(name='user2'
- ))))
+ filter(Keyword.user_keyword.has(
+ UserKeyword.user.has(name='user2'))))
def test_filter_has_kwarg_nul_ul(self):
User, Singular = self.classes.User, self.classes.Singular
self._equivalent(
- self.session.query(User).\
- filter(User.singular_keywords.any(keyword='jumped')),
- self.session.query(User).\
- filter(
- User.singular.has(
- Singular.keywords.any(keyword='jumped')
- )
- )
- )
+ self.session.query(User).filter(
+ User.singular_keywords.any(keyword='jumped')),
+ self.session.query(User).filter(
+ User.singular.has(Singular.keywords.any(keyword='jumped'))))
def test_filter_any_criterion_ul_nul(self):
UserKeyword, User, Keyword = (self.classes.UserKeyword,
- self.classes.User,
- self.classes.Keyword)
+ self.classes.User,
+ self.classes.Keyword)
- self._equivalent(self.session.query(User).
- filter(User.keywords.any(Keyword.keyword
- == 'jumped')),
- self.session.query(User).
- filter(User.user_keywords.any(
- UserKeyword.keyword.has(Keyword.keyword
- == 'jumped'))))
+ self._equivalent(
+ self.session.query(User).filter(
+ User.keywords.any(Keyword.keyword == 'jumped')),
+ self.session.query(User).filter(
+ User.user_keywords.any(
+ UserKeyword.keyword.has(Keyword.keyword == 'jumped'))))
def test_filter_has_criterion_nul_nul(self):
UserKeyword, User, Keyword = (self.classes.UserKeyword,
- self.classes.User,
- self.classes.Keyword)
+ self.classes.User,
+ self.classes.Keyword)
self._equivalent(self.session.query(Keyword).
- filter(Keyword.user.has(User.name == 'user2')),
+ filter(Keyword.user.has(User.name == 'user2')),
self.session.query(Keyword).
- filter(Keyword.user_keyword.has(
- UserKeyword.user.has(User.name == 'user2'))))
+ filter(Keyword.user_keyword.has(
+ UserKeyword.user.has(User.name == 'user2'))))
def test_filter_any_criterion_nul_ul(self):
User, Keyword, Singular = (self.classes.User,
- self.classes.Keyword,
- self.classes.Singular)
+ self.classes.Keyword,
+ self.classes.Singular)
self._equivalent(
self.session.query(User).
- filter(User.singular_keywords.any(
- Keyword.keyword == 'jumped')),
+ filter(User.singular_keywords.any(
+ Keyword.keyword == 'jumped')),
self.session.query(User).
- filter(
- User.singular.has(
- Singular.keywords.any(Keyword.keyword == 'jumped')
- )
- )
- )
+ filter(User.singular.has(
+ Singular.keywords.any(Keyword.keyword == 'jumped'))))
def test_filter_contains_ul_nul(self):
User = self.classes.User
self._equivalent(self.session.query(User).
- filter(User.keywords.contains(self.kw)),
+ filter(User.keywords.contains(self.kw)),
self.session.query(User).
filter(User.user_keywords.any(keyword=self.kw)))
def test_filter_ne_nul_nul(self):
Keyword = self.classes.Keyword
- self._equivalent(self.session.query(Keyword).filter(Keyword.user != self.u),
- self.session.query(Keyword).
- filter(
- Keyword.user_keyword.has(Keyword.user != self.u)
- )
- )
+ self._equivalent(self.session.query(Keyword).filter(
+ Keyword.user != self.u),
+ self.session.query(Keyword).filter(
+ Keyword.user_keyword.has(Keyword.user != self.u)))
def test_filter_eq_null_nul_nul(self):
UserKeyword, Keyword = self.classes.UserKeyword, self.classes.Keyword
self._equivalent(
- self.session.query(Keyword).filter(Keyword.user == None),
- self.session.query(Keyword).
- filter(
- or_(
- Keyword.user_keyword.has(UserKeyword.user == None),
- Keyword.user_keyword == None
- )
-
- )
- )
+ self.session.query(Keyword).filter(Keyword.user == None), # noqa
+ self.session.query(Keyword).filter(
+ or_(Keyword.user_keyword.has(UserKeyword.user == None),
+ Keyword.user_keyword == None)))
def test_filter_ne_null_nul_nul(self):
UserKeyword, Keyword = self.classes.UserKeyword, self.classes.Keyword
self._equivalent(
- self.session.query(Keyword).filter(Keyword.user != None),
- self.session.query(Keyword).
- filter(
- Keyword.user_keyword.has(UserKeyword.user != None),
- )
- )
+ self.session.query(Keyword).filter(
+ Keyword.user != None), # noqa
+ self.session.query(Keyword).filter(
+ Keyword.user_keyword.has(UserKeyword.user != None)))
def test_filter_eq_None_nul(self):
User = self.classes.User
Singular = self.classes.Singular
self._equivalent(
- self.session.query(User).filter(User.singular_value == None),
self.session.query(User).filter(
- or_(
- User.singular.has(Singular.value == None),
- User.singular == None
- )
- )
- )
+ User.singular_value == None), # noqa
+ self.session.query(User).filter(or_(
+ User.singular.has(Singular.value == None),
+ User.singular == None)))
def test_filter_ne_value_nul(self):
User = self.classes.User
Singular = self.classes.Singular
self._equivalent(
- self.session.query(User).filter(User.singular_value != "singular4"),
self.session.query(User).filter(
- User.singular.has(Singular.value != "singular4"),
- )
- )
+ User.singular_value != "singular4"),
+ self.session.query(User).filter(
+ User.singular.has(Singular.value != "singular4")))
def test_filter_eq_value_nul(self):
User = self.classes.User
Singular = self.classes.Singular
self._equivalent(
- self.session.query(User).filter(User.singular_value == "singular4"),
self.session.query(User).filter(
- User.singular.has(Singular.value == "singular4"),
- )
- )
+ User.singular_value == "singular4"),
+ self.session.query(User).filter(
+ User.singular.has(Singular.value == "singular4")))
def test_filter_ne_None_nul(self):
User = self.classes.User
Singular = self.classes.Singular
self._equivalent(
- self.session.query(User).filter(User.singular_value != None),
self.session.query(User).filter(
- User.singular.has(Singular.value != None),
- )
- )
+ User.singular_value != None), # noqa
+ self.session.query(User).filter(
+ User.singular.has(Singular.value != None)))
def test_has_nul(self):
# a special case where we provide an empty has() on a
Keyword = self.classes.Keyword
assert_raises(exc.InvalidRequestError,
- lambda: Keyword.user.contains(self.u))
+ lambda: Keyword.user.contains(self.u))
def test_filter_scalar_any_fails_nul_nul(self):
Keyword = self.classes.Keyword
assert_raises(exc.InvalidRequestError,
- lambda: Keyword.user.any(name='user2'))
+ lambda: Keyword.user.any(name='user2'))
def test_filter_collection_has_fails_ul_nul(self):
User = self.classes.User
assert_raises(exc.InvalidRequestError,
- lambda: User.keywords.has(keyword='quick'))
+ lambda: User.keywords.has(keyword='quick'))
def test_filter_collection_eq_fails_ul_nul(self):
User = self.classes.User
assert_raises(exc.InvalidRequestError,
- lambda: User.keywords == self.kw)
+ lambda: User.keywords == self.kw)
def test_filter_collection_ne_fails_ul_nul(self):
User = self.classes.User
assert_raises(exc.InvalidRequestError,
- lambda: User.keywords != self.kw)
+ lambda: User.keywords != self.kw)
def test_join_separate_attr(self):
User = self.classes.User
"userkeywords.keyword_id"
)
+
class DictOfTupleUpdateTest(fixtures.TestBase):
def setup(self):
class B(object):
m = MetaData()
a = Table('a', m, Column('id', Integer, primary_key=True))
b = Table('b', m, Column('id', Integer, primary_key=True),
- Column('aid', Integer, ForeignKey('a.id')))
+ Column('aid', Integer, ForeignKey('a.id')))
mapper(A, a, properties={
- 'orig': relationship(B, collection_class=attribute_mapped_collection('key'))
+ 'orig': relationship(
+ B,
+ collection_class=attribute_mapped_collection('key'))
})
mapper(B, b)
self.A = A
"""
The underlying reflect call accepts an optional schema argument.
This is for determining which database schema to load.
- This test verifies that prepare passes a default None if no schema is provided.
+ This test verifies that prepare passes a default None if no schema is
+ provided.
"""
Base = automap_base(metadata=self.metadata)
engine_mock = Mock()
from sqlalchemy.orm import exc as orm_exc
import itertools
from sqlalchemy.testing import mock
+from sqlalchemy.testing.assertsql import CompiledSQL
class BakedTest(_fixtures.FixtureTest):
def test_initial_key(self):
User = self.classes.User
session = Session()
- l1 = lambda: session.query(User)
+
+ def l1(): return session.query(User)
q1 = self.bakery(l1)
self._assert_cache_key(
q1._cache_key,
def test_inplace_add(self):
User = self.classes.User
session = Session()
- l1 = lambda: session.query(User)
- l2 = lambda q: q.filter(User.name == bindparam('name'))
+
+ def l1(): return session.query(User)
+
+ def l2(q): return q.filter(User.name == bindparam('name'))
q1 = self.bakery(l1)
self._assert_cache_key(
q1._cache_key,
def test_inplace_add_operator(self):
User = self.classes.User
session = Session()
- l1 = lambda: session.query(User)
- l2 = lambda q: q.filter(User.name == bindparam('name'))
+
+ def l1(): return session.query(User)
+
+ def l2(q): return q.filter(User.name == bindparam('name'))
q1 = self.bakery(l1)
self._assert_cache_key(
q1._cache_key,
def test_chained_add(self):
User = self.classes.User
session = Session()
- l1 = lambda: session.query(User)
- l2 = lambda q: q.filter(User.name == bindparam('name'))
+
+ def l1(): return session.query(User)
+
+ def l2(q): return q.filter(User.name == bindparam('name'))
q1 = self.bakery(l1)
q2 = q1.with_criteria(l2)
def test_chained_add_operator(self):
User = self.classes.User
session = Session()
- l1 = lambda: session.query(User)
- l2 = lambda q: q.filter(User.name == bindparam('name'))
+
+ def l1(): return session.query(User)
+
+ def l2(q): return q.filter(User.name == bindparam('name'))
q1 = self.bakery(l1)
q2 = q1 + l2
User = self.classes.User
queue = [7, 8]
- fn = lambda s: s.query(User.id).filter_by(id=queue.pop(0))
+
+ def fn(s): return s.query(User.id).filter_by(id=queue.pop(0))
bq1 = self.bakery(fn, 7)
bq2 = self.bakery(fn, 8)
assert_result = [
User(id=7,
- addresses=[Address(id=1, email_address='jack@bean.com')],
- orders=[Order(id=1), Order(id=3), Order(id=5)]),
+ addresses=[Address(id=1, email_address='jack@bean.com')],
+ orders=[Order(id=1), Order(id=3), Order(id=5)]),
User(id=8, addresses=[
Address(id=2, email_address='ed@wood.com'),
Address(id=3, email_address='ed@bettyboop.com'),
Address(id=4, email_address='ed@lala.com'),
]),
User(id=9,
- addresses=[Address(id=5)],
- orders=[Order(id=2), Order(id=4)]),
+ addresses=[Address(id=5)],
+ orders=[Order(id=2), Order(id=4)]),
User(id=10, addresses=[])
]
sess.close()
-from sqlalchemy.testing.assertsql import CompiledSQL
-
class LazyLoaderTest(testing.AssertsCompiledSQL, BakedTest):
run_setup_mappers = 'each'
propagate_to_loaders = True
sess = Session()
- u1 = sess.query(User).options(MyBogusOption()).filter(User.id == 8).one()
+ u1 = sess.query(User).options(MyBogusOption()).filter(User.id == 8) \
+ .one()
def go():
eq_(u1.addresses[0].user, u1)
# 2. o2m lazyload where m2o backrefs have an eager load, test
# that eager load is canceled out
# 3. uselist = False, uselist=False assertion
-
def test_column(self):
class MyThingy(ColumnClause):
- def __init__(self, arg= None):
+ def __init__(self, arg=None):
super(MyThingy, self).__init__(arg or 'MYTHINGY!')
@compiles(MyThingy)
return compiler.visit_create_column(element, **kw)
t = Table('t', MetaData(), Column('a', Integer),
- Column('xmin', Integer),
- Column('c', Integer))
+ Column('xmin', Integer),
+ Column('c', Integer))
self.assert_compile(
CreateTable(t),
"CREATE TABLE t (a INTEGER, c INTEGER)"
)
+
def test_types(self):
class MyType(TypeEngine):
pass
dialect=postgresql.dialect()
)
-
def test_stateful(self):
class MyThingy(ColumnClause):
def __init__(self):
self.assert_compile(
InsertFromSelect(
t1,
- select([t1]).where(t1.c.x>5)
+ select([t1]).where(t1.c.x > 5)
),
"INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z "
"FROM mytable WHERE mytable.x > :x_1)"
def visit_drop_thingy(thingy, compiler, **kw):
return "DROP THINGY"
- self.assert_compile(AddThingy(),
- "ADD THINGY"
- )
+ self.assert_compile(AddThingy(), "ADD THINGY")
- self.assert_compile(DropThingy(),
- "DROP THINGY"
- )
+ self.assert_compile(DropThingy(), "DROP THINGY")
from sqlalchemy.dialects.sqlite import base
self.assert_compile(AddThingy(),
- "ADD SPECIAL SL THINGY",
- dialect=base.dialect()
- )
+ "ADD SPECIAL SL THINGY",
+ dialect=base.dialect())
self.assert_compile(DropThingy(),
- "DROP THINGY",
- dialect=base.dialect()
- )
+ "DROP THINGY",
+ dialect=base.dialect())
@compiles(DropThingy, 'sqlite')
def visit_drop_thingy(thingy, compiler, **kw):
return "DROP SPECIAL SL THINGY"
self.assert_compile(DropThingy(),
- "DROP SPECIAL SL THINGY",
- dialect=base.dialect()
- )
+ "DROP SPECIAL SL THINGY",
+ dialect=base.dialect())
- self.assert_compile(DropThingy(),
- "DROP THINGY",
- )
+ self.assert_compile(DropThingy(), "DROP THINGY")
def test_functions(self):
from sqlalchemy.dialects import postgresql
def test_binds_in_select(self):
t = table('t',
- column('a'),
- column('b'),
- column('c')
- )
+ column('a'),
+ column('b'),
+ column('c'))
@compiles(BindParameter)
def gen_bind(element, compiler, **kw):
def test_binds_in_dml(self):
t = table('t',
- column('a'),
- column('b'),
- column('c')
- )
+ column('a'),
+ column('b'),
+ column('c'))
@compiles(BindParameter)
def gen_bind(element, compiler, **kw):
self.assert_compile(
t.insert(),
"INSERT INTO t (a, b) VALUES (BIND(:a), BIND(:b))",
- {'a':1, 'b':2},
+ {'a': 1, 'b': 2},
use_default_dialect=True
)
pass
register_class(A)
- mgr_factory = lambda cls: instrumentation.ClassManager(cls)
+ def mgr_factory(cls): return instrumentation.ClassManager(cls)
class B(object):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
pass
register_class(A)
- mgr_factory = lambda cls: instrumentation.ClassManager(cls)
+ def mgr_factory(cls): return instrumentation.ClassManager(cls)
class B(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
pass
# delay registration
- mgr_factory = lambda cls: instrumentation.ClassManager(cls)
+ def mgr_factory(cls): return instrumentation.ClassManager(cls)
class B(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
register_class, A)
def test_diamond_b1(self):
- mgr_factory = lambda cls: instrumentation.ClassManager(cls)
+ def mgr_factory(cls): return instrumentation.ClassManager(cls)
class A(object):
pass
register_class, B1)
def test_diamond_b2(self):
- mgr_factory = lambda cls: instrumentation.ClassManager(cls)
+ def mgr_factory(cls): return instrumentation.ClassManager(cls)
class A(object):
pass
register_class, B1)
def test_diamond_c_b(self):
- mgr_factory = lambda cls: instrumentation.ClassManager(cls)
+ def mgr_factory(cls): return instrumentation.ClassManager(cls)
class A(object):
pass
# TODO: ShardTest can be turned into a base for further subclasses
-
-
class ShardTest(object):
__skip_if__ = (lambda: util.win32,)
__requires__ = 'sqlite',
meta = MetaData()
ids = Table('ids', meta,
- Column('nextid', Integer, nullable=False))
+ Column('nextid', Integer, nullable=False))
def id_generator(ctx):
# in reality, might want to use a separate transaction for this.
c.execute(ids.update(values={ids.c.nextid: ids.c.nextid + 1}))
return nextid
- weather_locations = Table("weather_locations", meta,
- Column('id', Integer, primary_key=True, default=id_generator),
- Column('continent', String(30), nullable=False),
- Column('city', String(50), nullable=False),
- schema=self.schema
- )
+ weather_locations = Table(
+ "weather_locations", meta,
+ Column('id', Integer, primary_key=True, default=id_generator),
+ Column('continent', String(30), nullable=False),
+ Column('city', String(50), nullable=False),
+ schema=self.schema
+ )
weather_reports = Table(
'weather_reports',
self.setup_session()
self.setup_mappers()
-
@classmethod
def setup_session(cls):
global create_session
def query_chooser(query):
ids = []
-
class FindContinent(sql.ClauseVisitor):
def visit_binary(self, binary):
return ids
create_session = sessionmaker(class_=ShardedSession,
- autoflush=True, autocommit=False)
+ autoflush=True, autocommit=False)
create_session.configure(shards={
'north_america': db1,
'asia': db2,
}, shard_chooser=shard_chooser, id_chooser=id_chooser,
query_chooser=query_chooser)
-
@classmethod
def setup_mappers(cls):
global WeatherLocation, Report
dublin,
brasilia,
quito,
- ]:
+ ]:
sess.add(c)
sess.commit()
sess.close()
eq_(db2.execute(weather_locations.select()).fetchall(), [(1,
'Asia', 'Tokyo')])
eq_(db1.execute(weather_locations.select()).fetchall(), [(2,
- 'North America', 'New York'), (3, 'North America', 'Toronto'
- )])
- eq_(sess.execute(weather_locations.select(), shard_id='asia'
- ).fetchall(), [(1, 'Asia', 'Tokyo')])
+ 'North America', 'New York'), (3, 'North America', 'Toronto')])
+ eq_(sess.execute(weather_locations.select(), shard_id='asia')
+ .fetchall(), [(1, 'Asia', 'Tokyo')])
t = sess.query(WeatherLocation).get(tokyo.id)
eq_(t.city, tokyo.city)
eq_(t.reports[0].temperature, 80.0)
north_american_cities = \
- sess.query(WeatherLocation).filter(WeatherLocation.continent
- == 'North America')
+ sess.query(WeatherLocation).filter(
+ WeatherLocation.continent == 'North America')
eq_(set([c.city for c in north_american_cities]),
set(['New York', 'Toronto']))
asia_and_europe = \
def test_shard_id_event(self):
canary = []
+
def load(instance, ctx):
canary.append(ctx.attributes["shard_id"])
sess = self._fixture_data()
tokyo = sess.query(WeatherLocation).\
- filter_by(city="Tokyo").set_shard("asia").one()
+ filter_by(city="Tokyo").set_shard("asia").one()
sess.query(WeatherLocation).all()
eq_(
canary,
['asia', 'north_america', 'north_america',
- 'europe', 'europe', 'south_america',
- 'south_america']
+ 'europe', 'europe', 'south_america',
+ 'south_america']
)
+
class DistinctEngineShardTest(ShardTest, fixtures.TestBase):
def _init_dbs(self):
db1 = testing_engine('sqlite:///shard1.db',
- options=dict(pool_threadlocal=True))
+ options=dict(pool_threadlocal=True))
db2 = testing_engine('sqlite:///shard2.db')
db3 = testing_engine('sqlite:///shard3.db')
db4 = testing_engine('sqlite:///shard4.db')
for i in range(1, 5):
os.remove("shard%d.db" % i)
+
class AttachedFileShardTest(ShardTest, fixtures.TestBase):
schema = "changeme"
def _init_dbs(self):
db1 = testing_engine('sqlite://', options={"execution_options":
- {"shard_id": "shard1"}})
+ {"shard_id": "shard1"}})
db2 = db1.execution_options(shard_id="shard2")
db3 = db1.execution_options(shard_id="shard3")
db4 = db1.execution_options(shard_id="shard4")
import re
+
@event.listens_for(db1, "before_cursor_execute", retval=True)
def _switch_shard(conn, cursor, stmt, params, context, executemany):
shard_id = conn._execution_options['shard_id']
return stmt, params
return db1, db2, db3, db4
-
-
def __eq__(self, other):
if other is None:
- return self.expression == None
+            return self.expression == None  # noqa
else:
return func.upper(self.expression) == func.upper(other)
def test_value(self):
A = self._fixture()
- eq_(str(A.value==5), "upper(a.value) = upper(:upper_1)")
+ eq_(str(A.value == 5), "upper(a.value) = upper(:upper_1)")
def test_aliased_value(self):
A = self._fixture()
- eq_(str(aliased(A).value==5), "upper(a_1.value) = upper(:upper_1)")
+ eq_(str(aliased(A).value == 5), "upper(a_1.value) = upper(:upper_1)")
def test_query(self):
A = self._fixture()
"AND foo(a.value) + bar(a.value) = :param_1)"
)
-
def test_aliased_expression(self):
A = self._fixture()
self.assert_compile(
A = self._fixture()
sess = Session()
self.assert_compile(
- sess.query(A).filter(A.value(5)=="foo"),
+ sess.query(A).filter(A.value(5) == "foo"),
"SELECT a.value AS a_value, a.id AS a_id "
"FROM a WHERE foo(a.value, :foo_1) + :foo_2 = :param_1"
)
sess = Session()
a1 = aliased(A)
self.assert_compile(
- sess.query(a1).filter(a1.value(5)=="foo"),
+ sess.query(a1).filter(a1.value(5) == "foo"),
"SELECT a_1.value AS a_1_value, a_1.id AS a_1_id "
"FROM a AS a_1 WHERE foo(a_1.value, :foo_1) + :foo_2 = :param_1"
)
eq_(
BankAccount.balance.__doc__,
"Return an Amount view of the current balance.")
-
Json(json={'field': 20})])
s.commit()
- a1 = s.query(Json).filter(Json.json['field'].astext.cast(Integer) == 10)\
+ a1 = s.query(Json)\
+ .filter(Json.json['field'].astext.cast(Integer) == 10)\
.one()
a2 = s.query(Json).filter(Json.field.astext == '10').one()
eq_(a1.id, a2.id)
assert f1 in sess.dirty
-
class MutableWithScalarPickleTest(_MutableDictTestBase, fixtures.MappedTest):
@classmethod
self._test_non_mutable()
-class MutableListWithScalarPickleTest(_MutableListTestBase, fixtures.MappedTest):
+class MutableListWithScalarPickleTest(_MutableListTestBase,
+ fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
metadata = None
-# order in whole steps
+
def step_numbering(step):
+ """ order in whole steps """
def f(index, collection):
return step * index
return f
-# almost fibonacci- skip the first 2 steps
-# e.g. 1, 2, 3, 5, 8, ... instead of 0, 1, 1, 2, 3, ...
-# otherwise ordering of the elements at '1' is undefined... ;)
+
def fibonacci_numbering(order_col):
+ """
+ almost fibonacci- skip the first 2 steps
+ e.g. 1, 2, 3, 5, 8, ... instead of 0, 1, 1, 2, 3, ...
+ otherwise ordering of the elements at '1' is undefined... ;)
+ """
def f(index, collection):
if index == 0:
return 1
getattr(collection[index - 2], order_col))
return f
-# 0 -> A, 1 -> B, ... 25 -> Z, 26 -> AA, 27 -> AB, ...
+
def alpha_ordering(index, collection):
+ """
+ 0 -> A, 1 -> B, ... 25 -> Z, 26 -> AA, 27 -> AB, ...
+ """
s = ''
while index > 25:
d = index / 26
s += chr(index + 65)
return s
+
class OrderingListTest(fixtures.TestBase):
def setup(self):
global metadata, slides_table, bullets_table, Slide, Bullet
class Slide(object):
def __init__(self, name):
self.name = name
+
def __repr__(self):
return '<Slide "%s">' % self.name
class Bullet(object):
def __init__(self, text):
self.text = text
+
def __repr__(self):
return '<Bullet "%s" pos %s>' % (self.text, self.position)
mapper(Slide, slides_table, properties={
'bullets': relationship(Bullet, lazy='joined',
- collection_class=test_collection_class,
- backref='slide',
- order_by=[bullets_table.c.position])
- })
+ collection_class=test_collection_class,
+ backref='slide',
+ order_by=[bullets_table.c.position])})
mapper(Bullet, bullets_table)
metadata.create_all()
self.assert_(srt.bullets)
self.assert_(len(srt.bullets) == 4)
- titles = ['s1/b1','s1/b2','s1/b100','s1/b4']
+ titles = ['s1/b1', 's1/b2', 's1/b100', 's1/b4']
found = [b.text for b in srt.bullets]
self.assert_(titles == found)
self.assert_(srt.bullets)
self.assert_(len(srt.bullets) == 5)
- titles = ['s1/b1','s1/b2','s1/b100','s1/b4', 'raw']
+ titles = ['s1/b1', 's1/b2', 's1/b100', 's1/b4', 'raw']
found = [b.text for b in srt.bullets]
eq_(titles, found)
session.expunge_all()
srt = session.query(Slide).get(id)
- titles = ['s1/b1','s1/b2','s1/b100','s1/b4', 'raw', 'raw2']
+ titles = ['s1/b1', 's1/b2', 's1/b100', 's1/b4', 'raw', 'raw2']
found = [b.text for b in srt.bullets]
eq_(titles, found)
self.assert_(srt.bullets)
self.assert_(len(srt.bullets) == 6)
- texts = ['1','2','insert_at_2','3','4','999']
+ texts = ['1', '2', 'insert_at_2', '3', '4', '999']
found = [b.text for b in srt.bullets]
self.assert_(texts == found)
def test_slice(self):
self._setup(ordering_list('position'))
- b = [ Bullet('1'), Bullet('2'), Bullet('3'),
- Bullet('4'), Bullet('5'), Bullet('6') ]
+ b = [Bullet('1'), Bullet('2'), Bullet('3'),
+ Bullet('4'), Bullet('5'), Bullet('6')]
s1 = Slide('Slide #1')
# 1, 2, 3
# 1, 4, 5, 6, 3
s1.bullets[1:2] = b[3:6]
- for li, bi in (0,0), (1,3), (2,4), (3,5), (4,2):
+ for li, bi in (0, 0), (1, 3), (2, 4), (3, 5), (4, 2):
self.assert_(s1.bullets[li].position == li)
self.assert_(s1.bullets[li] == b[bi])
# 1, 6, 3
del s1.bullets[1:3]
- for li, bi in (0,0), (1,5), (2,2):
+ for li, bi in (0, 0), (1, 5), (2, 2):
self.assert_(s1.bullets[li].position == li)
self.assert_(s1.bullets[li] == b[bi])
self._setup(ordering_list('position'))
s1 = Slide('Slide #1')
- s1.bullets = [ Bullet('1'), Bullet('2'), Bullet('3') ]
+ s1.bullets = [Bullet('1'), Bullet('2'), Bullet('3')]
self.assert_(len(s1.bullets) == 3)
self.assert_(s1.bullets[2].position == 2)
self.assert_(new_bullet.position is None)
# mark existing bullet as db-deleted before replacement.
- #session.delete(s1.bullets[1])
+ # session.delete(s1.bullets[1])
s1.bullets[1] = new_bullet
self.assert_(new_bullet.position == 1)
[0, 1, 2]
)
-
def test_funky_ordering(self):
class Pos(object):
def __init__(self):
stepped.append(Pos())
stepped.append(Pos())
- for li, pos in (0,0), (1,2), (2,4), (3,6):
+ for li, pos in (0, 0), (1, 2), (2, 4), (3, 6):
self.assert_(stepped[li].position == pos)
- fib_factory = ordering_list('position',
- ordering_func=fibonacci_numbering('position'))
+ fib_factory = ordering_list(
+ 'position',
+ ordering_func=fibonacci_numbering('position'))
fibbed = fib_factory()
fibbed.append(Pos())
fibbed.append(Pos())
fibbed.append(Pos())
- for li, pos in (0,1), (1,2), (2,3), (3,5), (4,8):
+ for li, pos in (0, 1), (1, 2), (2, 3), (3, 5), (4, 8):
self.assert_(fibbed[li].position == pos)
fibbed.insert(2, Pos())
(5, 13),
(6, 21),
(7, 34),
- ):
+ ):
self.assert_(fibbed[li].position == pos)
alpha_factory = ordering_list('position',
alpha.insert(1, Pos())
- for li, pos in (0,'A'), (1,'B'), (2,'C'), (3,'D'):
+ for li, pos in (0, 'A'), (1, 'B'), (2, 'C'), (3, 'D'):
self.assert_(alpha[li].position == pos)
-
def test_picklability(self):
from sqlalchemy.ext.orderinglist import OrderingList
self.assert_(copy == olist)
self.assert_(copy.__dict__ == olist.__dict__)
+
class DummyItem(object):
def __init__(self, order=None):
self.order = order
from sqlalchemy.testing import fixtures
+
class User(fixtures.ComparableEntity):
pass
+
class Address(fixtures.ComparableEntity):
pass
+
users = addresses = Session = None
+
class SerializeTest(AssertsCompiledSQL, fixtures.MappedTest):
run_setup_mappers = 'once'
def setup_mappers(cls):
global Session
Session = scoped_session(sessionmaker())
- mapper(User, users, properties={'addresses'
- : relationship(Address, backref='user',
- order_by=addresses.c.id)})
+ mapper(User, users,
+ properties={'addresses': relationship(Address, backref='user',
+ order_by=addresses.c.id)})
mapper(Address, addresses)
configure_mappers()
@classmethod
def insert_data(cls):
params = [dict(list(zip(('id', 'name'), column_values)))
- for column_values in [(7, 'jack'), (8, 'ed'), (9,
- 'fred'), (10, 'chuck')]]
+ for column_values in [(7, 'jack'), (8, 'ed'), (9, 'fred'),
+ (10, 'chuck')]]
users.insert().execute(params)
- addresses.insert().execute([dict(list(zip(('id', 'user_id', 'email'
- ), column_values)))
- for column_values in [(1, 7,
- 'jack@bean.com'), (2, 8,
- 'ed@wood.com'), (3, 8,
- 'ed@bettyboop.com'), (4, 8,
- 'ed@lala.com'), (5, 9,
- 'fred@fred.com')]])
+ addresses.insert().execute([dict(list(zip(('id', 'user_id', 'email'),
+ column_values)))
+ for column_values in [
+ (1, 7, 'jack@bean.com'),
+ (2, 8, 'ed@wood.com'),
+ (3, 8, 'ed@bettyboop.com'),
+ (4, 8, 'ed@lala.com'),
+ (5, 9, 'fred@fred.com')]])
def test_tables(self):
assert serializer.loads(serializer.dumps(users, -1),
(8, 'ed'), (8, 'ed'), (9, 'fred')])
def test_query_one(self):
- q = Session.query(User).\
- filter(User.name == 'ed').\
- options(joinedload(User.addresses))
+ q = Session.query(User).filter(User.name == 'ed').\
+ options(joinedload(User.addresses))
+
+ q2 = serializer.loads(serializer.dumps(q, -1), users.metadata, Session)
- q2 = serializer.loads(
- serializer.dumps(q, -1),
- users.metadata, Session)
def go():
- eq_(q2.all(), [
- User(name='ed', addresses=[Address(id=2),
- Address(id=3), Address(id=4)])])
+ eq_(q2.all(),
+ [User(name='ed',
+ addresses=[Address(id=2), Address(id=3), Address(id=4)])]
+ )
self.assert_sql_count(testing.db, go, 1)
eq_(q2.join(User.addresses).filter(Address.email
== 'ed@bettyboop.com').value(func.count(literal_column('*'))), 1)
u1 = Session.query(User).get(8)
- q = Session.query(Address).filter(Address.user
- == u1).order_by(desc(Address.email))
+ q = Session.query(Address).filter(Address.user == u1)\
+ .order_by(desc(Address.email))
q2 = serializer.loads(serializer.dumps(q, -1), users.metadata,
Session)
eq_(q2.all(), [Address(email='ed@wood.com'),
@testing.requires.non_broken_pickle
def test_query_two(self):
- q = \
- Session.query(User).join(User.addresses).\
- filter(Address.email.like('%fred%'))
+ q = Session.query(User).join(User.addresses).\
+ filter(Address.email.like('%fred%'))
q2 = serializer.loads(serializer.dumps(q, -1), users.metadata,
Session)
eq_(q2.all(), [User(name='fred')])
eq_(list(q2.values(User.id, User.name)), [(9, 'fred')])
# fails too often/randomly
- #@testing.requires.non_broken_pickle
- #def test_query_three(self):
+ # @testing.requires.non_broken_pickle
+ # def test_query_three(self):
# ua = aliased(User)
# q = \
# Session.query(ua).join(ua.addresses).\
def test_aliases(self):
u7, u8, u9, u10 = Session.query(User).order_by(User.id).all()
ualias = aliased(User)
- q = Session.query(User, ualias).join(ualias, User.id
- < ualias.id).filter(User.id < 9).order_by(User.id,
- ualias.id)
+ q = Session.query(User, ualias)\
+ .join(ualias, User.id < ualias.id)\
+ .filter(User.id < 9)\
+ .order_by(User.id, ualias.id)
eq_(list(q.all()), [(u7, u8), (u7, u9), (u7, u10), (u8, u9),
(u8, u10)])
q2 = serializer.loads(serializer.dumps(q, -1), users.metadata,
def test_unicode(self):
m = MetaData()
t = Table(ue('\u6e2c\u8a66'), m,
- Column(ue('\u6e2c\u8a66_id'), Integer))
+ Column(ue('\u6e2c\u8a66_id'), Integer))
expr = select([t]).where(t.c[ue('\u6e2c\u8a66_id')] == 5)
# use OrderedDict on this one to support some tests that
# assert the order of attributes (e.g. orm/test_inspect)
mapper(User, users, properties=util.OrderedDict(
- [('addresses', relationship(Address, backref='user', order_by=addresses.c.id)),
- ('orders', relationship(Order, backref='user', order_by=orders.c.id)), # o2m, m2o
- ]
+ [('addresses', relationship(Address, backref='user',
+ order_by=addresses.c.id)),
+ ('orders', relationship(Order, backref='user',
+ order_by=orders.c.id)), # o2m, m2o
+ ]
))
mapper(Address, addresses, properties={
- 'dingaling':relationship(Dingaling, uselist=False, backref="address") #o2o
+ # o2o
+ 'dingaling': relationship(Dingaling, uselist=False,
+ backref="address")
})
mapper(Dingaling, dingalings)
mapper(Order, orders, properties={
- 'items':relationship(Item, secondary=order_items, order_by=items.c.id), #m2m
- 'address':relationship(Address), # m2o
+ # m2m
+ 'items': relationship(Item, secondary=order_items,
+ order_by=items.c.id),
+ 'address': relationship(Address), # m2o
})
mapper(Item, items, properties={
- 'keywords':relationship(Keyword, secondary=item_keywords) #m2m
+ 'keywords': relationship(Keyword, secondary=item_keywords) # m2m
})
mapper(Keyword, keywords)
mapper(Node, nodes, properties={
- 'children':relationship(Node,
- backref=backref('parent', remote_side=[nodes.c.id])
- )
+ 'children': relationship(Node,
+ backref=backref('parent',
+ remote_side=[nodes.c.id]))
})
mapper(CompositePk, composite_pk_table)
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(30), nullable=False),
test_needs_acid=True,
- test_needs_fk=True
- )
+ test_needs_fk=True)
Table('addresses', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('user_id', None, ForeignKey('users.id')),
Column('email_address', String(50), nullable=False),
test_needs_acid=True,
- test_needs_fk=True
- )
+ test_needs_fk=True)
Table('email_bounces', metadata,
Column('id', Integer, ForeignKey('addresses.id')),
- Column('bounces', Integer)
- )
+ Column('bounces', Integer))
Table('orders', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('user_id', None, ForeignKey('users.id')),
- Column('address_id', None, ForeignKey('addresses.id')),
- Column('description', String(30)),
- Column('isopen', Integer),
- test_needs_acid=True,
- test_needs_fk=True
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', None, ForeignKey('users.id')),
+ Column('address_id', None, ForeignKey('addresses.id')),
+ Column('description', String(30)),
+ Column('isopen', Integer),
+ test_needs_acid=True,
+ test_needs_fk=True)
Table("dingalings", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('address_id', None, ForeignKey('addresses.id')),
Column('data', String(30)),
test_needs_acid=True,
- test_needs_fk=True
- )
+ test_needs_fk=True)
Table('items', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('description', String(30), nullable=False),
test_needs_acid=True,
- test_needs_fk=True
- )
+ test_needs_fk=True)
Table('order_items', metadata,
Column('item_id', None, ForeignKey('items.id')),
Column('order_id', None, ForeignKey('orders.id')),
test_needs_acid=True,
- test_needs_fk=True
- )
+ test_needs_fk=True)
Table('keywords', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(30), nullable=False),
test_needs_acid=True,
- test_needs_fk=True
- )
+ test_needs_fk=True)
Table('item_keywords', metadata,
Column('item_id', None, ForeignKey('items.id')),
Column('keyword_id', None, ForeignKey('keywords.id')),
test_needs_acid=True,
- test_needs_fk=True
- )
+ test_needs_fk=True)
Table('nodes', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('parent_id', Integer, ForeignKey('nodes.id')),
- Column('data', String(30)),
- test_needs_acid=True,
- test_needs_fk=True
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('parent_id', Integer, ForeignKey('nodes.id')),
+ Column('data', String(30)),
+ test_needs_acid=True,
+ test_needs_fk=True)
Table('composite_pk_table', metadata,
- Column('i', Integer, primary_key=True),
- Column('j', Integer, primary_key=True),
- Column('k', Integer, nullable=False),
- )
+ Column('i', Integer, primary_key=True),
+ Column('j', Integer, primary_key=True),
+ Column('k', Integer, nullable=False))
@classmethod
def setup_mappers(cls):
@classmethod
def fixtures(cls):
return dict(
- users = (
+ users=(
('id', 'name'),
(7, 'jack'),
(8, 'ed'),
(10, 'chuck')
),
- addresses = (
+ addresses=(
('id', 'user_id', 'email_address'),
(1, 7, "jack@bean.com"),
(2, 8, "ed@wood.com"),
(5, 9, "fred@fred.com")
),
- email_bounces = (
+ email_bounces=(
('id', 'bounces'),
(1, 1),
(2, 0),
(5, 0)
),
- orders = (
+ orders=(
('id', 'user_id', 'description', 'isopen', 'address_id'),
(1, 7, 'order 1', 0, 1),
(2, 9, 'order 2', 0, 4),
(5, 7, 'order 5', 0, None)
),
- dingalings = (
+ dingalings=(
('id', 'address_id', 'data'),
(1, 2, 'ding 1/2'),
(2, 5, 'ding 2/5')
),
- items = (
+ items=(
('id', 'description'),
(1, 'item 1'),
(2, 'item 2'),
(5, 'item 5')
),
- order_items = (
+ order_items=(
('item_id', 'order_id'),
(1, 1),
(2, 1),
(5, 5)
),
- keywords = (
+ keywords=(
('id', 'name'),
(1, 'blue'),
(2, 'red'),
(7, 'square')
),
- item_keywords = (
+ item_keywords=(
('keyword_id', 'item_id'),
(2, 1),
(2, 2),
(6, 3)
),
- nodes = (
+ nodes=(
('id', 'parent_id', 'data'),
),
- composite_pk_table = (
+ composite_pk_table=(
('i', 'j', 'k'),
(1, 2, 3),
(2, 1, 4),
(1, 1, 5),
- (2, 2,6)
+ (2, 2, 6)
)
)
return [
User(id=7,
- addresses=[
- Address(id=1)],
+ addresses=[Address(id=1)],
orders=[
- Order(description='order 1',
- items=[
- Item(description='item 1'),
- Item(description='item 2'),
- Item(description='item 3')]),
- Order(description='order 3'),
- Order(description='order 5')]),
+ Order(description='order 1',
+ items=[
+ Item(description='item 1'),
+ Item(description='item 2'),
+ Item(description='item 3')]),
+ Order(description='order 3'),
+ Order(description='order 5')]),
User(id=8,
- addresses=[
- Address(id=2),
- Address(id=3),
- Address(id=4)]),
+ addresses=[Address(id=2), Address(id=3), Address(id=4)]),
User(id=9,
addresses=[
- Address(id=5)],
+ Address(id=5)],
orders=[
- Order(description='order 2',
- items=[
- Item(description='item 1'),
- Item(description='item 2'),
- Item(description='item 3')]),
- Order(description='order 4',
- items=[
- Item(description='item 1'),
- Item(description='item 5')])]),
+ Order(description='order 2',
+ items=[
+ Item(description='item 1'),
+ Item(description='item 2'),
+ Item(description='item 3')]),
+ Order(description='order 4',
+ items=[
+ Item(description='item 1'),
+ Item(description='item 5')])]),
User(id=10, addresses=[])]
@property
return [
User(id=7,
orders=[
- Order(id=1,
- items=[
- Item(id=1),
- Item(id=2),
- Item(id=3)]),
- Order(id=3,
- items=[
- Item(id=3),
- Item(id=4),
- Item(id=5)]),
- Order(id=5,
- items=[
- Item(id=5)])]),
+ Order(id=1,
+ items=[Item(id=1), Item(id=2), Item(id=3)]),
+ Order(id=3,
+ items=[Item(id=3), Item(id=4), Item(id=5)]),
+ Order(id=5,
+ items=[Item(id=5)])]),
User(id=8,
orders=[]),
User(id=9,
orders=[
- Order(id=2,
- items=[
- Item(id=1),
- Item(id=2),
- Item(id=3)]),
- Order(id=4,
- items=[
- Item(id=1),
- Item(id=5)])]),
+ Order(id=2,
+ items=[Item(id=1), Item(id=2), Item(id=3)]),
+ Order(id=4,
+ items=[Item(id=1), Item(id=5)])]),
User(id=10)]
@property
return [
Item(id=1,
keywords=[
- Keyword(name='red'),
- Keyword(name='big'),
- Keyword(name='round')]),
+ Keyword(name='red'),
+ Keyword(name='big'),
+ Keyword(name='round')]),
Item(id=2,
keywords=[
- Keyword(name='red'),
- Keyword(name='small'),
- Keyword(name='square')]),
+ Keyword(name='red'),
+ Keyword(name='small'),
+ Keyword(name='square')]),
Item(id=3,
keywords=[
- Keyword(name='green'),
- Keyword(name='big'),
- Keyword(name='round')]),
+ Keyword(name='green'),
+ Keyword(name='big'),
+ Keyword(name='round')]),
Item(id=4,
keywords=[]),
Item(id=5,
User, Order = self.test.classes.User, self.test.classes.Order
item1, item2, item3, item4, item5 = \
- Item(id=1,
- keywords=[
- Keyword(name='red'),
- Keyword(name='big'),
- Keyword(name='round')]),\
- Item(id=2,
- keywords=[
- Keyword(name='red'),
- Keyword(name='small'),
- Keyword(name='square')]),\
- Item(id=3,
- keywords=[
- Keyword(name='green'),
- Keyword(name='big'),
- Keyword(name='round')]),\
- Item(id=4,
- keywords=[]),\
- Item(id=5,
- keywords=[])
+ Item(id=1,
+ keywords=[
+ Keyword(name='red'),
+ Keyword(name='big'),
+ Keyword(name='round')]),\
+ Item(id=2,
+ keywords=[
+ Keyword(name='red'),
+ Keyword(name='small'),
+ Keyword(name='square')]),\
+ Item(id=3,
+ keywords=[
+ Keyword(name='green'),
+ Keyword(name='big'),
+ Keyword(name='round')]),\
+ Item(id=4, keywords=[]),\
+ Item(id=5, keywords=[])
user_result = [
- User(id=7,
- orders=[
+ User(id=7,
+ orders=[
Order(id=1,
items=[item1, item2, item3]),
Order(id=3,
items=[item3, item4, item5]),
Order(id=5,
items=[item5])]),
- User(id=8, orders=[]),
- User(id=9,
- orders=[
+ User(id=8, orders=[]),
+ User(id=9,
+ orders=[
Order(id=2,
items=[item1, item2, item3]),
Order(id=4,
items=[item1, item5])]),
- User(id=10, orders=[])]
+ User(id=10, orders=[])]
return user_result
-
-
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.testing import assert_raises, eq_
+
class Company(fixtures.ComparableEntity):
pass
+
+
class Person(fixtures.ComparableEntity):
pass
+
+
class Engineer(Person):
pass
+
+
class Manager(Person):
pass
+
+
class Boss(Manager):
pass
+
+
class Machine(fixtures.ComparableEntity):
pass
+
+
class MachineType(fixtures.ComparableEntity):
pass
+
+
class Paperwork(fixtures.ComparableEntity):
pass
global companies, paperwork, machines
companies = Table('companies', metadata,
- Column('company_id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)))
+ Column('company_id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)))
people = Table('people', metadata,
- Column('person_id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('company_id', Integer,
- ForeignKey('companies.company_id')),
- Column('name', String(50)),
- Column('type', String(30)))
+ Column('person_id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('company_id', Integer,
+ ForeignKey('companies.company_id')),
+ Column('name', String(50)),
+ Column('type', String(30)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer,
- ForeignKey('people.person_id'),
- primary_key=True),
- Column('status', String(30)),
- Column('engineer_name', String(50)),
- Column('primary_language', String(50)))
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('status', String(30)),
+ Column('engineer_name', String(50)),
+ Column('primary_language', String(50)))
machines = Table('machines', metadata,
- Column('machine_id',
- Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('engineer_id', Integer,
- ForeignKey('engineers.person_id')))
+ Column('machine_id',
+ Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('engineer_id', Integer,
+ ForeignKey('engineers.person_id')))
managers = Table('managers', metadata,
- Column('person_id', Integer,
- ForeignKey('people.person_id'),
- primary_key=True),
- Column('status', String(30)),
- Column('manager_name', String(50)))
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('status', String(30)),
+ Column('manager_name', String(50)))
boss = Table('boss', metadata,
- Column('boss_id', Integer,
- ForeignKey('managers.person_id'),
- primary_key=True),
- Column('golf_swing', String(30)))
+ Column('boss_id', Integer,
+ ForeignKey('managers.person_id'),
+ primary_key=True),
+ Column('golf_swing', String(30)))
paperwork = Table('paperwork', metadata,
- Column('paperwork_id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('description', String(50)),
- Column('person_id', Integer,
- ForeignKey('people.person_id')))
+ Column('paperwork_id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('description', String(50)),
+ Column('person_id', Integer,
+ ForeignKey('people.person_id')))
@classmethod
def insert_data(cls):
@classmethod
def setup_mappers(cls):
mapper(Company, companies,
- properties={
- 'employees':relationship(
- Person,
- order_by=people.c.person_id)})
+ properties={
+ 'employees': relationship(
+ Person,
+ order_by=people.c.person_id)})
mapper(Machine, machines)
manager_with_polymorphic = cls._get_polymorphics()
mapper(Person, people,
- with_polymorphic=person_with_polymorphic,
- polymorphic_on=people.c.type,
- polymorphic_identity='person',
- properties={
- 'paperwork':relationship(
- Paperwork,
- order_by=paperwork.c.paperwork_id)})
+ with_polymorphic=person_with_polymorphic,
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person',
+ properties={
+ 'paperwork': relationship(
+ Paperwork,
+ order_by=paperwork.c.paperwork_id)})
mapper(Engineer, engineers,
- inherits=Person,
- polymorphic_identity='engineer',
- properties={
- 'machines':relationship(
- Machine,
- order_by=machines.c.machine_id)})
+ inherits=Person,
+ polymorphic_identity='engineer',
+ properties={
+ 'machines': relationship(
+ Machine,
+ order_by=machines.c.machine_id)})
mapper(Manager, managers,
- with_polymorphic=manager_with_polymorphic,
- inherits=Person,
- polymorphic_identity='manager')
+ with_polymorphic=manager_with_polymorphic,
+ inherits=Person,
+ polymorphic_identity='manager')
mapper(Boss, boss,
- inherits=Manager,
- polymorphic_identity='boss')
+ inherits=Manager,
+ polymorphic_identity='boss')
mapper(Paperwork, paperwork)
+
class _Polymorphic(_PolymorphicFixtureBase):
select_type = ""
+
@classmethod
def _get_polymorphics(cls):
return None, None
+
class _PolymorphicPolymorphic(_PolymorphicFixtureBase):
select_type = "Polymorphic"
+
@classmethod
def _get_polymorphics(cls):
return '*', '*'
cls.tables.people, cls.tables.engineers, \
cls.tables.managers, cls.tables.boss
person_join = polymorphic_union({
- 'engineer':people.join(engineers),
- 'manager':people.join(managers)},
+ 'engineer': people.join(engineers),
+ 'manager': people.join(managers)},
None, 'pjoin')
manager_join = people.join(managers).outerjoin(boss)
person_with_polymorphic = (
def define_tables(cls, metadata):
global ta, tb, tc
ta = ["a", metadata]
- ta.append(Column('id', Integer, primary_key=True, test_needs_autoincrement=True)),
+ ta.append(Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True)),
ta.append(Column('a_data', String(30)))
- if "a"== parent and direction == MANYTOONE:
- ta.append(Column('child_id', Integer, ForeignKey("%s.id" % child, use_alter=True, name="foo")))
+ if "a" == parent and direction == MANYTOONE:
+ ta.append(Column('child_id', Integer, ForeignKey(
+ "%s.id" % child, use_alter=True, name="foo")))
elif "a" == child and direction == ONETOMANY:
- ta.append(Column('parent_id', Integer, ForeignKey("%s.id" % parent, use_alter=True, name="foo")))
+ ta.append(Column('parent_id', Integer, ForeignKey(
+ "%s.id" % parent, use_alter=True, name="foo")))
ta = Table(*ta)
tb = ["b", metadata]
- tb.append(Column('id', Integer, ForeignKey("a.id"), primary_key=True, ))
+ tb.append(Column('id', Integer, ForeignKey(
+ "a.id"), primary_key=True, ))
tb.append(Column('b_data', String(30)))
- if "b"== parent and direction == MANYTOONE:
- tb.append(Column('child_id', Integer, ForeignKey("%s.id" % child, use_alter=True, name="foo")))
+ if "b" == parent and direction == MANYTOONE:
+ tb.append(Column('child_id', Integer, ForeignKey(
+ "%s.id" % child, use_alter=True, name="foo")))
elif "b" == child and direction == ONETOMANY:
- tb.append(Column('parent_id', Integer, ForeignKey("%s.id" % parent, use_alter=True, name="foo")))
+ tb.append(Column('parent_id', Integer, ForeignKey(
+ "%s.id" % parent, use_alter=True, name="foo")))
tb = Table(*tb)
tc = ["c", metadata]
- tc.append(Column('id', Integer, ForeignKey("b.id"), primary_key=True, ))
+ tc.append(Column('id', Integer, ForeignKey(
+ "b.id"), primary_key=True, ))
tc.append(Column('c_data', String(30)))
- if "c"== parent and direction == MANYTOONE:
- tc.append(Column('child_id', Integer, ForeignKey("%s.id" % child, use_alter=True, name="foo")))
+ if "c" == parent and direction == MANYTOONE:
+ tc.append(Column('child_id', Integer, ForeignKey(
+ "%s.id" % child, use_alter=True, name="foo")))
elif "c" == child and direction == ONETOMANY:
- tc.append(Column('parent_id', Integer, ForeignKey("%s.id" % parent, use_alter=True, name="foo")))
+ tc.append(Column('parent_id', Integer, ForeignKey(
+ "%s.id" % parent, use_alter=True, name="foo")))
tc = Table(*tc)
def teardown(self):
if direction == MANYTOONE:
- parent_table = {"a":ta, "b":tb, "c": tc}[parent]
- parent_table.update(values={parent_table.c.child_id:None}).execute()
+ parent_table = {"a": ta, "b": tb, "c": tc}[parent]
+ parent_table.update(
+ values={parent_table.c.child_id: None}).execute()
elif direction == ONETOMANY:
- child_table = {"a":ta, "b":tb, "c": tc}[child]
- child_table.update(values={child_table.c.parent_id:None}).execute()
+ child_table = {"a": ta, "b": tb, "c": tc}[child]
+ child_table.update(
+ values={child_table.c.parent_id: None}).execute()
super(ABCTest, self).teardown()
-
def test_roundtrip(self):
- parent_table = {"a":ta, "b":tb, "c": tc}[parent]
- child_table = {"a":ta, "b":tb, "c": tc}[child]
+ parent_table = {"a": ta, "b": tb, "c": tc}[parent]
+ child_table = {"a": ta, "b": tb, "c": tc}[child]
remote_side = None
elif direction == ONETOMANY:
foreign_keys = [child_table.c.parent_id]
- atob = ta.c.id==tb.c.id
- btoc = tc.c.id==tb.c.id
+ atob = ta.c.id == tb.c.id
+ btoc = tc.c.id == tb.c.id
if direction == ONETOMANY:
- relationshipjoin = parent_table.c.id==child_table.c.parent_id
+ relationshipjoin = parent_table.c.id == child_table.c.parent_id
elif direction == MANYTOONE:
- relationshipjoin = parent_table.c.child_id==child_table.c.id
+ relationshipjoin = parent_table.c.child_id == child_table.c.id
if parent is child:
remote_side = [child_table.c.id]
abcjoin = polymorphic_union(
- {"a":ta.select(tb.c.id==None, from_obj=[ta.outerjoin(tb, onclause=atob)]),
- "b":ta.join(tb, onclause=atob).outerjoin(tc, onclause=btoc).select(tc.c.id==None).reduce_columns(),
- "c":tc.join(tb, onclause=btoc).join(ta, onclause=atob)
- },"type", "abcjoin"
+ {"a": ta.select(tb.c.id == None, # noqa
+ from_obj=[ta.outerjoin(tb, onclause=atob)]),
+ "b": ta.join(tb, onclause=atob).outerjoin(tc, onclause=btoc)\
+ .select(tc.c.id == None).reduce_columns(), # noqa
+ "c": tc.join(tb, onclause=btoc).join(ta, onclause=atob)},
+ "type", "abcjoin"
)
bcjoin = polymorphic_union(
- {
- "b":ta.join(tb, onclause=atob).outerjoin(tc, onclause=btoc).select(tc.c.id==None).reduce_columns(),
- "c":tc.join(tb, onclause=btoc).join(ta, onclause=atob)
- },"type", "bcjoin"
+ {"b": ta.join(tb, onclause=atob).outerjoin(tc, onclause=btoc)
+ .select(tc.c.id == None).reduce_columns(), # noqa
+ "c": tc.join(tb, onclause=btoc).join(ta, onclause=atob)},
+ "type", "bcjoin"
)
+
class A(object):
def __init__(self, name):
self.a_data = name
- class B(A):pass
- class C(B):pass
- mapper(A, ta, polymorphic_on=abcjoin.c.type, with_polymorphic=('*', abcjoin), polymorphic_identity="a")
- mapper(B, tb, polymorphic_on=bcjoin.c.type, with_polymorphic=('*', bcjoin), polymorphic_identity="b", inherits=A, inherit_condition=atob)
- mapper(C, tc, polymorphic_identity="c", inherits=B, inherit_condition=btoc)
+ class B(A):
+ pass
+
+ class C(B):
+ pass
- parent_mapper = class_mapper({ta:A, tb:B, tc:C}[parent_table])
- child_mapper = class_mapper({ta:A, tb:B, tc:C}[child_table])
+ mapper(A, ta, polymorphic_on=abcjoin.c.type, with_polymorphic=(
+ '*', abcjoin), polymorphic_identity="a")
+ mapper(B, tb, polymorphic_on=bcjoin.c.type,
+ with_polymorphic=('*', bcjoin),
+ polymorphic_identity="b",
+ inherits=A,
+ inherit_condition=atob)
+ mapper(C, tc, polymorphic_identity="c",
+ inherits=B, inherit_condition=btoc)
+
+ parent_mapper = class_mapper({ta: A, tb: B, tc: C}[parent_table])
+ child_mapper = class_mapper({ta: A, tb: B, tc: C}[child_table])
parent_class = parent_mapper.class_
child_class = child_mapper.class_
- parent_mapper.add_property("collection",
- relationship(child_mapper,
- primaryjoin=relationshipjoin,
- foreign_keys=foreign_keys,
- order_by=child_mapper.c.id,
- remote_side=remote_side, uselist=True))
+ parent_mapper.add_property(
+ "collection",
+ relationship(child_mapper,
+ primaryjoin=relationshipjoin,
+ foreign_keys=foreign_keys,
+ order_by=child_mapper.c.id,
+ remote_side=remote_side, uselist=True))
sess = create_session()
someb = B('someb')
somec = C('somec')
- #print "APPENDING", parent.__class__.__name__ , "TO", child.__class__.__name__
+ # print "APPENDING", parent.__class__.__name__ , "TO",
+ # child.__class__.__name__
sess.add(parent_obj)
parent_obj.collection.append(child_obj)
assert result2.id == parent2.id
assert result2.collection[0].id == child_obj.id
- ABCTest.__name__ = "Test%sTo%s%s" % (parent, child, (direction is ONETOMANY and "O2M" or "M2O"))
+ ABCTest.__name__ = "Test%sTo%s%s" % (
+ parent, child, (direction is ONETOMANY and "O2M" or "M2O"))
return ABCTest
+
# test all combinations of polymorphic a/b/c related to another of a/b/c
for parent in ["a", "b", "c"]:
for child in ["a", "b", "c"]:
from sqlalchemy.testing import eq_
from sqlalchemy.testing.schema import Table, Column
+
class ABCTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global a, b, c
a = Table('a', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('adata', String(30)),
- Column('type', String(30)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('adata', String(30)),
+ Column('type', String(30)),
+ )
b = Table('b', metadata,
- Column('id', Integer, ForeignKey('a.id'), primary_key=True),
- Column('bdata', String(30)))
+ Column('id', Integer, ForeignKey('a.id'), primary_key=True),
+ Column('bdata', String(30)))
c = Table('c', metadata,
- Column('id', Integer, ForeignKey('b.id'), primary_key=True),
- Column('cdata', String(30)))
+ Column('id', Integer, ForeignKey('b.id'), primary_key=True),
+ Column('cdata', String(30)))
def _make_test(fetchtype):
def test_roundtrip(self):
- class A(fixtures.ComparableEntity):pass
- class B(A):pass
- class C(B):pass
+ class A(fixtures.ComparableEntity):
+ pass
+
+ class B(A):
+ pass
+
+ class C(B):
+ pass
if fetchtype == 'union':
abc = a.outerjoin(b).outerjoin(c)
else:
abc = bc = None
- mapper(A, a, with_polymorphic=('*', abc), polymorphic_on=a.c.type, polymorphic_identity='a')
- mapper(B, b, with_polymorphic=('*', bc), inherits=A, polymorphic_identity='b')
+ mapper(A, a, with_polymorphic=('*', abc),
+ polymorphic_on=a.c.type, polymorphic_identity='a')
+ mapper(B, b, with_polymorphic=('*', bc),
+ inherits=A, polymorphic_identity='b')
mapper(C, c, inherits=B, polymorphic_identity='c')
a1 = A(adata='a1')
sess.flush()
sess.expunge_all()
- #for obj in sess.query(A).all():
+ # for obj in sess.query(A).all():
# print obj
- eq_(
- [
- A(adata='a1'),
- B(bdata='b1', adata='b1'),
- B(bdata='b2', adata='b2'),
- B(bdata='b3', adata='b3'),
- C(cdata='c1', bdata='c1', adata='c1'),
- C(cdata='c2', bdata='c2', adata='c2'),
- C(cdata='c2', bdata='c2', adata='c2'),
- ], sess.query(A).order_by(A.id).all())
+ eq_([A(adata='a1'),
+ B(bdata='b1', adata='b1'),
+ B(bdata='b2', adata='b2'),
+ B(bdata='b3', adata='b3'),
+ C(cdata='c1', bdata='c1', adata='c1'),
+ C(cdata='c2', bdata='c2', adata='c2'),
+ C(cdata='c2', bdata='c2', adata='c2')],
+ sess.query(A).order_by(A.id).all())
eq_([
B(bdata='b1', adata='b1'),
test_union = _make_test('union')
test_none = _make_test('none')
-
-
class AttrSettable(object):
def __init__(self, **kwargs):
[setattr(self, k, v) for k, v in kwargs.items()]
+
def __repr__(self):
return self.__class__.__name__ + "(%s)" % (hex(id(self)))
global people, managers
people = Table('people', metadata,
- Column('person_id', Integer, Sequence('person_id_seq',
- optional=True),
- primary_key=True),
- Column('manager_id', Integer,
- ForeignKey('managers.person_id',
- use_alter=True, name="mpid_fq")),
- Column('name', String(50)),
- Column('type', String(30)))
+ Column('person_id', Integer, Sequence('person_id_seq',
+ optional=True),
+ primary_key=True),
+ Column('manager_id', Integer,
+ ForeignKey('managers.person_id',
+ use_alter=True, name="mpid_fq")),
+ Column('name', String(50)),
+ Column('type', String(30)))
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('status', String(30)),
- Column('manager_name', String(50))
- )
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('status', String(30)),
+ Column('manager_name', String(50))
+ )
def teardown(self):
- people.update(values={people.c.manager_id:None}).execute()
+ people.update(values={people.c.manager_id: None}).execute()
super(RelationshipTest1, self).teardown()
def test_parent_refs_descendant(self):
class Person(AttrSettable):
pass
+
class Manager(Person):
pass
mapper(Person, people, properties={
- 'manager':relationship(Manager, primaryjoin=(
- people.c.manager_id ==
- managers.c.person_id),
- uselist=False, post_update=True)
+ 'manager': relationship(Manager, primaryjoin=(
+ people.c.manager_id ==
+ managers.c.person_id),
+ uselist=False, post_update=True)
})
mapper(Manager, managers, inherits=Person,
- inherit_condition=people.c.person_id==managers.c.person_id)
+ inherit_condition=people.c.person_id == managers.c.person_id)
eq_(class_mapper(Person).get_property('manager').synchronize_pairs,
- [(managers.c.person_id,people.c.manager_id)])
+ [(managers.c.person_id, people.c.manager_id)])
session = create_session()
p = Person(name='some person')
def test_descendant_refs_parent(self):
class Person(AttrSettable):
pass
+
class Manager(Person):
pass
mapper(Person, people)
mapper(Manager, managers, inherits=Person,
- inherit_condition=people.c.person_id==
- managers.c.person_id,
- properties={
- 'employee':relationship(Person, primaryjoin=(
- people.c.manager_id ==
- managers.c.person_id),
- foreign_keys=[people.c.manager_id],
- uselist=False, post_update=True)
- })
+ inherit_condition=people.c.person_id ==
+ managers.c.person_id,
+ properties={
+ 'employee': relationship(Person, primaryjoin=(
+ people.c.manager_id ==
+ managers.c.person_id),
+ foreign_keys=[people.c.manager_id],
+ uselist=False, post_update=True)})
session = create_session()
p = Person(name='some person')
m = session.query(Manager).get(m.person_id)
assert m.employee is p
+
class RelationshipTest2(fixtures.MappedTest):
"""test self-referential relationships on polymorphic mappers"""
@classmethod
def define_tables(cls, metadata):
global people, managers, data
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('type', String(30)))
+ Column('person_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('type', String(30)))
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('manager_id', Integer, ForeignKey('people.person_id')),
- Column('status', String(30)),
- )
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('manager_id', Integer,
+ ForeignKey('people.person_id')),
+ Column('status', String(30)))
data = Table('data', metadata,
- Column('person_id', Integer, ForeignKey('managers.person_id'),
- primary_key=True),
- Column('data', String(30))
- )
+ Column('person_id', Integer,
+ ForeignKey('managers.person_id'),
+ primary_key=True),
+ Column('data', String(30)))
def test_relationshiponsubclass_j1_nodata(self):
self._do_test("join1", False)
+
def test_relationshiponsubclass_j2_nodata(self):
self._do_test("join2", False)
+
def test_relationshiponsubclass_j1_data(self):
self._do_test("join1", True)
+
def test_relationshiponsubclass_j2_data(self):
self._do_test("join2", True)
+
def test_relationshiponsubclass_j3_nodata(self):
self._do_test("join3", False)
+
def test_relationshiponsubclass_j3_data(self):
self._do_test("join3", True)
def _do_test(self, jointype="join1", usedata=False):
class Person(AttrSettable):
pass
+
class Manager(Person):
pass
if jointype == "join1":
poly_union = polymorphic_union({
- 'person':people.select(people.c.type=='person'),
- 'manager':join(people, managers,
- people.c.person_id==managers.c.person_id)
+ 'person': people.select(people.c.type == 'person'),
+ 'manager': join(people, managers,
+ people.c.person_id == managers.c.person_id)
}, None)
- polymorphic_on=poly_union.c.type
+ polymorphic_on = poly_union.c.type
elif jointype == "join2":
poly_union = polymorphic_union({
- 'person':people.select(people.c.type=='person'),
- 'manager':managers.join(people,
- people.c.person_id==managers.c.person_id)
+ 'person': people.select(people.c.type == 'person'),
+ 'manager': managers.join(
+ people,
+ people.c.person_id == managers.c.person_id)
}, None)
- polymorphic_on=poly_union.c.type
+ polymorphic_on = poly_union.c.type
elif jointype == "join3":
poly_union = None
polymorphic_on = people.c.type
mapper(Data, data)
mapper(Person, people,
- with_polymorphic=('*', poly_union),
- polymorphic_identity='person',
- polymorphic_on=polymorphic_on)
+ with_polymorphic=('*', poly_union),
+ polymorphic_identity='person',
+ polymorphic_on=polymorphic_on)
if usedata:
mapper(Manager, managers,
- inherits=Person,
- inherit_condition=people.c.person_id==
- managers.c.person_id,
- polymorphic_identity='manager',
- properties={
- 'colleague':relationship(
- Person,
- primaryjoin=managers.c.manager_id==
- people.c.person_id,
- lazy='select', uselist=False),
- 'data':relationship(Data, uselist=False)
- }
- )
+ inherits=Person,
+ inherit_condition=people.c.person_id ==
+ managers.c.person_id,
+ polymorphic_identity='manager',
+ properties={
+ 'colleague': relationship(
+ Person,
+ primaryjoin=managers.c.manager_id ==
+ people.c.person_id,
+ lazy='select', uselist=False),
+ 'data': relationship(Data, uselist=False)})
else:
mapper(Manager, managers, inherits=Person,
- inherit_condition=people.c.person_id==
- managers.c.person_id,
- polymorphic_identity='manager',
- properties={
- 'colleague':relationship(Person,
- primaryjoin=managers.c.manager_id==
- people.c.person_id,
- lazy='select', uselist=False)
- }
- )
+ inherit_condition=people.c.person_id ==
+ managers.c.person_id,
+ polymorphic_identity='manager',
+ properties={
+ 'colleague': relationship(
+ Person,
+ primaryjoin=managers.c.manager_id ==
+ people.c.person_id,
+ lazy='select', uselist=False)})
sess = create_session()
p = Person(name='person1')
if usedata:
assert m.data.data == 'ms data'
+
class RelationshipTest3(fixtures.MappedTest):
"""test self-referential relationships on polymorphic mappers"""
@classmethod
def define_tables(cls, metadata):
global people, managers, data
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('colleague_id', Integer, ForeignKey('people.person_id')),
- Column('name', String(50)),
- Column('type', String(30)))
+ Column('person_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('colleague_id', Integer,
+ ForeignKey('people.person_id')),
+ Column('name', String(50)),
+ Column('type', String(30)))
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('status', String(30)),
- )
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('status', String(30)))
data = Table('data', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('data', String(30))
- )
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('data', String(30)))
+
def _generate_test(jointype="join1", usedata=False):
def _do_test(self):
class Person(AttrSettable):
pass
+
class Manager(Person):
pass
if jointype == "join1":
poly_union = polymorphic_union({
- 'manager':managers.join(people,
- people.c.person_id==managers.c.person_id),
- 'person':people.select(people.c.type=='person')
+ 'manager': managers.join(
+ people,
+ people.c.person_id == managers.c.person_id),
+ 'person': people.select(people.c.type == 'person')
}, None)
- elif jointype =="join2":
+ elif jointype == "join2":
poly_union = polymorphic_union({
- 'manager':join(people, managers,
- people.c.person_id==managers.c.person_id),
- 'person':people.select(people.c.type=='person')
+ 'manager': join(people, managers,
+ people.c.person_id == managers.c.person_id),
+ 'person': people.select(people.c.type == 'person')
}, None)
elif jointype == 'join3':
poly_union = people.outerjoin(managers)
elif jointype == "join4":
- poly_union=None
+ poly_union = None
if usedata:
mapper(Data, data)
if usedata:
mapper(Person, people,
- with_polymorphic=('*', poly_union),
- polymorphic_identity='person',
- polymorphic_on=people.c.type,
- properties={
- 'colleagues':relationship(Person,
- primaryjoin=people.c.colleague_id==
- people.c.person_id,
- remote_side=people.c.colleague_id,
- uselist=True),
- 'data':relationship(Data, uselist=False)
- }
- )
+ with_polymorphic=('*', poly_union),
+ polymorphic_identity='person',
+ polymorphic_on=people.c.type,
+ properties={
+ 'colleagues': relationship(
+ Person,
+ primaryjoin=people.c.colleague_id ==
+ people.c.person_id,
+ remote_side=people.c.colleague_id,
+ uselist=True),
+ 'data': relationship(Data, uselist=False)})
else:
mapper(Person, people,
- with_polymorphic=('*', poly_union),
- polymorphic_identity='person',
- polymorphic_on=people.c.type,
- properties={
- 'colleagues':relationship(Person,
- primaryjoin=people.c.colleague_id==people.c.person_id,
- remote_side=people.c.colleague_id, uselist=True)
- }
- )
+ with_polymorphic=('*', poly_union),
+ polymorphic_identity='person',
+ polymorphic_on=people.c.type,
+ properties={
+ 'colleagues': relationship(
+ Person,
+ primaryjoin=people.c.colleague_id ==
+ people.c.person_id,
+ remote_side=people.c.colleague_id, uselist=True)})
mapper(Manager, managers, inherits=Person,
- inherit_condition=people.c.person_id==
- managers.c.person_id,
- polymorphic_identity='manager')
+ inherit_condition=people.c.person_id ==
+ managers.c.person_id,
+ polymorphic_identity='manager')
sess = create_session()
p = Person(name='person1')
do_test = function_named(
_do_test, 'test_relationship_on_base_class_%s_%s' % (
- jointype, data and "nodata" or "data"))
+ jointype, data and "nodata" or "data"))
return do_test
+
for jointype in ["join1", "join2", "join3", "join4"]:
for data in (True, False):
func = _generate_test(jointype, data)
setattr(RelationshipTest3, func.__name__, func)
del func
+
class RelationshipTest4(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global people, engineers, managers, cars
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)))
+ Column('person_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('status', String(30)))
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('status', String(30)))
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('longer_status', String(70)))
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('longer_status', String(70)))
cars = Table('cars', metadata,
- Column('car_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('owner', Integer, ForeignKey('people.person_id')))
+ Column('car_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('owner', Integer, ForeignKey('people.person_id')))
def test_many_to_one_polymorphic(self):
"""in this test, the polymorphic union is between two subclasses, but
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
+
def __repr__(self):
return "Ordinary person %s" % self.name
+
class Engineer(Person):
def __repr__(self):
return "Engineer %s, status %s" % \
- (self.name, self.status)
+ (self.name, self.status)
+
class Manager(Person):
def __repr__(self):
return "Manager %s, status %s" % \
- (self.name, self.longer_status)
+ (self.name, self.longer_status)
+
class Car(object):
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
+
def __repr__(self):
return "Car number %d" % self.car_id
# create a union that represents both types of joins.
employee_join = polymorphic_union(
{
- 'engineer':people.join(engineers),
- 'manager':people.join(managers),
+ 'engineer': people.join(engineers),
+ 'manager': people.join(managers),
}, "type", 'employee_join')
- person_mapper = mapper(Person, people,
- with_polymorphic=('*', employee_join),
- polymorphic_on=employee_join.c.type,
- polymorphic_identity='person')
+ person_mapper = mapper(Person, people,
+ with_polymorphic=('*', employee_join),
+ polymorphic_on=employee_join.c.type,
+ polymorphic_identity='person')
engineer_mapper = mapper(Engineer, engineers,
- inherits=person_mapper,
- polymorphic_identity='engineer')
- manager_mapper = mapper(Manager, managers,
- inherits=person_mapper,
- polymorphic_identity='manager')
- car_mapper = mapper(Car, cars,
- properties= {'employee':
- relationship(person_mapper)})
+ inherits=person_mapper,
+ polymorphic_identity='engineer')
+ manager_mapper = mapper(Manager, managers,
+ inherits=person_mapper,
+ polymorphic_identity='manager')
+ car_mapper = mapper(Car, cars,
+ properties={'employee':
+ relationship(person_mapper)})
session = create_session()
# creating 5 managers named from M1 to E5
- for i in range(1,5):
+ for i in range(1, 5):
session.add(Manager(name="M%d" % i,
longer_status="YYYYYYYYY"))
# creating 5 engineers named from E1 to E5
- for i in range(1,5):
- session.add(Engineer(name="E%d" % i,status="X"))
+ for i in range(1, 5):
+ session.add(Engineer(name="E%d" % i, status="X"))
session.flush()
engineer4 = session.query(Engineer).\
- filter(Engineer.name=="E4").first()
+ filter(Engineer.name == "E4").first()
manager3 = session.query(Manager).\
- filter(Manager.name=="M3").first()
+ filter(Manager.name == "M3").first()
car1 = Car(employee=engineer4)
session.add(car1)
def go():
testcar = session.query(Car).options(
- joinedload('employee')
- ).get(car1.car_id)
+ joinedload('employee')
+ ).get(car1.car_id)
assert str(testcar.employee) == "Engineer E4, status X"
self.assert_sql_count(testing.db, go, 1)
def go():
testcar = session.query(Car).options(
- joinedload('employee')
- ).get(car1.car_id)
+ joinedload('employee')
+ ).get(car1.car_id)
assert str(testcar.employee) == "Engineer E4, status X"
self.assert_sql_count(testing.db, go, 1)
session.expunge_all()
s = session.query(Car)
- c = s.join("employee").filter(Person.name=="E4")[0]
- assert c.car_id==car1.car_id
+ c = s.join("employee").filter(Person.name == "E4")[0]
+ assert c.car_id == car1.car_id
+
class RelationshipTest5(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global people, engineers, managers, cars
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('type', String(50)))
+ Column('person_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('type', String(50)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('status', String(30)))
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('status', String(30)))
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
primary_key=True),
- Column('longer_status', String(70)))
+ Column('longer_status', String(70)))
cars = Table('cars', metadata,
- Column('car_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('owner', Integer, ForeignKey('people.person_id')))
+ Column('car_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('owner', Integer, ForeignKey('people.person_id')))
def test_eager_empty(self):
"""test parent object with child relationship to an inheriting mapper,
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
+
def __repr__(self):
return "Ordinary person %s" % self.name
+
class Engineer(Person):
def __repr__(self):
return "Engineer %s, status %s" % \
- (self.name, self.status)
+ (self.name, self.status)
+
class Manager(Person):
def __repr__(self):
return "Manager %s, status %s" % \
- (self.name, self.longer_status)
+ (self.name, self.longer_status)
+
class Car(object):
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
+
def __repr__(self):
return "Car number %d" % self.car_id
- person_mapper = mapper(Person, people,
- polymorphic_on=people.c.type,
- polymorphic_identity='person')
+ person_mapper = mapper(Person, people,
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person')
engineer_mapper = mapper(Engineer, engineers,
- inherits=person_mapper,
- polymorphic_identity='engineer')
- manager_mapper = mapper(Manager, managers,
- inherits=person_mapper,
- polymorphic_identity='manager')
- car_mapper = mapper(Car, cars, properties= {
- 'manager':relationship(
- manager_mapper, lazy='joined')})
+ inherits=person_mapper,
+ polymorphic_identity='engineer')
+ manager_mapper = mapper(Manager, managers,
+ inherits=person_mapper,
+ polymorphic_identity='manager')
+ car_mapper = mapper(Car, cars, properties={
+ 'manager': relationship(
+ manager_mapper, lazy='joined')})
sess = create_session()
car1 = Car()
assert carlist[0].manager is None
assert carlist[1].manager.person_id == car2.manager.person_id
+
class RelationshipTest6(fixtures.MappedTest):
"""test self-referential relationships on a single joined-table
inheritance mapper"""
def define_tables(cls, metadata):
global people, managers, data
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- )
+ Column('person_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ )
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
primary_key=True),
- Column('colleague_id', Integer,
- ForeignKey('managers.person_id')),
- Column('status', String(30)),
- )
+ Column('colleague_id', Integer,
+ ForeignKey('managers.person_id')),
+ Column('status', String(30)),
+ )
def test_basic(self):
class Person(AttrSettable):
pass
+
class Manager(Person):
pass
mapper(Person, people)
mapper(Manager, managers, inherits=Person,
- inherit_condition=people.c.person_id==\
- managers.c.person_id,
- properties={
- 'colleague':relationship(Manager,
- primaryjoin=managers.c.colleague_id==\
- managers.c.person_id,
- lazy='select', uselist=False)
- }
- )
+ inherit_condition=people.c.person_id ==
+ managers.c.person_id,
+ properties={
+ 'colleague': relationship(
+ Manager,
+ primaryjoin=managers.c.colleague_id ==
+ managers.c.person_id,
+ lazy='select', uselist=False)})
sess = create_session()
m = Manager(name='manager1')
- m2 =Manager(name='manager2')
+ m2 = Manager(name='manager2')
m.colleague = m2
sess.add(m)
sess.flush()
m2 = sess.query(Manager).get(m2.person_id)
assert m.colleague is m2
+
class RelationshipTest7(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global people, engineers, managers, cars, offroad_cars
cars = Table('cars', metadata,
- Column('car_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(30)))
+ Column('car_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)))
offroad_cars = Table('offroad_cars', metadata,
- Column('car_id',Integer, ForeignKey('cars.car_id'),
- nullable=False,primary_key=True))
+ Column('car_id', Integer,
+ ForeignKey('cars.car_id'),
+ nullable=False, primary_key=True))
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('car_id', Integer, ForeignKey('cars.car_id'),
- nullable=False),
- Column('name', String(50)))
+ Column('person_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('car_id', Integer, ForeignKey('cars.car_id'),
+ nullable=False),
+ Column('name', String(50)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('field', String(30)))
-
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('field', String(30)))
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('category', String(70)))
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('category', String(70)))
def test_manytoone_lazyload(self):
"""test that lazy load clause to a polymorphic child mapper generates
class Engineer(Person):
def __repr__(self):
return "Engineer %s, field %s" % (self.name,
- self.field)
+ self.field)
class Manager(Person):
def __repr__(self):
return "Manager %s, category %s" % (self.name,
- self.category)
+ self.category)
class Car(PersistentObject):
def __repr__(self):
return "Car number %d, name %s" % \
- (self.car_id, self.name)
+ (self.car_id, self.name)
class Offraod_Car(Car):
def __repr__(self):
return "Offroad Car number %d, name %s" % \
- (self.car_id,self.name)
+ (self.car_id, self.name)
employee_join = polymorphic_union(
- {
- 'engineer':people.join(engineers),
- 'manager':people.join(managers),
- }, "type", 'employee_join')
+ {
+ 'engineer': people.join(engineers),
+ 'manager': people.join(managers),
+ }, "type", 'employee_join')
car_join = polymorphic_union(
{
- 'car' : cars.outerjoin(offroad_cars).\
- select(offroad_cars.c.car_id == None).reduce_columns(),
- 'offroad' : cars.join(offroad_cars)
+ 'car': cars.outerjoin(offroad_cars).
+ select(offroad_cars.c.car_id == None).reduce_columns(), # noqa
+ 'offroad': cars.join(offroad_cars)
}, "type", 'car_join')
- car_mapper = mapper(Car, cars,
- with_polymorphic=('*', car_join) ,polymorphic_on=car_join.c.type,
- polymorphic_identity='car',
- )
+ car_mapper = mapper(Car, cars,
+ with_polymorphic=('*', car_join),
+ polymorphic_on=car_join.c.type,
+ polymorphic_identity='car')
offroad_car_mapper = mapper(Offraod_Car, offroad_cars,
- inherits=car_mapper, polymorphic_identity='offroad')
+ inherits=car_mapper,
+ polymorphic_identity='offroad')
person_mapper = mapper(Person, people,
- with_polymorphic=('*', employee_join),
- polymorphic_on=employee_join.c.type,
- polymorphic_identity='person',
- properties={
- 'car':relationship(car_mapper)
- })
+ with_polymorphic=('*', employee_join),
+ polymorphic_on=employee_join.c.type,
+ polymorphic_identity='person',
+ properties={
+ 'car': relationship(car_mapper)})
engineer_mapper = mapper(Engineer, engineers,
- inherits=person_mapper,
- polymorphic_identity='engineer')
- manager_mapper = mapper(Manager, managers,
- inherits=person_mapper,
- polymorphic_identity='manager')
+ inherits=person_mapper,
+ polymorphic_identity='engineer')
+ manager_mapper = mapper(Manager, managers,
+ inherits=person_mapper,
+ polymorphic_identity='manager')
session = create_session()
- basic_car=Car(name="basic")
- offroad_car=Offraod_Car(name="offroad")
+ basic_car = Car(name="basic")
+ offroad_car = Offraod_Car(name="offroad")
- for i in range(1,4):
- if i%2:
- car=Car()
+ for i in range(1, 4):
+ if i % 2:
+ car = Car()
else:
- car=Offraod_Car()
+ car = Offraod_Car()
session.add(Manager(name="M%d" % i,
- category="YYYYYYYYY",car=car))
- session.add(Engineer(name="E%d" % i,field="X",car=car))
+ category="YYYYYYYYY", car=car))
+ session.add(Engineer(name="E%d" % i, field="X", car=car))
session.flush()
session.expunge_all()
for p in r:
assert p.car_id == p.car.car_id
+
class RelationshipTest8(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global taggable, users
taggable = Table('taggable', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('type', String(30)),
- Column('owner_id', Integer, ForeignKey('taggable.id')),
+ Column('owner_id', Integer,
+ ForeignKey('taggable.id')),
)
- users = Table ('users', metadata,
- Column('id', Integer, ForeignKey('taggable.id'),
- primary_key=True),
- Column('data', String(50)),
- )
+ users = Table('users', metadata,
+ Column('id', Integer, ForeignKey('taggable.id'),
+ primary_key=True),
+ Column('data', String(50)))
def test_selfref_onjoined(self):
class Taggable(fixtures.ComparableEntity):
class User(Taggable):
pass
- mapper( Taggable, taggable,
- polymorphic_on=taggable.c.type,
- polymorphic_identity='taggable',
- properties = {
- 'owner' : relationship (User,
- primaryjoin=taggable.c.owner_id ==taggable.c.id,
- remote_side=taggable.c.id
- ),
- })
-
+ mapper(Taggable, taggable,
+ polymorphic_on=taggable.c.type,
+ polymorphic_identity='taggable',
+ properties={
+ 'owner': relationship(
+ User,
+ primaryjoin=taggable.c.owner_id == taggable.c.id,
+ remote_side=taggable.c.id)})
mapper(User, users, inherits=Taggable,
- polymorphic_identity='user',
- inherit_condition=users.c.id == taggable.c.id,
- )
-
+ polymorphic_identity='user',
+ inherit_condition=users.c.id == taggable.c.id)
u1 = User(data='u1')
t1 = Taggable(owner=u1)
[User(data='u1'), Taggable(owner=User(data='u1'))]
)
+
class GenerativeTest(fixtures.TestBase, AssertsExecutionResults):
@classmethod
def setup_class(cls):
metadata = MetaData(testing.db)
# table definitions
status = Table('status', metadata,
- Column('status_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(20)))
-
- people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('status_id', Integer, ForeignKey('status.status_id'),
- nullable=False),
- Column('name', String(50)))
-
- engineers = Table('engineers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('field', String(30)))
-
- managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('category', String(70)))
-
- cars = Table('cars', metadata,
- Column('car_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('status_id', Integer, ForeignKey('status.status_id'),
- nullable=False),
- Column('owner', Integer, ForeignKey('people.person_id'),
- nullable=False))
+ Column('status_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(20)))
+
+ people = Table(
+ 'people', metadata,
+ Column(
+ 'person_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column(
+ 'status_id', Integer, ForeignKey('status.status_id'),
+ nullable=False),
+ Column('name', String(50)))
+
+ engineers = Table(
+ 'engineers', metadata,
+ Column(
+ 'person_id', Integer, ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('field', String(30)))
+
+ managers = Table(
+ 'managers', metadata,
+ Column(
+ 'person_id', Integer, ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('category', String(70)))
+
+ cars = Table(
+ 'cars', metadata,
+ Column(
+ 'car_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column(
+ 'status_id', Integer, ForeignKey('status.status_id'),
+ nullable=False),
+ Column(
+ 'owner', Integer, ForeignKey('people.person_id'),
+ nullable=False))
metadata.create_all()
@classmethod
def teardown_class(cls):
metadata.drop_all()
+
def teardown(self):
clear_mappers()
for t in reversed(metadata.sorted_tables):
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
+
class Status(PersistentObject):
def __repr__(self):
return "Status %s" % self.name
+
class Person(PersistentObject):
def __repr__(self):
return "Ordinary person %s" % self.name
+
class Engineer(Person):
def __repr__(self):
return "Engineer %s, field %s, status %s" % (
- self.name, self.field, self.status)
+ self.name, self.field, self.status)
+
class Manager(Person):
def __repr__(self):
return "Manager %s, category %s, status %s" % (
- self.name, self.category, self.status)
+ self.name, self.category, self.status)
+
class Car(PersistentObject):
def __repr__(self):
return "Car number %d" % self.car_id
# create a union that represents both types of joins.
employee_join = polymorphic_union(
{
- 'engineer':people.join(engineers),
- 'manager':people.join(managers),
+ 'engineer': people.join(engineers),
+ 'manager': people.join(managers),
}, "type", 'employee_join')
- status_mapper = mapper(Status, status)
- person_mapper = mapper(Person, people,
- with_polymorphic=('*', employee_join),
- polymorphic_on=employee_join.c.type,
- polymorphic_identity='person',
- properties={'status':relationship(status_mapper)})
+ status_mapper = mapper(Status, status)
+ person_mapper = mapper(
+ Person, people, with_polymorphic=('*', employee_join),
+ polymorphic_on=employee_join.c.type, polymorphic_identity='person',
+ properties={'status': relationship(status_mapper)})
engineer_mapper = mapper(Engineer, engineers,
- inherits=person_mapper,
- polymorphic_identity='engineer')
- manager_mapper = mapper(Manager, managers,
- inherits=person_mapper,
- polymorphic_identity='manager')
- car_mapper = mapper(Car, cars, properties= {
- 'employee':relationship(person_mapper),
- 'status':relationship(status_mapper)})
+ inherits=person_mapper,
+ polymorphic_identity='engineer')
+ manager_mapper = mapper(Manager, managers,
+ inherits=person_mapper,
+ polymorphic_identity='manager')
+ car_mapper = mapper(Car, cars, properties={
+ 'employee': relationship(person_mapper),
+ 'status': relationship(status_mapper)})
session = create_session()
# creating 5 managers named from M1 to M5
# and 5 engineers named from E1 to E5
# M4, M5, E4 and E5 are dead
- for i in range(1,5):
- if i<4:
- st=active
+ for i in range(1, 5):
+ if i < 4:
+ st = active
else:
- st=dead
+ st = dead
session.add(Manager(name="M%d" % i,
- category="YYYYYYYYY",status=st))
- session.add(Engineer(name="E%d" % i,field="X",status=st))
+ category="YYYYYYYYY", status=st))
+ session.add(Engineer(name="E%d" % i, field="X", status=st))
session.flush()
# get E4
- engineer4 = session.query(engineer_mapper).\
- filter_by(name="E4").one()
+ engineer4 = session.query(engineer_mapper).filter_by(name="E4").one()
# create 2 cars for E4, one active and one dead
- car1 = Car(employee=engineer4,status=active)
- car2 = Car(employee=engineer4,status=dead)
+ car1 = Car(employee=engineer4, status=active)
+ car2 = Car(employee=engineer4, status=dead)
session.add(car1)
session.add(car2)
session.flush()
# this particular adapt used to cause a recursion overflow;
# added here for testing
- e = exists([Car.owner], Car.owner==employee_join.c.person_id)
+ e = exists([Car.owner], Car.owner == employee_join.c.person_id)
Query(Person)._adapt_clause(employee_join, False, False)
r = session.query(Person).filter(Person.name.like('%2')).\
- join('status').\
- filter_by(name="active").\
- order_by(Person.person_id)
+ join('status').\
+ filter_by(name="active").\
+ order_by(Person.person_id)
eq_(str(list(r)), "[Manager M2, category YYYYYYYYY, status "
- "Status active, Engineer E2, field X, "
- "status Status active]")
+ "Status active, Engineer E2, field X, "
+ "status Status active]")
r = session.query(Engineer).join('status').\
- filter(Person.name.in_(
- ['E2', 'E3', 'E4', 'M4', 'M2', 'M1']) &
- (status.c.name=="active")).order_by(Person.name)
+ filter(Person.name.in_(
+ ['E2', 'E3', 'E4', 'M4', 'M2', 'M1']) &
+ (status.c.name == "active")).order_by(Person.name)
eq_(str(list(r)), "[Engineer E2, field X, status Status "
- "active, Engineer E3, field X, status "
- "Status active]")
+ "active, Engineer E3, field X, status "
+ "Status active]")
r = session.query(Person).filter(exists([1],
- Car.owner==Person.person_id))
+ Car.owner == Person.person_id))
eq_(str(list(r)), "[Engineer E4, field X, status Status dead]")
+
class MultiLevelTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global table_Employee, table_Engineer, table_Manager
- table_Employee = Table( 'Employee', metadata,
- Column( 'name', type_= String(100), ),
- Column( 'id', primary_key= True, type_= Integer,
- test_needs_autoincrement=True),
- Column( 'atype', type_= String(100), ),
- )
-
- table_Engineer = Table( 'Engineer', metadata,
- Column( 'machine', type_= String(100), ),
- Column( 'id', Integer, ForeignKey( 'Employee.id', ),
- primary_key= True),
- )
-
- table_Manager = Table( 'Manager', metadata,
- Column( 'duties', type_= String(100), ),
- Column( 'id', Integer, ForeignKey( 'Engineer.id', ),
- primary_key= True, ),
- )
+ table_Employee = Table('Employee', metadata,
+ Column('name', type_=String(100), ),
+ Column('id', primary_key=True, type_=Integer,
+ test_needs_autoincrement=True),
+ Column('atype', type_=String(100), ),
+ )
+
+ table_Engineer = Table(
+ 'Engineer', metadata, Column('machine', type_=String(100),),
+ Column(
+ 'id', Integer, ForeignKey('Employee.id',),
+ primary_key=True),)
+
+ table_Manager = Table('Manager', metadata,
+ Column('duties', type_=String(100),),
+ Column('id', Integer, ForeignKey('Engineer.id'),
+ primary_key=True))
def test_threelevels(self):
- class Employee( object):
- def set( me, **kargs):
- for k,v in kargs.items(): setattr( me, k, v)
+ class Employee(object):
+ def set(me, **kargs):
+ for k, v in kargs.items():
+ setattr(me, k, v)
return me
+
def __str__(me):
- return str(me.__class__.__name__)+':'+str(me.name)
+ return str(me.__class__.__name__) + ':' + str(me.name)
__repr__ = __str__
+
class Engineer(Employee):
pass
+
class Manager(Engineer):
pass
- pu_Employee = polymorphic_union( {
- 'Manager': table_Employee.join(
- table_Engineer).join( table_Manager),
- 'Engineer': select([table_Employee,
- table_Engineer.c.machine],
- table_Employee.c.atype == 'Engineer',
- from_obj=[
- table_Employee.join(table_Engineer)]),
- 'Employee': table_Employee.select(
- table_Employee.c.atype == 'Employee'),
- }, None, 'pu_employee', )
-
- mapper_Employee = mapper( Employee, table_Employee,
- polymorphic_identity= 'Employee',
- polymorphic_on= pu_Employee.c.atype,
- with_polymorphic=('*', pu_Employee),
- )
-
- pu_Engineer = polymorphic_union( {
- 'Manager': table_Employee.join( table_Engineer).
- join( table_Manager),
- 'Engineer': select([table_Employee,
- table_Engineer.c.machine],
- table_Employee.c.atype == 'Engineer',
- from_obj=[
- table_Employee.join(table_Engineer)
- ]),
- }, None, 'pu_engineer', )
- mapper_Engineer = mapper( Engineer, table_Engineer,
- inherit_condition= table_Engineer.c.id == \
- table_Employee.c.id,
- inherits= mapper_Employee,
- polymorphic_identity= 'Engineer',
- polymorphic_on= pu_Engineer.c.atype,
- with_polymorphic=('*', pu_Engineer),
- )
-
- mapper_Manager = mapper( Manager, table_Manager,
- inherit_condition= table_Manager.c.id == \
- table_Engineer.c.id,
- inherits= mapper_Engineer,
- polymorphic_identity= 'Manager',
- )
-
- a = Employee().set( name= 'one')
- b = Engineer().set( egn= 'two', machine= 'any')
- c = Manager().set( name= 'head', machine= 'fast',
- duties= 'many')
+ pu_Employee = polymorphic_union({
+ 'Manager': table_Employee.join(
+ table_Engineer).join(table_Manager),
+ 'Engineer': select([table_Employee,
+ table_Engineer.c.machine],
+ table_Employee.c.atype == 'Engineer',
+ from_obj=[
+ table_Employee.join(table_Engineer)]),
+ 'Employee': table_Employee.select(
+ table_Employee.c.atype == 'Employee')
+ }, None, 'pu_employee')
+
+ mapper_Employee = mapper(Employee, table_Employee,
+ polymorphic_identity='Employee',
+ polymorphic_on=pu_Employee.c.atype,
+ with_polymorphic=('*', pu_Employee),
+ )
+
+ pu_Engineer = polymorphic_union({
+ 'Manager': table_Employee.join(table_Engineer).
+ join(table_Manager),
+ 'Engineer': select([table_Employee,
+ table_Engineer.c.machine],
+ table_Employee.c.atype == 'Engineer',
+ from_obj=[
+ table_Employee.join(table_Engineer)])
+ }, None, 'pu_engineer')
+ mapper_Engineer = mapper(Engineer, table_Engineer,
+ inherit_condition=table_Engineer.c.id ==
+ table_Employee.c.id,
+ inherits=mapper_Employee,
+ polymorphic_identity='Engineer',
+ polymorphic_on=pu_Engineer.c.atype,
+ with_polymorphic=('*', pu_Engineer))
+
+ mapper_Manager = mapper(Manager, table_Manager,
+ inherit_condition=table_Manager.c.id ==
+ table_Engineer.c.id,
+ inherits=mapper_Engineer,
+ polymorphic_identity='Manager')
+
+ a = Employee().set(name='one')
+ b = Engineer().set(egn='two', machine='any')
+ c = Manager().set(name='head', machine='fast', duties='many')
session = create_session()
session.add(a)
session.add(b)
session.add(c)
session.flush()
- assert set(session.query(Employee).all()) == set([a,b,c])
- assert set(session.query( Engineer).all()) == set([b,c])
- assert session.query( Manager).all() == [c]
+ assert set(session.query(Employee).all()) == set([a, b, c])
+ assert set(session.query(Engineer).all()) == set([b, c])
+ assert session.query(Manager).all() == [c]
+
class ManyToManyPolyTest(fixtures.MappedTest):
@classmethod
base_item_table = Table(
'base_item', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('child_name', String(255), default=None))
item_table = Table(
'item', metadata,
Column('id', Integer, ForeignKey('base_item.id'),
- primary_key=True),
+ primary_key=True),
Column('dummy', Integer, default=0))
base_item_collection_table = Table(
collection_table = Table(
'collection', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('name', Unicode(255)))
def test_pjoin_compile(self):
arent attempted to be matched to the target polymorphic
selectable"""
- class BaseItem(object): pass
- class Item(BaseItem): pass
- class Collection(object): pass
- item_join = polymorphic_union( {
- 'BaseItem':base_item_table.select(
- base_item_table.c.child_name=='BaseItem'),
- 'Item':base_item_table.join(item_table),
- }, None, 'item_join')
+ class BaseItem(object):
+ pass
+
+ class Item(BaseItem):
+ pass
+
+ class Collection(object):
+ pass
+ item_join = polymorphic_union({
+ 'BaseItem': base_item_table.select(
+ base_item_table.c.child_name == 'BaseItem'),
+ 'Item': base_item_table.join(item_table),
+ }, None, 'item_join')
mapper(
- BaseItem, base_item_table,
- with_polymorphic=('*', item_join),
+ BaseItem, base_item_table, with_polymorphic=('*', item_join),
polymorphic_on=base_item_table.c.child_name,
polymorphic_identity='BaseItem',
- properties=dict(collections=relationship(Collection,
- secondary=base_item_collection_table,
- backref="items")))
+ properties=dict(
+ collections=relationship(
+ Collection, secondary=base_item_collection_table,
+ backref="items")))
mapper(
Item, item_table,
class_mapper(BaseItem)
+
class CustomPKTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global t1, t2
t1 = Table('t1', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(30), nullable=False),
- Column('data', String(30)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(30), nullable=False),
+ Column('data', String(30)))
# note that the primary key column in t2 is named differently
t2 = Table('t2', metadata,
- Column('t2id', Integer, ForeignKey('t1.id'), primary_key=True),
- Column('t2data', String(30)))
+ Column('t2id', Integer, ForeignKey(
+ 't1.id'), primary_key=True),
+ Column('t2data', String(30)))
def test_custompk(self):
"""test that the primary_key attribute is propagated to the
polymorphic mapper"""
- class T1(object):pass
- class T2(T1):pass
+ class T1(object):
+ pass
+
+ class T2(T1):
+ pass
- # create a polymorphic union with the select against the base table first.
- # with the join being second, the alias of the union will
- # pick up two "primary key" columns. technically the alias should have a
- # 2-col pk in any case but the leading select has a NULL for the "t2id" column
+ # create a polymorphic union with the select against the base table
+ # first. with the join being second, the alias of the union will
+ # pick up two "primary key" columns. technically the alias should have
+ # a 2-col pk in any case but the leading select has a NULL for the
+ # "t2id" column
d = util.OrderedDict()
- d['t1'] = t1.select(t1.c.type=='t1')
+ d['t1'] = t1.select(t1.c.type == 't1')
d['t2'] = t1.join(t2)
pjoin = polymorphic_union(d, None, 'pjoin')
mapper(T1, t1, polymorphic_on=t1.c.type,
- polymorphic_identity='t1',
- with_polymorphic=('*', pjoin),
- primary_key=[pjoin.c.id])
+ polymorphic_identity='t1',
+ with_polymorphic=('*', pjoin),
+ primary_key=[pjoin.c.id])
mapper(T2, t2, inherits=T1, polymorphic_identity='t2')
ot1 = T1()
ot2 = T2()
"""test that a composite primary key attribute formed by a join
is "collapsed" into its minimal columns"""
- class T1(object):pass
- class T2(T1):pass
+ class T1(object):
+ pass
+
+ class T2(T1):
+ pass
- # create a polymorphic union with the select against the base table first.
- # with the join being second, the alias of the union will
- # pick up two "primary key" columns. technically the alias should have a
- # 2-col pk in any case but the leading select has a NULL for the "t2id" column
+ # create a polymorphic union with the select against the base table
+ # first. with the join being second, the alias of the union will
+ # pick up two "primary key" columns. technically the alias should have
+ # a 2-col pk in any case but the leading select has a NULL for the
+ # "t2id" column
d = util.OrderedDict()
- d['t1'] = t1.select(t1.c.type=='t1')
+ d['t1'] = t1.select(t1.c.type == 't1')
d['t2'] = t1.join(t2)
pjoin = polymorphic_union(d, None, 'pjoin')
mapper(T1, t1, polymorphic_on=t1.c.type,
- polymorphic_identity='t1',
- with_polymorphic=('*', pjoin))
+ polymorphic_identity='t1',
+ with_polymorphic=('*', pjoin))
mapper(T2, t2, inherits=T1, polymorphic_identity='t2')
assert len(class_mapper(T1).primary_key) == 1
ot1.data = 'hi'
sess.flush()
+
class InheritingEagerTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global people, employees, tags, peopleTags
people = Table('people', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('_type', String(30), nullable=False),
- )
-
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('_type', String(30), nullable=False))
employees = Table('employees', metadata,
- Column('id', Integer, ForeignKey('people.id'),
- primary_key=True),
- )
+ Column('id', Integer, ForeignKey('people.id'),
+ primary_key=True))
tags = Table('tags', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('label', String(50), nullable=False),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('label', String(50), nullable=False))
peopleTags = Table('peopleTags', metadata,
- Column('person_id', Integer,
- ForeignKey('people.id')),
- Column('tag_id', Integer,
- ForeignKey('tags.id')),
- )
+ Column('person_id', Integer,
+ ForeignKey('people.id')),
+ Column('tag_id', Integer,
+ ForeignKey('tags.id')))
def test_basic(self):
"""test that Query uses the full set of mapper._eager_loaders
pass
class Employee(Person):
- def __init__(self, name='bob'):
- self.name = name
+ def __init__(self, name='bob'):
+ self.name = name
class Tag(fixtures.ComparableEntity):
- def __init__(self, label):
- self.label = label
+ def __init__(self, label):
+ self.label = label
mapper(Person, people, polymorphic_on=people.c._type,
- polymorphic_identity='person', properties={
- 'tags': relationship(Tag,
- secondary=peopleTags,
- backref='people', lazy='joined')
- })
+ polymorphic_identity='person', properties={
+ 'tags': relationship(Tag,
+ secondary=peopleTags,
+ backref='people', lazy='joined')})
mapper(Employee, employees, inherits=Person,
- polymorphic_identity='employee')
+ polymorphic_identity='employee')
mapper(Tag, tags)
session = create_session()
session.flush()
session.expunge_all()
- # query from Employee with limit, query needs to apply eager limiting subquery
+ # query from Employee with limit, query needs to apply eager limiting
+ # subquery
instance = session.query(Employee).\
- filter_by(id=1).limit(1).first()
+ filter_by(id=1).limit(1).first()
assert len(instance.tags) == 2
+
class MissingPolymorphicOnTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
tablea = Table('tablea', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('adata', String(50)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('adata', String(50)))
tableb = Table('tableb', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('aid', Integer, ForeignKey('tablea.id')),
- Column('data', String(50)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('aid', Integer, ForeignKey('tablea.id')),
+ Column('data', String(50)))
tablec = Table('tablec', metadata,
- Column('id', Integer, ForeignKey('tablea.id'),
- primary_key=True),
- Column('cdata', String(50)),
- )
+ Column('id', Integer, ForeignKey('tablea.id'),
+ primary_key=True),
+ Column('cdata', String(50)))
tabled = Table('tabled', metadata,
- Column('id', Integer, ForeignKey('tablec.id'),
- primary_key=True),
- Column('ddata', String(50)),
- )
+ Column('id', Integer, ForeignKey('tablec.id'),
+ primary_key=True),
+ Column('ddata', String(50)))
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
+
class B(cls.Comparable):
pass
+
class C(A):
pass
+
class D(C):
pass
A, B, C, D = self.classes.A, self.classes.B, self.classes.C, \
self.classes.D
poly_select = select(
- [tablea, tableb.c.data.label('discriminator')],
- from_obj=tablea.join(tableb)).alias('poly')
+ [tablea, tableb.c.data.label('discriminator')],
+ from_obj=tablea.join(tableb)).alias('poly')
mapper(B, tableb)
mapper(A, tablea,
- with_polymorphic=('*', poly_select),
- polymorphic_on=poly_select.c.discriminator,
- properties={
- 'b':relationship(B, uselist=False)
- })
- mapper(C, tablec, inherits=A,polymorphic_identity='c')
+ with_polymorphic=('*', poly_select),
+ polymorphic_on=poly_select.c.discriminator,
+ properties={'b': relationship(B, uselist=False)})
+ mapper(C, tablec, inherits=A, polymorphic_identity='c')
mapper(D, tabled, inherits=C, polymorphic_identity='d')
c = C(cdata='c1', adata='a1', b=B(data='c'))
]
)
+
class JoinedInhAdjacencyTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('people', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(30)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(30)))
Table('users', metadata,
Column('id', Integer, ForeignKey('people.id'),
- primary_key=True),
- Column('supervisor_id', Integer, ForeignKey('people.id')),
- )
+ primary_key=True),
+ Column('supervisor_id', Integer, ForeignKey('people.id')))
Table('dudes', metadata,
- Column('id', Integer, ForeignKey('users.id'),
- primary_key=True),
- )
+ Column('id', Integer, ForeignKey('users.id'), primary_key=True))
@classmethod
def setup_classes(cls):
Person, User = self.classes.Person, self.classes.User
mapper(Person, people,
- polymorphic_on=people.c.type,
- polymorphic_identity='person',
- )
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person')
mapper(User, users, inherits=Person,
- polymorphic_identity='user',
- inherit_condition=(users.c.id == people.c.id),
- properties = {
- 'supervisor': relationship(Person,
- primaryjoin=users.c.supervisor_id==people.c.id,
- ),
- }
- )
+ polymorphic_identity='user',
+ inherit_condition=(users.c.id == people.c.id),
+ properties={
+ 'supervisor': relationship(
+ Person,
+ primaryjoin=users.c.supervisor_id == people.c.id)})
assert User.supervisor.property.direction is MANYTOONE
self._roundtrip()
Person, User = self.classes.Person, self.classes.User
mapper(Person, people,
- polymorphic_on=people.c.type,
- polymorphic_identity='person',
- )
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person')
mapper(User, users, inherits=Person,
- polymorphic_identity='user',
- inherit_condition=(users.c.id == people.c.id),
- properties = {
- 'supervisor': relationship(User,
- primaryjoin=users.c.supervisor_id==people.c.id,
- remote_side=people.c.id,
- foreign_keys=[users.c.supervisor_id]
- ),
- }
- )
+ polymorphic_identity='user',
+ inherit_condition=(users.c.id == people.c.id),
+ properties={
+ 'supervisor': relationship(
+ User,
+ primaryjoin=users.c.supervisor_id == people.c.id,
+ remote_side=people.c.id,
+ foreign_keys=[
+ users.c.supervisor_id])})
assert User.supervisor.property.direction is MANYTOONE
self._roundtrip()
self.classes.Dude
mapper(Person, people,
- polymorphic_on=people.c.type,
- polymorphic_identity='person',
- )
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person')
mapper(User, users, inherits=Person,
- polymorphic_identity='user',
- inherit_condition=(users.c.id == people.c.id),
- )
+ polymorphic_identity='user',
+ inherit_condition=(users.c.id == people.c.id))
mapper(Dude, dudes, inherits=User,
- polymorphic_identity='dude',
- inherit_condition=(dudes.c.id==users.c.id),
- properties={
- 'supervisor': relationship(User,
- primaryjoin=users.c.supervisor_id==people.c.id,
- remote_side=people.c.id,
- foreign_keys=[users.c.supervisor_id]
- ),
- }
- )
+ polymorphic_identity='dude',
+ inherit_condition=(dudes.c.id == users.c.id),
+ properties={
+ 'supervisor': relationship(
+ User,
+ primaryjoin=users.c.supervisor_id == people.c.id,
+ remote_side=people.c.id,
+ foreign_keys=[
+ users.c.supervisor_id])})
assert Dude.supervisor.property.direction is MANYTOONE
self._dude_roundtrip()
@classmethod
def setup_classes(cls):
Base = cls.DeclarativeBasic
+
class A(Base):
__tablename__ = "a"
id = Column(Integer, ForeignKey('a.id'), primary_key=True)
b_id = Column(Integer, ForeignKey('b.id'))
- b = relationship("B", primaryjoin=b_id==B.id)
+ b = relationship("B", primaryjoin=b_id == B.id)
class D(Base):
__tablename__ = "d"
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
b_id = Column(Integer, ForeignKey('b.id'))
class E(Base):
__tablename__ = 'e'
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
b_id = Column(Integer, ForeignKey('b.id'))
@testing.fails_on("oracle",
- "seems like oracle's query engine can't "
- "handle this, not clear if there's an "
- "expression-level bug on our end though")
+ "seems like oracle's query engine can't "
+ "handle this, not clear if there's an "
+ "expression-level bug on our end though")
def test_join_w_eager_w_any(self):
- A, B, C, D, E = self.classes.A, self.classes.B, \
- self.classes.C, self.classes.D, \
- self.classes.E
+ A, B, C, D, E = (self.classes.A,
+ self.classes.B,
+ self.classes.C,
+ self.classes.D,
+ self.classes.E)
s = Session(testing.db)
b = B(ds=[D()])
s.commit()
- q = s.query(B, B.ds.any(D.id==1)).options(joinedload_all("es"))
- q = q.join(C, C.b_id==B.id)
+ q = s.query(B, B.ds.any(D.id == 1)).options(joinedload_all("es"))
+ q = q.join(C, C.b_id == B.id)
q = q.limit(5)
eq_(
q.all(),
[(b, True)]
)
-class ColSubclassTest(fixtures.DeclarativeMappedTest, testing.AssertsCompiledSQL):
+
+class ColSubclassTest(fixtures.DeclarativeMappedTest,
+ testing.AssertsCompiledSQL):
"""Test [ticket:2918]'s test case."""
run_create_tables = run_deletes = None
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.testing.util import gc_collect
+
class O2MTest(fixtures.MappedTest):
"""deals with inheritance and one-to-many relationships"""
@classmethod
def define_tables(cls, metadata):
global foo, bar, blub
foo = Table('foo', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(20)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(20)))
bar = Table('bar', metadata,
- Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
- Column('bar_data', String(20)))
+ Column('id', Integer, ForeignKey(
+ 'foo.id'), primary_key=True),
+ Column('bar_data', String(20)))
blub = Table('blub', metadata,
- Column('id', Integer, ForeignKey('bar.id'), primary_key=True),
- Column('foo_id', Integer, ForeignKey('foo.id'), nullable=False),
- Column('blub_data', String(20)))
+ Column('id', Integer, ForeignKey(
+ 'bar.id'), primary_key=True),
+ Column('foo_id', Integer, ForeignKey(
+ 'foo.id'), nullable=False),
+ Column('blub_data', String(20)))
def test_basic(self):
class Foo(object):
def __init__(self, data=None):
self.data = data
+
def __repr__(self):
return "Foo id %d, data %s" % (self.id, self.data)
mapper(Foo, foo)
return "Blub id %d, data %s" % (self.id, self.data)
mapper(Blub, blub, inherits=Bar, properties={
- 'parent_foo':relationship(Foo)
+ 'parent_foo': relationship(Foo)
})
sess = create_session()
b2.parent_foo = f
sess.flush()
compare = ','.join([repr(b1), repr(b2), repr(b1.parent_foo),
- repr(b2.parent_foo)])
+ repr(b2.parent_foo)])
sess.expunge_all()
- l = sess.query(Blub).all()
- result = ','.join([repr(l[0]), repr(l[1]),
- repr(l[0].parent_foo), repr(l[1].parent_foo)])
- eq_(compare, result)
- eq_(l[0].parent_foo.data, 'foo #1')
- eq_(l[1].parent_foo.data, 'foo #1')
+ result = sess.query(Blub).all()
+ result_str = ','.join([repr(result[0]), repr(result[1]),
+ repr(result[0].parent_foo),
+ repr(result[1].parent_foo)])
+ eq_(compare, result_str)
+ eq_(result[0].parent_foo.data, 'foo #1')
+ eq_(result[1].parent_foo.data, 'foo #1')
class PolyExpressionEagerLoad(fixtures.DeclarativeMappedTest):
result = session.query(A).filter_by(child_id=None).\
options(joinedload('child')).one()
-
eq_(
result,
A(id=1, discriminator='a', child=[B(id=2), B(id=3)]),
class PolymorphicResolutionMultiLevel(fixtures.DeclarativeMappedTest,
- testing.AssertsCompiledSQL):
+ testing.AssertsCompiledSQL):
run_setup_mappers = 'once'
__dialect__ = 'default'
@classmethod
def setup_classes(cls):
Base = cls.DeclarativeBasic
+
class A(Base):
__tablename__ = 'a'
id = Column(Integer, primary_key=True)
+
class B(A):
__tablename__ = 'b'
id = Column(Integer, ForeignKey('a.id'), primary_key=True)
+
class C(A):
__tablename__ = 'c'
id = Column(Integer, ForeignKey('a.id'), primary_key=True)
+
class D(B):
__tablename__ = 'd'
id = Column(Integer, ForeignKey('b.id'), primary_key=True)
a_mapper = inspect(self.classes.A)
eq_(
a_mapper._mappers_from_spec(
- [self.classes.B, self.classes.D], None),
+ [self.classes.B, self.classes.D], None),
[a_mapper, inspect(self.classes.B), inspect(self.classes.D)]
)
def test_a(self):
a_mapper = inspect(self.classes.A)
- eq_(
- a_mapper._mappers_from_spec(
- [self.classes.A], None),
- [a_mapper]
- )
+ eq_(a_mapper._mappers_from_spec([self.classes.A], None), [a_mapper])
def test_b_d_selectable(self):
a_mapper = inspect(self.classes.A)
spec = [self.classes.D, self.classes.B]
eq_(
a_mapper._mappers_from_spec(
- spec,
- self.classes.B.__table__.join(self.classes.D.__table__)
+ spec,
+ self.classes.B.__table__.join(self.classes.D.__table__)
),
[inspect(self.classes.B), inspect(self.classes.D)]
)
spec = [self.classes.D]
eq_(
a_mapper._mappers_from_spec(
- spec,
- self.classes.B.__table__.join(self.classes.D.__table__)
+ spec,
+ self.classes.B.__table__.join(self.classes.D.__table__)
),
[inspect(self.classes.D)]
)
a_mapper = inspect(self.classes.A)
spec = [self.classes.D, self.classes.B]
eq_(
- a_mapper._mappers_from_spec(
- spec, None),
+ a_mapper._mappers_from_spec(spec, None),
[a_mapper, inspect(self.classes.B), inspect(self.classes.D)]
)
mappers, selectable = a_mapper._with_polymorphic_args(spec=spec)
self.assert_compile(selectable,
- "a LEFT OUTER JOIN b ON a.id = b.id "
- "LEFT OUTER JOIN d ON b.id = d.id")
+ "a LEFT OUTER JOIN b ON a.id = b.id "
+ "LEFT OUTER JOIN d ON b.id = d.id")
def test_d_b_missing(self):
a_mapper = inspect(self.classes.A)
spec = [self.classes.D]
eq_(
- a_mapper._mappers_from_spec(
- spec, None),
+ a_mapper._mappers_from_spec(spec, None),
[a_mapper, inspect(self.classes.B), inspect(self.classes.D)]
)
mappers, selectable = a_mapper._with_polymorphic_args(spec=spec)
self.assert_compile(selectable,
- "a LEFT OUTER JOIN b ON a.id = b.id "
- "LEFT OUTER JOIN d ON b.id = d.id")
+ "a LEFT OUTER JOIN b ON a.id = b.id "
+ "LEFT OUTER JOIN d ON b.id = d.id")
def test_d_c_b(self):
a_mapper = inspect(self.classes.A)
set(ms[1:3]), set(a_mapper._inheriting_mappers)
)
+
class PolymorphicOnNotLocalTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
t1 = Table('t1', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('x', String(10)),
- Column('q', String(10)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x', String(10)),
+ Column('q', String(10)))
t2 = Table('t2', metadata,
- Column('t2id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('y', String(10)),
- Column('xid', ForeignKey('t1.id')))
+ Column('t2id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('y', String(10)),
+ Column('xid', ForeignKey('t1.id')))
@classmethod
def setup_classes(cls):
class Parent(cls.Comparable):
pass
+
class Child(Parent):
pass
Parent = self.classes.Parent
t1t2_join = select([t1.c.x], from_obj=[t1.join(t2)]).alias()
+
def go():
interface_m = mapper(Parent, t2,
- polymorphic_on=lambda:"hi",
- polymorphic_identity=0)
+ polymorphic_on=lambda: "hi",
+ polymorphic_identity=0)
assert_raises_message(
sa_exc.ArgumentError,
t2, t1 = self.tables.t2, self.tables.t1
Parent = self.classes.Parent
t1t2_join = select([t1.c.x], from_obj=[t1.join(t2)]).alias()
+
def go():
t1t2_join_2 = select([t1.c.q], from_obj=[t1.join(t2)]).alias()
interface_m = mapper(Parent, t2,
- polymorphic_on=t1t2_join.c.x,
- with_polymorphic=('*', t1t2_join_2),
- polymorphic_identity=0)
+ polymorphic_on=t1t2_join.c.x,
+ with_polymorphic=('*', t1t2_join_2),
+ polymorphic_identity=0)
assert_raises_message(
sa_exc.InvalidRequestError,
"Could not map polymorphic_on column 'x' to the mapped table - "
t1t2_join = select([t1.c.x], from_obj=[t1.join(t2)]).alias()
# if its in the with_polymorphic, then its OK
mapper(Parent, t2,
- polymorphic_on=t1t2_join.c.x,
- with_polymorphic=('*', t1t2_join),
- polymorphic_identity=0)
+ polymorphic_on=t1t2_join.c.x,
+ with_polymorphic=('*', t1t2_join),
+ polymorphic_identity=0)
def test_polymorpic_on_not_in_with_poly(self):
t2, t1 = self.tables.t2, self.tables.t1
def go():
t1t2_join_2 = select([t1.c.q], from_obj=[t1.join(t2)]).alias()
interface_m = mapper(Parent, t2,
- polymorphic_on=t1t2_join.c.x,
- with_polymorphic=('*', t1t2_join_2),
- polymorphic_identity=0)
+ polymorphic_on=t1t2_join.c.x,
+ with_polymorphic=('*', t1t2_join_2),
+ polymorphic_identity=0)
assert_raises_message(
sa_exc.InvalidRequestError,
"Could not map polymorphic_on column 'x' "
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
- (t1.c.x=="p", "parent"),
- (t1.c.x=="c", "child"),
+ (t1.c.x == "p", "parent"),
+ (t1.c.x == "c", "child"),
])
mapper(Parent, t1, properties={
- "discriminator":column_property(expr)
+ "discriminator": column_property(expr)
}, polymorphic_identity="parent",
polymorphic_on=expr)
mapper(Child, t2, inherits=Parent,
- polymorphic_identity="child")
+ polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
- (t1.c.x=="p", "parent"),
- (t1.c.x=="c", "child"),
+ (t1.c.x == "p", "parent"),
+ (t1.c.x == "c", "child"),
])
mapper(Parent, t1, polymorphic_identity="parent",
- polymorphic_on=expr)
+ polymorphic_on=expr)
mapper(Child, t2, inherits=Parent, polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
- (t1.c.x=="p", "parent"),
- (t1.c.x=="c", "child"),
+ (t1.c.x == "p", "parent"),
+ (t1.c.x == "c", "child"),
]).label(None)
mapper(Parent, t1, polymorphic_identity="parent",
- polymorphic_on=expr)
+ polymorphic_on=expr)
mapper(Child, t2, inherits=Parent, polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
- (t1.c.x=="p", "parent"),
- (t1.c.x=="c", "child"),
+ (t1.c.x == "p", "parent"),
+ (t1.c.x == "c", "child"),
])
mapper(Parent, t1, polymorphic_identity="parent",
- polymorphic_on=expr)
+ polymorphic_on=expr)
mapper(Child, inherits=Parent, polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
- (t1.c.x=="p", "parent"),
- (t1.c.x=="c", "child"),
+ (t1.c.x == "p", "parent"),
+ (t1.c.x == "c", "child"),
]).label(None)
mapper(Parent, t1, polymorphic_identity="parent",
- polymorphic_on=expr)
+ polymorphic_on=expr)
mapper(Child, inherits=Parent, polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
- (t1.c.x=="p", "parent"),
- (t1.c.x=="c", "child"),
+ (t1.c.x == "p", "parent"),
+ (t1.c.x == "c", "child"),
])
cprop = column_property(expr)
mapper(Parent, t1, properties={
- "discriminator":cprop
+ "discriminator": cprop
}, polymorphic_identity="parent",
polymorphic_on=cprop)
mapper(Child, t2, inherits=Parent,
- polymorphic_identity="child")
+ polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
- (t1.c.x=="p", "parent"),
- (t1.c.x=="c", "child"),
+ (t1.c.x == "p", "parent"),
+ (t1.c.x == "c", "child"),
])
cprop = column_property(expr)
mapper(Parent, t1, properties={
- "discriminator":cprop
+ "discriminator": cprop
}, polymorphic_identity="parent",
polymorphic_on="discriminator")
mapper(Child, t2, inherits=Parent,
- polymorphic_identity="child")
+ polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
"Only direct column-mapped property or "
"SQL expression can be passed for polymorphic_on",
mapper, Parent, t1, properties={
- "discriminator":cprop,
- "discrim_syn":synonym(cprop)
- }, polymorphic_identity="parent",
+ "discriminator": cprop,
+ "discrim_syn": synonym(cprop)
+ }, polymorphic_identity="parent",
polymorphic_on="discrim_syn")
- def _roundtrip(self, set_event=True, parent_ident='parent', child_ident='child'):
+ def _roundtrip(self, set_event=True, parent_ident='parent',
+ child_ident='child'):
Parent, Child = self.classes.Parent, self.classes.Child
# locate the "polymorphic_on" ColumnProperty. This isn't
[Child]
)
+
class SortOnlyOnImportantFKsTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('a', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('b_id', Integer,
- ForeignKey('b.id', use_alter=True, name='b'))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('b_id', Integer,
+ ForeignKey('b.id', use_alter=True, name='b')))
Table('b', metadata,
- Column('id', Integer, ForeignKey('a.id'), primary_key=True)
- )
+ Column('id', Integer, ForeignKey('a.id'), primary_key=True))
@classmethod
def setup_classes(cls):
__tablename__ = "a"
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
b_id = Column(Integer, ForeignKey('b.id'))
class B(A):
s.add(self.classes.B())
s.flush()
+
class FalseDiscriminatorTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global t1
t1 = Table('t1', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('type', Boolean, nullable=False))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', Boolean, nullable=False))
def test_false_on_sub(self):
class Foo(object):
pass
+
class Bar(Foo):
pass
mapper(Foo, t1, polymorphic_on=t1.c.type, polymorphic_identity=True)
assert isinstance(sess.query(Foo).one(), Bar)
def test_false_on_base(self):
- class Ding(object):pass
- class Bat(Ding):pass
+ class Ding(object):
+ pass
+
+ class Bat(Ding):
+ pass
mapper(Ding, t1, polymorphic_on=t1.c.type, polymorphic_identity=False)
mapper(Bat, inherits=Ding, polymorphic_identity=True)
sess = create_session()
sess.expunge_all()
assert sess.query(Ding).one() is not None
+
class PolymorphicSynonymTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global t1, t2
t1 = Table('t1', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('type', String(10), nullable=False),
Column('info', String(255)))
t2 = Table('t2', metadata,
Column('id', Integer, ForeignKey('t1.id'),
- primary_key=True),
+ primary_key=True),
Column('data', String(10), nullable=False))
def test_polymorphic_synonym(self):
class T1(fixtures.ComparableEntity):
def info(self):
return "THE INFO IS:" + self._info
+
def _set_info(self, x):
self._info = x
info = property(info, _set_info)
- class T2(T1):pass
+ class T2(T1):
+ pass
mapper(T1, t1, polymorphic_on=t1.c.type, polymorphic_identity='t1',
- properties={
- 'info':synonym('_info', map_column=True)
- })
+ properties={'info': synonym('_info', map_column=True)})
mapper(T2, t2, inherits=T1, polymorphic_identity='t2')
sess = create_session()
at1 = T1(info='at1')
sess.add(at2)
sess.flush()
sess.expunge_all()
- eq_(sess.query(T2).filter(T2.info=='at2').one(), at2)
+ eq_(sess.query(T2).filter(T2.info == 'at2').one(), at2)
eq_(at2.info, "THE INFO IS:at2")
+
class PolymorphicAttributeManagementTest(fixtures.MappedTest):
"""Test polymorphic_on can be assigned, can be mirrored, etc."""
@classmethod
def define_tables(cls, metadata):
Table('table_a', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('class_name', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('class_name', String(50)))
Table('table_b', metadata,
- Column('id', Integer, ForeignKey('table_a.id'),
- primary_key=True),
- Column('class_name', String(50)),
- )
+ Column('id', Integer, ForeignKey('table_a.id'),
+ primary_key=True),
+ Column('class_name', String(50)))
Table('table_c', metadata,
- Column('id', Integer, ForeignKey('table_b.id'),
- primary_key=True),
- Column('data', String(10))
- )
+ Column('id', Integer, ForeignKey('table_b.id'),
+ primary_key=True),
+ Column('data', String(10)))
@classmethod
def setup_classes(cls):
table_b, table_c, table_a = (cls.tables.table_b,
- cls.tables.table_c,
- cls.tables.table_a)
+ cls.tables.table_c,
+ cls.tables.table_a)
class A(cls.Basic):
pass
+
class B(A):
pass
+
class C(B):
pass
+
class D(B):
pass
mapper(A, table_a,
- polymorphic_on=table_a.c.class_name,
- polymorphic_identity='a')
- mapper(B, table_b, inherits=A,
- polymorphic_on=table_b.c.class_name,
- polymorphic_identity='b',
- properties=dict(class_name=[table_a.c.class_name, table_b.c.class_name]))
- mapper(C, table_c, inherits=B,
- polymorphic_identity='c')
- mapper(D, inherits=B,
- polymorphic_identity='d')
+ polymorphic_on=table_a.c.class_name,
+ polymorphic_identity='a')
+ mapper(B, table_b, inherits=A, polymorphic_on=table_b.c.class_name,
+ polymorphic_identity='b',
+ properties=dict(
+ class_name=[table_a.c.class_name, table_b.c.class_name]))
+ mapper(C, table_c, inherits=B, polymorphic_identity='c')
+ mapper(D, inherits=B, polymorphic_identity='d')
def test_poly_configured_immediate(self):
- A, C, B = (self.classes.A,
- self.classes.C,
- self.classes.B)
+ A, C, B = (self.classes.A, self.classes.C, self.classes.B)
a = A()
b = B()
eq_(c.class_name, 'c')
def test_base_class(self):
- A, C, B = (self.classes.A,
- self.classes.C,
- self.classes.B)
+ A, C, B = (self.classes.A, self.classes.C, self.classes.B)
sess = Session()
c1 = C()
sess.flush
)
+
class CascadeTest(fixtures.MappedTest):
"""that cascades on polymorphic relationships continue
cascading along the path of the instance's mapper, not
@classmethod
def define_tables(cls, metadata):
global t1, t2, t3, t4
- t1= Table('t1', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(30))
- )
+ t1 = Table('t1', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)))
t2 = Table('t2', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('t1id', Integer, ForeignKey('t1.id')),
- Column('type', String(30)),
- Column('data', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('t1id', Integer, ForeignKey('t1.id')),
+ Column('type', String(30)),
+ Column('data', String(30)))
t3 = Table('t3', metadata,
- Column('id', Integer, ForeignKey('t2.id'),
- primary_key=True),
- Column('moredata', String(30)))
+ Column('id', Integer, ForeignKey('t2.id'),
+ primary_key=True),
+ Column('moredata', String(30)))
t4 = Table('t4', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('t3id', Integer, ForeignKey('t3.id')),
- Column('data', String(30)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('t3id', Integer, ForeignKey('t3.id')),
+ Column('data', String(30)))
def test_cascade(self):
class T1(fixtures.BasicEntity):
pass
+
class T2(fixtures.BasicEntity):
pass
+
class T3(T2):
pass
+
class T4(fixtures.BasicEntity):
pass
mapper(T1, t1, properties={
- 't2s':relationship(T2, cascade="all")
+ 't2s': relationship(T2, cascade="all")
})
mapper(T2, t2, polymorphic_on=t2.c.type, polymorphic_identity='t2')
mapper(T3, t3, inherits=T2, polymorphic_identity='t3', properties={
- 't4s':relationship(T4, cascade="all")
+ 't4s': relationship(T4, cascade="all")
})
mapper(T4, t4)
sess = create_session()
t1_1 = T1(data='t1')
- t3_1 = T3(data ='t3', moredata='t3')
+ t3_1 = T3(data='t3', moredata='t3')
t2_1 = T2(data='t2')
t1_1.t2s.append(t2_1)
sess.add(t1_1)
-
assert t4_1 in sess.new
sess.flush()
assert t4_1 in sess.deleted
sess.flush()
+
class M2OUseGetTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('base', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('type', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(30)))
Table('sub', metadata,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- )
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True))
Table('related', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('sub_id', Integer, ForeignKey('sub.id')),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('sub_id', Integer, ForeignKey('sub.id')))
def test_use_get(self):
base, sub, related = (self.tables.base,
- self.tables.sub,
- self.tables.related)
+ self.tables.sub,
+ self.tables.related)
# test [ticket:1186]
class Base(fixtures.BasicEntity):
pass
+
class Sub(Base):
pass
+
class Related(Base):
pass
- mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='b')
+ mapper(Base, base, polymorphic_on=base.c.type,
+ polymorphic_identity='b')
mapper(Sub, sub, inherits=Base, polymorphic_identity='s')
mapper(Related, related, properties={
# previously, this was needed for the comparison to occur:
- # the 'primaryjoin' looks just like "Sub"'s "get" clause (based on the Base id),
- # and foreign_keys since that join condition doesn't actually have any fks in it
- #'sub':relationship(Sub, primaryjoin=base.c.id==related.c.sub_id, foreign_keys=related.c.sub_id)
+ # the 'primaryjoin' looks just like "Sub"'s "get" clause
+ # (based on the Base id), and foreign_keys since that join
+ # condition doesn't actually have any fks in it
+ # 'sub':relationship(Sub, primaryjoin=base.c.id==related.c.sub_id,
+ # foreign_keys=related.c.sub_id)
# now we can use this:
- 'sub':relationship(Sub)
+ 'sub': relationship(Sub)
})
assert class_mapper(Related).get_property('sub').strategy.use_get
r1 = sess.query(Related).first()
s1 = sess.query(Sub).first()
+
def go():
assert r1.sub
self.assert_sql_count(testing.db, go, 0)
def define_tables(cls, metadata):
global foo, bar, blub
foo = Table('foo', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('type', String(30)),
- Column('data', String(20)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(30)),
+ Column('data', String(20)))
bar = Table('bar', metadata,
- Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
- Column('bar_data', String(20)))
+ Column('id', Integer, ForeignKey(
+ 'foo.id'), primary_key=True),
+ Column('bar_data', String(20)))
blub = Table('blub', metadata,
- Column('blub_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('foo_id', Integer, ForeignKey('foo.id')),
- Column('bar_id', Integer, ForeignKey('bar.id')),
- Column('blub_data', String(20)))
+ Column('blub_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('foo_id', Integer, ForeignKey('foo.id')),
+ Column('bar_id', Integer, ForeignKey('bar.id')),
+ Column('blub_data', String(20)))
@classmethod
def setup_classes(cls):
def _do_get_test(self, polymorphic):
foo, Bar, Blub, blub, bar, Foo = (self.tables.foo,
- self.classes.Bar,
- self.classes.Blub,
- self.tables.blub,
- self.tables.bar,
- self.classes.Foo)
+ self.classes.Bar,
+ self.classes.Blub,
+ self.tables.blub,
+ self.tables.bar,
+ self.classes.Foo)
if polymorphic:
- mapper(Foo, foo, polymorphic_on=foo.c.type, polymorphic_identity='foo')
+ mapper(Foo, foo, polymorphic_on=foo.c.type,
+ polymorphic_identity='foo')
mapper(Bar, bar, inherits=Foo, polymorphic_identity='bar')
mapper(Blub, blub, inherits=Bar, polymorphic_identity='blub')
else:
assert sess.query(Foo).get(f.id) is f
bb = sess.query(Foo).get(b.id)
- assert isinstance(b, Foo) and bb.id==b.id
+ assert isinstance(b, Foo) and bb.id == b.id
bll = sess.query(Foo).get(bl.id)
- assert isinstance(bll, Foo) and bll.id==bl.id
+ assert isinstance(bll, Foo) and bll.id == bl.id
assert sess.query(Bar).get(b.id) is b
class EagerLazyTest(fixtures.MappedTest):
- """tests eager load/lazy load of child items off inheritance mappers, tests that
- LazyLoader constructs the right query condition."""
+ """tests eager load/lazy load of child items off inheritance mappers, tests
+ that LazyLoader constructs the right query condition."""
@classmethod
def define_tables(cls, metadata):
global foo, bar, bar_foo
foo = Table('foo', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(30)))
bar = Table('bar', metadata,
- Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
+ Column('id', Integer, ForeignKey(
+ 'foo.id'), primary_key=True),
Column('bar_data', String(30)))
bar_foo = Table('bar_foo', metadata,
Column('bar_id', Integer, ForeignKey('bar.id')),
- Column('foo_id', Integer, ForeignKey('foo.id'))
- )
+ Column('foo_id', Integer, ForeignKey('foo.id')))
def test_basic(self):
- class Foo(object): pass
- class Bar(Foo): pass
+ class Foo(object):
+ pass
+
+ class Bar(Foo):
+ pass
foos = mapper(Foo, foo)
bars = mapper(Bar, bar, inherits=foos)
foo.insert().execute(data='foo2')
bar.insert().execute(id=2, data='bar2')
- foo.insert().execute(data='foo3') #3
- foo.insert().execute(data='foo4') #4
+ foo.insert().execute(data='foo3') # 3
+ foo.insert().execute(data='foo4') # 4
bar_foo.insert().execute(bar_id=1, foo_id=3)
bar_foo.insert().execute(bar_id=2, foo_id=4)
self.assert_(len(q.first().lazy) == 1)
self.assert_(len(q.first().eager) == 1)
+
class EagerTargetingTest(fixtures.MappedTest):
"""test a scenario where joined table inheritance might be
confused as an eagerly loaded joined table."""
@classmethod
def define_tables(cls, metadata):
Table('a_table', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String(50)),
- Column('type', String(30), nullable=False),
- Column('parent_id', Integer, ForeignKey('a_table.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('name', String(50)),
+ Column('type', String(30), nullable=False),
+ Column('parent_id', Integer, ForeignKey('a_table.id')))
Table('b_table', metadata,
- Column('id', Integer, ForeignKey('a_table.id'), primary_key=True),
- Column('b_data', String(50)),
- )
+ Column('id', Integer, ForeignKey(
+ 'a_table.id'), primary_key=True),
+ Column('b_data', String(50)))
def test_adapt_stringency(self):
b_table, a_table = self.tables.b_table, self.tables.a_table
class A(fixtures.ComparableEntity):
pass
+
class B(A):
pass
- mapper(A, a_table, polymorphic_on=a_table.c.type, polymorphic_identity='A',
- properties={
- 'children': relationship(A, order_by=a_table.c.name)
- })
+ mapper(
+ A, a_table, polymorphic_on=a_table.c.type,
+ polymorphic_identity='A',
+ properties={'children': relationship(A, order_by=a_table.c.name)})
mapper(B, b_table, inherits=A, polymorphic_identity='B', properties={
- 'b_derived':column_property(b_table.c.b_data + "DATA")
- })
+ 'b_derived': column_property(b_table.c.b_data + "DATA")
+ })
- sess=create_session()
+ sess = create_session()
- b1=B(id=1, name='b1',b_data='i')
+ b1 = B(id=1, name='b1', b_data='i')
sess.add(b1)
sess.flush()
- b2=B(id=2, name='b2', b_data='l', parent_id=1)
+ b2 = B(id=2, name='b2', b_data='l', parent_id=1)
sess.add(b2)
sess.flush()
- bid=b1.id
+ bid = b1.id
sess.expunge_all()
- node = sess.query(B).filter(B.id==bid).all()[0]
- eq_(node, B(id=1, name='b1',b_data='i'))
- eq_(node.children[0], B(id=2, name='b2',b_data='l'))
+ node = sess.query(B).filter(B.id == bid).all()[0]
+ eq_(node, B(id=1, name='b1', b_data='i'))
+ eq_(node.children[0], B(id=2, name='b2', b_data='l'))
sess.expunge_all()
- node = sess.query(B).options(joinedload(B.children)).filter(B.id==bid).all()[0]
- eq_(node, B(id=1, name='b1',b_data='i'))
- eq_(node.children[0], B(id=2, name='b2',b_data='l'))
+ node = sess.query(B).options(joinedload(B.children))\
+ .filter(B.id == bid).all()[0]
+ eq_(node, B(id=1, name='b1', b_data='i'))
+ eq_(node.children[0], B(id=2, name='b2', b_data='l'))
+
class FlushTest(fixtures.MappedTest):
"""test dependency sorting among inheriting mappers"""
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('email', String(128)),
- Column('password', String(16)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('email', String(128)),
+ Column('password', String(16)))
Table('roles', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('description', String(32))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('description', String(32)))
Table('user_roles', metadata,
- Column('user_id', Integer, ForeignKey('users.id'), primary_key=True),
- Column('role_id', Integer, ForeignKey('roles.id'), primary_key=True)
- )
+ Column('user_id', Integer, ForeignKey(
+ 'users.id'), primary_key=True),
+ Column('role_id', Integer, ForeignKey(
+ 'roles.id'), primary_key=True)
+ )
Table('admins', metadata,
- Column('admin_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('user_id', Integer, ForeignKey('users.id'))
- )
+ Column('admin_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer, ForeignKey('users.id')))
def test_one(self):
admins, users, roles, user_roles = (self.tables.admins,
- self.tables.users,
- self.tables.roles,
- self.tables.user_roles)
+ self.tables.users,
+ self.tables.roles,
+ self.tables.user_roles)
- class User(object):pass
- class Role(object):pass
- class Admin(User):pass
+ class User(object):
+ pass
+
+ class Role(object):
+ pass
+
+ class Admin(User):
+ pass
role_mapper = mapper(Role, roles)
- user_mapper = mapper(User, users, properties = {
- 'roles' : relationship(Role, secondary=user_roles, lazy='joined')
- }
- )
+ user_mapper = mapper(User, users, properties={
+ 'roles': relationship(Role, secondary=user_roles, lazy='joined')})
admin_mapper = mapper(Admin, admins, inherits=user_mapper)
sess = create_session()
adminrole = Role()
sess.flush()
# create an Admin, and append a Role. the dependency processors
- # corresponding to the "roles" attribute for the Admin mapper and the User mapper
- # have to ensure that two dependency processors don't fire off and insert the
- # many to many row twice.
+ # corresponding to the "roles" attribute for the Admin mapper and the
+ # User mapper have to ensure that two dependency processors don't fire
+ # off and insert the many to many row twice.
a = Admin()
a.roles.append(adminrole)
a.password = 'admin'
def test_two(self):
admins, users, roles, user_roles = (self.tables.admins,
- self.tables.users,
- self.tables.roles,
- self.tables.user_roles)
+ self.tables.users,
+ self.tables.roles,
+ self.tables.user_roles)
class User(object):
def __init__(self, email=None, password=None):
def __init__(self, description=None):
self.description = description
- class Admin(User):pass
+ class Admin(User):
+ pass
role_mapper = mapper(Role, roles)
- user_mapper = mapper(User, users, properties = {
- 'roles' : relationship(Role, secondary=user_roles, lazy='joined')
- }
- )
+ user_mapper = mapper(User, users, properties={
+ 'roles': relationship(Role, secondary=user_roles, lazy='joined')})
admin_mapper = mapper(Admin, admins, inherits=user_mapper)
@classmethod
def define_tables(cls, metadata):
Table("a", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True)
- )
- Table("b", metadata,
- Column('id', Integer, primary_key=True)
- )
- Table("c", metadata,
- Column('id', Integer, primary_key=True)
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True))
+ Table("b", metadata, Column('id', Integer, primary_key=True))
+ Table("c", metadata, Column('id', Integer, primary_key=True))
@classmethod
def setup_classes(cls):
class A(cls.Basic):
pass
+
class B(A):
pass
+
class C(A):
pass
A, B, C = cls.classes.A, cls.classes.B, cls.classes.C
mapper(A, cls.tables.a)
mapper(B, cls.tables.b, inherits=A,
- inherit_condition=cls.tables.a.c.id == cls.tables.b.c.id,
- inherit_foreign_keys=cls.tables.b.c.id)
+ inherit_condition=cls.tables.a.c.id == cls.tables.b.c.id,
+ inherit_foreign_keys=cls.tables.b.c.id)
mapper(C, cls.tables.c, inherits=A,
- inherit_condition=cls.tables.a.c.id == cls.tables.c.c.id,
- inherit_foreign_keys=cls.tables.c.c.id)
+ inherit_condition=cls.tables.a.c.id == cls.tables.c.c.id,
+ inherit_foreign_keys=cls.tables.c.c.id)
def test_ordering(self):
B, C = self.classes.B, self.classes.C
sess = Session()
sess.add_all([B(), C(), B(), C()])
self.assert_sql_execution(
- testing.db,
- sess.flush,
- CompiledSQL(
- "INSERT INTO a () VALUES ()",
- {}
- ),
- CompiledSQL(
- "INSERT INTO a () VALUES ()",
- {}
- ),
+ testing.db,
+ sess.flush,
+ CompiledSQL("INSERT INTO a () VALUES ()", {}),
+ CompiledSQL("INSERT INTO a () VALUES ()", {}),
+ CompiledSQL("INSERT INTO a () VALUES ()", {}),
+ CompiledSQL("INSERT INTO a () VALUES ()", {}),
+ AllOf(
CompiledSQL(
- "INSERT INTO a () VALUES ()",
- {}
+ "INSERT INTO b (id) VALUES (:id)",
+ [{"id": 1}, {"id": 3}]
),
CompiledSQL(
- "INSERT INTO a () VALUES ()",
- {}
- ),
- AllOf(
- CompiledSQL(
- "INSERT INTO b (id) VALUES (:id)",
- [{"id": 1}, {"id": 3}]
- ),
- CompiledSQL(
- "INSERT INTO c (id) VALUES (:id)",
- [{"id": 2}, {"id": 4}]
- )
+ "INSERT INTO c (id) VALUES (:id)",
+ [{"id": 2}, {"id": 4}]
)
+ )
)
+
class VersioningTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('base', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('version_id', Integer, nullable=False),
- Column('value', String(40)),
- Column('discriminator', Integer, nullable=False)
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('version_id', Integer, nullable=False),
+ Column('value', String(40)),
+ Column('discriminator', Integer, nullable=False))
Table('subtable', metadata,
- Column('id', None, ForeignKey('base.id'), primary_key=True),
- Column('subdata', String(50))
- )
+ Column('id', None, ForeignKey('base.id'), primary_key=True),
+ Column('subdata', String(50)))
Table('stuff', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('parent', Integer, ForeignKey('base.id'))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('parent', Integer, ForeignKey('base.id')))
@testing.emits_warning(r".*updated rowcount")
@engines.close_open_connections
def test_save_update(self):
subtable, base, stuff = (self.tables.subtable,
- self.tables.base,
- self.tables.stuff)
+ self.tables.base,
+ self.tables.stuff)
class Base(fixtures.BasicEntity):
pass
+
class Sub(Base):
pass
+
class Stuff(Base):
pass
mapper(Stuff, stuff)
mapper(Base, base,
- polymorphic_on=base.c.discriminator,
- version_id_col=base.c.version_id,
- polymorphic_identity=1, properties={
- 'stuff':relationship(Stuff)
- })
+ polymorphic_on=base.c.discriminator,
+ version_id_col=base.c.version_id,
+ polymorphic_identity=1, properties={
+ 'stuff': relationship(Stuff)
+ })
mapper(Sub, subtable, inherits=Base, polymorphic_identity=2)
sess = create_session()
sess.flush()
assert_raises(orm_exc.StaleDataError,
- sess2.query(Base).with_lockmode('read').get,
- s1.id)
+ sess2.query(Base).with_lockmode('read').get,
+ s1.id)
if not testing.db.dialect.supports_sane_rowcount:
sess2.flush()
class Base(fixtures.BasicEntity):
pass
+
class Sub(Base):
pass
mapper(Base, base,
- polymorphic_on=base.c.discriminator,
- version_id_col=base.c.version_id, polymorphic_identity=1)
+ polymorphic_on=base.c.discriminator,
+ version_id_col=base.c.version_id, polymorphic_identity=1)
mapper(Sub, subtable, inherits=Base, polymorphic_identity=2)
sess = create_session()
else:
sess.flush()
+
class DistinctPKTest(fixtures.MappedTest):
"""test the construction of mapper.primary_key when an inheriting relationship
joins on a column other than primary key column."""
global person_table, employee_table, Person, Employee
person_table = Table("persons", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("name", String(80)),
- )
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("name", String(80)))
employee_table = Table("employees", metadata,
- Column("eid", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("salary", Integer),
- Column("person_id", Integer, ForeignKey("persons.id")),
- )
+ Column("eid", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("salary", Integer),
+ Column("person_id", Integer,
+ ForeignKey("persons.id")))
class Person(object):
def __init__(self, name):
self.name = name
- class Employee(Person): pass
+ class Employee(Person):
+ pass
@classmethod
def insert_data(cls):
person_insert.execute(id=2, name='bob')
employee_insert = employee_table.insert()
- employee_insert.execute(id=2, salary=250, person_id=1) # alice
- employee_insert.execute(id=3, salary=200, person_id=2) # bob
+ employee_insert.execute(id=2, salary=250, person_id=1) # alice
+ employee_insert.execute(id=3, salary=200, person_id=2) # bob
def test_implicit(self):
person_mapper = mapper(Person, person_table)
def test_explicit_props(self):
person_mapper = mapper(Person, person_table)
mapper(Employee, employee_table, inherits=person_mapper,
- properties={'pid':person_table.c.id,
- 'eid':employee_table.c.eid})
+ properties={'pid': person_table.c.id,
+ 'eid': employee_table.c.eid})
self._do_test(False)
def test_explicit_composite_pk(self):
person_mapper = mapper(Person, person_table)
mapper(Employee, employee_table,
- inherits=person_mapper,
- properties=dict(id=[employee_table.c.eid, person_table.c.id]),
- primary_key=[person_table.c.id, employee_table.c.eid])
- assert_raises_message(sa_exc.SAWarning,
- r"On mapper Mapper\|Employee\|employees, "
- "primary key column 'persons.id' is being "
- "combined with distinct primary key column 'employees.eid' "
- "in attribute 'id'. Use explicit properties to give "
- "each column its own mapped attribute name.",
- self._do_test, True
- )
+ inherits=person_mapper,
+ properties=dict(id=[employee_table.c.eid, person_table.c.id]),
+ primary_key=[person_table.c.id, employee_table.c.eid])
+ assert_raises_message(
+ sa_exc.SAWarning,
+ r"On mapper Mapper\|Employee\|employees, "
+ "primary key column 'persons.id' is being "
+ "combined with distinct primary key column 'employees.eid' "
+ "in attribute 'id'. Use explicit properties to give "
+ "each column its own mapped attribute name.", self._do_test, True)
def test_explicit_pk(self):
person_mapper = mapper(Person, person_table)
- mapper(Employee, employee_table, inherits=person_mapper, primary_key=[person_table.c.id])
+ mapper(Employee, employee_table, inherits=person_mapper,
+ primary_key=[person_table.c.id])
self._do_test(False)
def _do_test(self, composite):
query = session.query(Employee)
if composite:
- alice1 = query.get([1,2])
- bob = query.get([2,3])
- alice2 = query.get([1,2])
+ alice1 = query.get([1, 2])
+ bob = query.get([2, 3])
+ alice2 = query.get([1, 2])
else:
alice1 = query.get(1)
bob = query.get(2)
assert alice1.name == alice2.name == 'alice'
assert bob.name == 'bob'
+
class SyncCompileTest(fixtures.MappedTest):
"""test that syncrules compile properly on custom inherit conds"""
global _a_table, _b_table, _c_table
_a_table = Table('a', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data1', String(128))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data1', String(128)))
_b_table = Table('b', metadata,
- Column('a_id', Integer, ForeignKey('a.id'), primary_key=True),
- Column('data2', String(128))
- )
+ Column('a_id', Integer, ForeignKey(
+ 'a.id'), primary_key=True),
+ Column('data2', String(128)))
_c_table = Table('c', metadata,
- # Column('a_id', Integer, ForeignKey('b.a_id'), primary_key=True), #works
- Column('b_a_id', Integer, ForeignKey('b.a_id'), primary_key=True),
- Column('data3', String(128))
- )
+ # Column('a_id', Integer, ForeignKey('b.a_id'),
+ # primary_key=True), #works
+ Column('b_a_id', Integer, ForeignKey(
+ 'b.a_id'), primary_key=True),
+ Column('data3', String(128)))
def test_joins(self):
- for j1 in (None, _b_table.c.a_id==_a_table.c.id, _a_table.c.id==_b_table.c.a_id):
- for j2 in (None, _b_table.c.a_id==_c_table.c.b_a_id,
- _c_table.c.b_a_id==_b_table.c.a_id):
+ for j1 in (None, _b_table.c.a_id == _a_table.c.id, _a_table.c.id ==
+ _b_table.c.a_id):
+ for j2 in (None, _b_table.c.a_id == _c_table.c.b_a_id,
+ _c_table.c.b_a_id == _b_table.c.a_id):
self._do_test(j1, j2)
for t in reversed(_a_table.metadata.sorted_tables):
t.delete().execute().close()
def _do_test(self, j1, j2):
class A(object):
- def __init__(self, **kwargs):
- for key, value in list(kwargs.items()):
+ def __init__(self, **kwargs):
+ for key, value in list(kwargs.items()):
setattr(self, key, value)
class B(A):
assert len(session.query(B).all()) == 2
assert len(session.query(C).all()) == 1
+
class OverrideColKeyTest(fixtures.MappedTest):
"""test overriding of column attributes."""
global base, subtable, subtable_two
base = Table('base', metadata,
- Column('base_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', String(255)),
- Column('sqlite_fixer', String(10))
- )
+ Column('base_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(255)),
+ Column('sqlite_fixer', String(10)))
subtable = Table('subtable', metadata,
- Column('base_id', Integer, ForeignKey('base.base_id'), primary_key=True),
- Column('subdata', String(255))
- )
+ Column('base_id', Integer, ForeignKey(
+ 'base.base_id'), primary_key=True),
+ Column('subdata', String(255)))
subtable_two = Table('subtable_two', metadata,
- Column('base_id', Integer, primary_key=True),
- Column('fk_base_id', Integer, ForeignKey('base.base_id')),
- Column('subdata', String(255))
- )
-
+ Column('base_id', Integer, primary_key=True),
+ Column('fk_base_id', Integer,
+ ForeignKey('base.base_id')),
+ Column('subdata', String(255)))
def test_plain(self):
# control case
class Base(object):
pass
+
class Sub(Base):
pass
class Base(object):
pass
+
class Sub(Base):
pass
mapper(Base, base, properties={
- 'id':base.c.base_id
+ 'id': base.c.base_id
})
mapper(Sub, subtable, inherits=Base, properties={
# this is the manual way to do it, is not really
# possible in declarative
- 'id':[base.c.base_id, subtable.c.base_id]
+ 'id': [base.c.base_id, subtable.c.base_id]
})
eq_(
def test_override_onlyinparent(self):
class Base(object):
pass
+
class Sub(Base):
pass
mapper(Base, base, properties={
- 'id':base.c.base_id
+ 'id': base.c.base_id
})
mapper(Sub, subtable, inherits=Base)
class Base(object):
pass
+
class Sub(Base):
pass
mapper(Base, base, properties={
- 'id':base.c.base_id
+ 'id': base.c.base_id
})
def go():
mapper(Sub, subtable, inherits=Base, properties={
- 'id':subtable.c.base_id
+ 'id': subtable.c.base_id
})
# Sub mapper compilation needs to detect that "base.c.base_id"
# is renamed in the inherited mapper as "id", even though
def test_pk_fk_different(self):
class Base(object):
pass
+
class Sub(Base):
pass
)
def test_plain_descriptor(self):
- """test that descriptors prevent inheritance from propigating properties to subclasses."""
+ """test that descriptors prevent inheritance from propigating
+ properties to subclasses."""
class Base(object):
pass
+
class Sub(Base):
@property
def data(self):
assert sess.query(Sub).one().data == "im the data"
def test_custom_descriptor(self):
- """test that descriptors prevent inheritance from propigating properties to subclasses."""
+ """test that descriptors prevent inheritance from propigating
+ properties to subclasses."""
class MyDesc(object):
def __get__(self, instance, owner):
class Base(object):
pass
+
class Sub(Base):
data = MyDesc()
assert sess.query(Base).get(b1.base_id).data == "this is base"
assert sess.query(Sub).get(s1.base_id).data == "this is base"
+
class OptimizedLoadTest(fixtures.MappedTest):
"""tests for the "optimized load" routine."""
@classmethod
def define_tables(cls, metadata):
Table('base', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', String(50)),
- Column('type', String(50)),
- Column('counter', Integer, server_default="1")
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)),
+ Column('type', String(50)),
+ Column('counter', Integer, server_default="1"))
Table('sub', metadata,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- Column('sub', String(50)),
- Column('subcounter', Integer, server_default="1"),
- Column('subcounter2', Integer, server_default="1")
- )
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+ Column('sub', String(50)),
+ Column('subcounter', Integer, server_default="1"),
+ Column('subcounter2', Integer, server_default="1"))
Table('subsub', metadata,
- Column('id', Integer, ForeignKey('sub.id'), primary_key=True),
- Column('subsubcounter2', Integer, server_default="1")
- )
+ Column('id', Integer, ForeignKey('sub.id'), primary_key=True),
+ Column('subsubcounter2', Integer, server_default="1"))
Table('with_comp', metadata,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- Column('a', String(10)),
- Column('b', String(10))
- )
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+ Column('a', String(10)),
+ Column('b', String(10)))
def test_no_optimize_on_map_to_join(self):
base, sub = self.tables.base, self.tables.sub
class JoinBase(fixtures.ComparableEntity):
pass
+
class SubJoinBase(JoinBase):
pass
mapper(Base, base)
mapper(JoinBase, base.outerjoin(sub), properties=util.OrderedDict(
- [('id', [base.c.id, sub.c.id]),
- ('counter', [base.c.counter, sub.c.subcounter])])
- )
+ [('id', [base.c.id, sub.c.id]),
+ ('counter', [base.c.counter, sub.c.subcounter])]))
mapper(SubJoinBase, inherits=JoinBase)
sess = Session()
eq_(sjb.data, 'data')
self.assert_sql_execution(
- testing.db,
- go,
+ testing.db, go,
CompiledSQL(
"SELECT base.id AS base_id, sub.id AS sub_id, "
- "base.counter AS base_counter, sub.subcounter AS sub_subcounter, "
+ "base.counter AS base_counter, "
+ "sub.subcounter AS sub_subcounter, "
"base.data AS base_data, base.type AS base_type, "
"sub.sub AS sub_sub, sub.subcounter2 AS sub_subcounter2 "
"FROM base LEFT OUTER JOIN sub ON base.id = sub.id "
- "WHERE base.id = :param_1",
- {'param_1': sjb_id}
- ),
- )
-
+ "WHERE base.id = :param_1", {'param_1': sjb_id}))
def test_optimized_passes(self):
""""test that the 'optimized load' routine doesn't crash when
base, sub = self.tables.base, self.tables.sub
-
class Base(fixtures.ComparableEntity):
pass
+
class Sub(Base):
pass
- mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base')
+ mapper(Base, base, polymorphic_on=base.c.type,
+ polymorphic_identity='base')
# redefine Sub's "id" to favor the "id" col in the subtable.
# "id" is also part of the primary join condition
mapper(Sub, sub, inherits=Base,
- polymorphic_identity='sub',
- properties={'id':[sub.c.id, base.c.id]})
+ polymorphic_identity='sub',
+ properties={'id': [sub.c.id, base.c.id]})
sess = sessionmaker()()
s1 = Sub(data='s1data', sub='s1sub')
sess.add(s1)
# load s1 via Base. s1.id won't populate since it's relative to
# the "sub" table. The optimized load kicks in and tries to
- # generate on the primary join, but cannot since "id" is itself unloaded.
- # the optimized load needs to return "None" so regular full-row loading proceeds
+ # generate on the primary join, but cannot since "id" is itself
+ # unloaded. the optimized load needs to return "None" so regular
+ # full-row loading proceeds
s1 = sess.query(Base).first()
assert s1.sub == 's1sub'
class Base(fixtures.ComparableEntity):
pass
+
class Sub(Base):
pass
- mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base')
- mapper(Sub, sub, inherits=Base, polymorphic_identity='sub', properties={
- 'concat': column_property(sub.c.sub + "|" + sub.c.sub)
- })
+ mapper(Base, base, polymorphic_on=base.c.type,
+ polymorphic_identity='base')
+ mapper(Sub, sub, inherits=Base, polymorphic_identity='sub',
+ properties={
+ 'concat': column_property(sub.c.sub + "|" + sub.c.sub)})
sess = sessionmaker()()
s1 = Sub(data='s1data', sub='s1sub')
sess.add(s1)
class Base(fixtures.ComparableEntity):
pass
+
class Sub(Base):
pass
- mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base')
- mapper(Sub, sub, inherits=Base, polymorphic_identity='sub', properties={
- 'concat': column_property(base.c.data + "|" + sub.c.sub)
- })
+ mapper(Base, base, polymorphic_on=base.c.type,
+ polymorphic_identity='base')
+ mapper(Sub, sub, inherits=Base, polymorphic_identity='sub',
+ properties={
+ 'concat': column_property(base.c.data + "|" + sub.c.sub)})
sess = sessionmaker()()
s1 = Sub(data='s1data', sub='s1sub')
s2 = Sub(data='s2data', sub='s2sub')
class Base(fixtures.BasicEntity):
pass
+
class WithComp(Base):
pass
+
class Comp(object):
def __init__(self, a, b):
self.a = a
self.b = b
+
def __composite_values__(self):
return self.a, self.b
+
def __eq__(self, other):
return (self.a == other.a) and (self.b == other.b)
- mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base')
- mapper(WithComp, with_comp, inherits=Base, polymorphic_identity='wc', properties={
- 'comp': composite(Comp, with_comp.c.a, with_comp.c.b)
- })
+ mapper(Base, base, polymorphic_on=base.c.type,
+ polymorphic_identity='base')
+ mapper(WithComp, with_comp, inherits=Base, polymorphic_identity='wc',
+ properties={'comp': composite(Comp,
+ with_comp.c.a, with_comp.c.b)})
sess = sessionmaker()()
s1 = WithComp(data='s1data', comp=Comp('ham', 'cheese'))
s2 = WithComp(data='s2data', comp=Comp('bacon', 'eggs'))
class Base(fixtures.BasicEntity):
pass
+
class Sub(Base):
pass
- mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base')
+ mapper(Base, base, polymorphic_on=base.c.type,
+ polymorphic_identity='base')
mapper(Sub, sub, inherits=Base, polymorphic_identity='sub')
sess = Session()
s1 = Sub(data='s1')
sess.add(s1)
self.assert_sql_execution(
- testing.db,
- sess.flush,
- CompiledSQL(
- "INSERT INTO base (data, type) VALUES (:data, :type)",
- [{'data':'s1','type':'sub'}]
- ),
- CompiledSQL(
- "INSERT INTO sub (id, sub) VALUES (:id, :sub)",
- lambda ctx:{'id':s1.id, 'sub':None}
- ),
+ testing.db,
+ sess.flush,
+ CompiledSQL(
+ "INSERT INTO base (data, type) VALUES (:data, :type)",
+ [{'data': 's1', 'type': 'sub'}]
+ ),
+ CompiledSQL(
+ "INSERT INTO sub (id, sub) VALUES (:id, :sub)",
+ lambda ctx: {'id': s1.id, 'sub': None}
+ ),
)
+
def go():
- eq_( s1.subcounter2, 1 )
+ eq_(s1.subcounter2, 1)
self.assert_sql_execution(
- testing.db,
- go,
+ testing.db, go,
CompiledSQL(
- "SELECT base.counter AS base_counter, sub.subcounter AS sub_subcounter, "
+ "SELECT base.counter AS base_counter, "
+ "sub.subcounter AS sub_subcounter, "
"sub.subcounter2 AS sub_subcounter2 FROM base JOIN sub "
"ON base.id = sub.id WHERE base.id = :param_1",
- lambda ctx:{'param_1': s1.id}
- ),
- )
+ lambda ctx: {'param_1': s1.id}))
def test_dont_generate_on_none(self):
base, sub = self.tables.base, self.tables.sub
class Base(fixtures.BasicEntity):
pass
+
class Sub(Base):
pass
mapper(Base, base, polymorphic_on=base.c.type,
- polymorphic_identity='base')
+ polymorphic_identity='base')
m = mapper(Sub, sub, inherits=Base, polymorphic_identity='sub')
s1 = Sub()
assert m._optimized_get_statement(attributes.instance_state(s1),
- ['subcounter2']) is None
+ ['subcounter2']) is None
# loads s1.id as None
eq_(s1.id, None)
# this now will come up with a value of None for id - should reject
assert m._optimized_get_statement(attributes.instance_state(s1),
- ['subcounter2']) is None
+ ['subcounter2']) is None
s1.id = 1
attributes.instance_state(s1)._commit_all(s1.__dict__, None)
assert m._optimized_get_statement(attributes.instance_state(s1),
- ['subcounter2']) is not None
+ ['subcounter2']) is not None
def test_load_expired_on_pending_twolevel(self):
base, sub, subsub = (self.tables.base,
- self.tables.sub,
- self.tables.subsub)
+ self.tables.sub,
+ self.tables.subsub)
class Base(fixtures.BasicEntity):
pass
+
class Sub(Base):
pass
+
class SubSub(Sub):
pass
mapper(Base, base, polymorphic_on=base.c.type,
- polymorphic_identity='base')
+ polymorphic_identity='base')
mapper(Sub, sub, inherits=Base, polymorphic_identity='sub')
mapper(SubSub, subsub, inherits=Sub, polymorphic_identity='subsub')
sess = Session()
s1 = SubSub(data='s1', counter=1, subcounter=2)
sess.add(s1)
self.assert_sql_execution(
- testing.db,
- sess.flush,
- CompiledSQL(
- "INSERT INTO base (data, type, counter) VALUES "
- "(:data, :type, :counter)",
- [{'data':'s1','type':'subsub','counter':1}]
- ),
- CompiledSQL(
- "INSERT INTO sub (id, sub, subcounter) VALUES "
- "(:id, :sub, :subcounter)",
- lambda ctx:[{'subcounter': 2, 'sub': None, 'id': s1.id}]
- ),
- CompiledSQL(
- "INSERT INTO subsub (id) VALUES (:id)",
- lambda ctx:{'id':s1.id}
- ),
+ testing.db,
+ sess.flush,
+ CompiledSQL(
+ "INSERT INTO base (data, type, counter) VALUES "
+ "(:data, :type, :counter)",
+ [{'data': 's1', 'type': 'subsub', 'counter': 1}]
+ ),
+ CompiledSQL(
+ "INSERT INTO sub (id, sub, subcounter) VALUES "
+ "(:id, :sub, :subcounter)",
+ lambda ctx: [{'subcounter': 2, 'sub': None, 'id': s1.id}]
+ ),
+ CompiledSQL(
+ "INSERT INTO subsub (id) VALUES (:id)",
+ lambda ctx: {'id': s1.id}
+ ),
)
def go():
def _fixture(self):
metadata = MetaData()
parent = Table('parent', metadata,
- Column('id', Integer, primary_key=True)
- )
+ Column('id', Integer, primary_key=True))
child = Table('child', metadata,
- Column('id', Integer, ForeignKey('parent.id'))
- )
+ Column('id', Integer, ForeignKey('parent.id')))
return parent, child
def tearDown(self):
class P(object):
pass
+
class C(P):
pass
class P(object):
pass
+
class C(P):
pass
mc = mapper(C, child, inherits=P, primary_key=[parent.c.id])
eq_(mc.primary_key, (parent.c.id,))
+
class InhCondTest(fixtures.TestBase):
def test_inh_cond_nonexistent_table_unrelated(self):
metadata = MetaData()
base_table = Table("base", metadata,
- Column("id", Integer, primary_key=True)
- )
+ Column("id", Integer, primary_key=True))
derived_table = Table("derived", metadata,
- Column("id", Integer, ForeignKey("base.id"), primary_key=True),
- Column("owner_id", Integer, ForeignKey("owner.owner_id"))
- )
+ Column("id", Integer, ForeignKey(
+ "base.id"), primary_key=True),
+ Column("owner_id", Integer,
+ ForeignKey("owner.owner_id")))
class Base(object):
pass
m2 = mapper(Derived, derived_table,
inherits=Base)
assert m2.inherit_condition.compare(
- base_table.c.id==derived_table.c.id
- )
+ base_table.c.id == derived_table.c.id
+ )
def test_inh_cond_nonexistent_col_unrelated(self):
m = MetaData()
base_table = Table("base", m,
- Column("id", Integer, primary_key=True)
- )
+ Column("id", Integer, primary_key=True))
derived_table = Table("derived", m,
- Column("id", Integer, ForeignKey('base.id'),
- primary_key=True),
- Column('order_id', Integer, ForeignKey('order.foo'))
- )
- order_table = Table('order', m, Column('id', Integer, primary_key=True))
+ Column("id", Integer, ForeignKey('base.id'),
+ primary_key=True),
+ Column('order_id', Integer,
+ ForeignKey('order.foo')))
+ order_table = Table('order', m, Column(
+ 'id', Integer, primary_key=True))
+
class Base(object):
pass
# succeeds, despite "order.foo" doesn't exist
m2 = mapper(Derived, derived_table, inherits=Base)
assert m2.inherit_condition.compare(
- base_table.c.id==derived_table.c.id
- )
+ base_table.c.id == derived_table.c.id
+ )
def test_inh_cond_no_fk(self):
metadata = MetaData()
base_table = Table("base", metadata,
- Column("id", Integer, primary_key=True)
- )
+ Column("id", Integer, primary_key=True))
derived_table = Table("derived", metadata,
- Column("id", Integer, primary_key=True),
- )
+ Column("id", Integer, primary_key=True))
class Base(object):
pass
m1 = MetaData()
m2 = MetaData()
base_table = Table("base", m1,
- Column("id", Integer, primary_key=True)
- )
+ Column("id", Integer, primary_key=True))
derived_table = Table("derived", m2,
- Column("id", Integer, ForeignKey('base.id'),
- primary_key=True),
- )
+ Column("id", Integer, ForeignKey('base.id'),
+ primary_key=True))
class Base(object):
pass
def test_inh_cond_nonexistent_col_related(self):
m = MetaData()
base_table = Table("base", m,
- Column("id", Integer, primary_key=True)
- )
+ Column("id", Integer, primary_key=True))
derived_table = Table("derived", m,
- Column("id", Integer, ForeignKey('base.q'),
- primary_key=True),
- )
+ Column("id", Integer, ForeignKey('base.q'),
+ primary_key=True))
class Base(object):
pass
@classmethod
def define_tables(cls, metadata):
parents = Table('parents', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(60)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(60)))
children = Table('children', metadata,
- Column('id', Integer, ForeignKey('parents.id'),
- primary_key=True),
- Column('type', Integer,primary_key=True),
- Column('name', String(60)))
+ Column('id', Integer, ForeignKey('parents.id'),
+ primary_key=True),
+ Column('type', Integer, primary_key=True),
+ Column('name', String(60)))
def test_pk_as_discriminator(self):
parents, children = self.tables.parents, self.tables.children
class Parent(object):
- def __init__(self, name=None):
- self.name = name
+ def __init__(self, name=None):
+ self.name = name
class Child(object):
def __init__(self, name=None):
'children': relationship(Child, backref='parent'),
})
mapper(Child, children, polymorphic_on=children.c.type,
- polymorphic_identity=1)
+ polymorphic_identity=1)
mapper(A, inherits=Child, polymorphic_identity=2)
assert a.id
assert a.type == 2
- p.name='p1new'
- a.name='a1new'
+ p.name = 'p1new'
+ a.name = 'a1new'
s.flush()
s.expire_all()
- assert a.name=='a1new'
- assert p.name=='p1new'
+ assert a.name == 'a1new'
+ assert p.name == 'p1new'
+
class NoPolyIdentInMiddleTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('base', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(50), nullable=False),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(50), nullable=False))
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
+
class B(A):
pass
+
class C(B):
pass
+
class D(B):
pass
+
class E(A):
pass
@classmethod
def setup_mappers(cls):
A, C, B, E, D, base = (cls.classes.A,
- cls.classes.C,
- cls.classes.B,
- cls.classes.E,
- cls.classes.D,
- cls.tables.base)
+ cls.classes.C,
+ cls.classes.B,
+ cls.classes.E,
+ cls.classes.D,
+ cls.tables.base)
mapper(A, base, polymorphic_on=base.c.type)
mapper(B, inherits=A, )
def test_discriminator(self):
C, B, base = (self.classes.C,
- self.classes.B,
- self.tables.base)
+ self.classes.B,
+ self.tables.base)
assert class_mapper(B).polymorphic_on is base.c.type
assert class_mapper(C).polymorphic_on is base.c.type
def test_load_multiple_from_middle(self):
C, B, E, D, base = (self.classes.C,
- self.classes.B,
- self.classes.E,
- self.classes.D,
- self.tables.base)
+ self.classes.B,
+ self.classes.E,
+ self.classes.D,
+ self.tables.base)
s = Session()
s.add_all([C(), D(), E()])
[C(), D()]
)
+
class DeleteOrphanTest(fixtures.MappedTest):
"""Test the fairly obvious, that an error is raised
when attempting to insert an orphan.
def define_tables(cls, metadata):
global single, parent
single = Table('single', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('type', String(50), nullable=False),
- Column('data', String(50)),
- Column('parent_id', Integer, ForeignKey('parent.id'), nullable=False),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(50), nullable=False),
+ Column('data', String(50)),
+ Column('parent_id', Integer, ForeignKey(
+ 'parent.id'), nullable=False),
+ )
parent = Table('parent', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)))
def test_orphan_message(self):
class Base(fixtures.BasicEntity):
class Parent(fixtures.BasicEntity):
pass
- mapper(Base, single, polymorphic_on=single.c.type, polymorphic_identity='base')
+ mapper(Base, single, polymorphic_on=single.c.type,
+ polymorphic_identity='base')
mapper(SubClass, inherits=Base, polymorphic_identity='sub')
mapper(Parent, parent, properties={
- 'related':relationship(Base, cascade="all, delete-orphan")
+ 'related': relationship(Base, cascade="all, delete-orphan")
})
sess = create_session()
sess.add(s1)
assert_raises(sa_exc.DBAPIError, sess.flush)
+
class PolymorphicUnionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
def _fixture(self):
t1 = table('t1', column('c1', Integer),
- column('c2', Integer),
- column('c3', Integer))
+ column('c2', Integer),
+ column('c3', Integer))
t2 = table('t2', column('c1', Integer), column('c2', Integer),
- column('c3', Integer),
- column('c4', Integer))
+ column('c3', Integer),
+ column('c4', Integer))
t3 = table('t3', column('c1', Integer),
- column('c3', Integer),
- column('c5', Integer))
+ column('c3', Integer),
+ column('c5', Integer))
return t1, t2, t3
def test_type_col_present(self):
@classmethod
def define_tables(cls, metadata):
content = Table('content', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(30)))
foo = Table('foo', metadata,
- Column('id', Integer, ForeignKey('content.id'),
- primary_key=True),
- Column('content_type', String(30))
- )
+ Column('id', Integer, ForeignKey('content.id'),
+ primary_key=True),
+ Column('content_type', String(30)))
def test_name_conflict(self):
class Content(object):
pass
+
class Foo(Content):
pass
mapper(Content, self.tables.content,
- polymorphic_on=self.tables.content.c.type)
+ polymorphic_on=self.tables.content.c.type)
mapper(Foo, self.tables.foo, inherits=Content,
- polymorphic_identity='foo')
+ polymorphic_identity='foo')
sess = create_session()
f = Foo()
f.content_type = 'bar'
name,
nickname,
engineer_info,
- ):
+ ):
self.name = name
self.nickname = nickname
self.engineer_info = engineer_info
def define_tables(cls, metadata):
global managers_table, engineers_table, hackers_table, \
companies, employees_table
- companies = Table('companies', metadata, Column('id', Integer,
- primary_key=True,
- test_needs_autoincrement=True), Column('name'
- , String(50)))
+ companies = Table('companies', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)))
employees_table = Table('employees', metadata,
Column('employee_id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
+ primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(50)),
Column('company_id', Integer,
- ForeignKey('companies.id')))
+ ForeignKey('companies.id')))
managers_table = Table(
'managers',
metadata,
test_needs_autoincrement=True),
Column('name', String(50)),
Column('manager_data', String(50)),
- Column('company_id', Integer, ForeignKey('companies.id')),
- )
+ Column('company_id', Integer, ForeignKey('companies.id')))
engineers_table = Table(
'engineers',
metadata,
test_needs_autoincrement=True),
Column('name', String(50)),
Column('engineer_info', String(50)),
- Column('company_id', Integer, ForeignKey('companies.id')),
- )
+ Column('company_id', Integer, ForeignKey('companies.id')))
hackers_table = Table(
'hackers',
metadata,
Column('name', String(50)),
Column('engineer_info', String(50)),
Column('company_id', Integer, ForeignKey('companies.id')),
- Column('nickname', String(50)),
- )
+ Column('nickname', String(50)))
def test_basic(self):
- pjoin = polymorphic_union({'manager': managers_table, 'engineer'
- : engineers_table}, 'type', 'pjoin')
+ pjoin = polymorphic_union(
+ {'manager': managers_table, 'engineer': engineers_table},
+ 'type', 'pjoin')
employee_mapper = mapper(Employee, pjoin,
polymorphic_on=pjoin.c.type)
manager_mapper = mapper(Manager, managers_table,
session.expunge_all()
assert set([repr(x) for x in session.query(Employee)]) \
== set(['Engineer Kurt knows how to hack',
- 'Manager Tom knows how to manage things'])
+ 'Manager Tom knows how to manage things'])
assert set([repr(x) for x in session.query(Manager)]) \
== set(['Manager Tom knows how to manage things'])
assert set([repr(x) for x in session.query(Engineer)]) \
eq_(manager.manager_data, 'knows how to manage things')
def test_multi_level_no_base(self):
- pjoin = polymorphic_union({'manager': managers_table, 'engineer'
- : engineers_table, 'hacker'
- : hackers_table}, 'type', 'pjoin')
+ pjoin = polymorphic_union(
+ {'manager': managers_table, 'engineer': engineers_table,
+ 'hacker': hackers_table},
+ 'type', 'pjoin')
pjoin2 = polymorphic_union({'engineer': engineers_table,
- 'hacker': hackers_table}, 'type',
+ 'hacker': hackers_table}, 'type',
'pjoin2')
employee_mapper = mapper(Employee, pjoin,
polymorphic_on=pjoin.c.type)
polymorphic_on=pjoin2.c.type,
inherits=employee_mapper,
concrete=True,
- polymorphic_identity='engineer',
- )
+ polymorphic_identity='engineer')
hacker_mapper = mapper(Hacker, hackers_table,
inherits=engineer_mapper, concrete=True,
polymorphic_identity='hacker')
session = create_session()
tom = Manager('Tom', 'knows how to manage things')
- assert_raises_message(AttributeError,
+ assert_raises_message(
+ AttributeError,
"does not implement attribute .?'type' at the instance level.",
setattr, tom, "type", "sometype")
jerry = Engineer('Jerry', 'knows how to program')
hacker = Hacker('Kurt', 'Badass', 'knows how to hack')
- assert_raises_message(AttributeError,
+ assert_raises_message(
+ AttributeError,
"does not implement attribute .?'type' at the instance level.",
setattr, hacker, "type", "sometype")
self.assert_sql_count(testing.db, go, 0)
session.expunge_all()
- assert repr(session.query(Employee).filter(Employee.name
- == 'Tom').one()) \
+ assert repr(session.query(Employee).filter(Employee.name == 'Tom')
+ .one()) \
+ == 'Manager Tom knows how to manage things'
+ assert repr(session.query(Manager)
+ .filter(Manager.name == 'Tom').one()) \
== 'Manager Tom knows how to manage things'
- assert repr(session.query(Manager).filter(Manager.name == 'Tom'
- ).one()) == 'Manager Tom knows how to manage things'
assert set([repr(x) for x in session.query(Employee).all()]) \
== set(['Engineer Jerry knows how to program',
- 'Manager Tom knows how to manage things',
- "Hacker Kurt 'Badass' knows how to hack"])
+ 'Manager Tom knows how to manage things',
+ "Hacker Kurt 'Badass' knows how to hack"])
assert set([repr(x) for x in session.query(Manager).all()]) \
== set(['Manager Tom knows how to manage things'])
assert set([repr(x) for x in session.query(Engineer).all()]) \
== set(['Engineer Jerry knows how to program',
- "Hacker Kurt 'Badass' knows how to hack"])
+ "Hacker Kurt 'Badass' knows how to hack"])
assert set([repr(x) for x in session.query(Hacker).all()]) \
== set(["Hacker Kurt 'Badass' knows how to hack"])
'manager': managers_table,
'engineer': engineers_table,
'hacker': hackers_table,
- }, 'type', 'pjoin')
+ }, 'type', 'pjoin')
pjoin2 = polymorphic_union({'engineer': engineers_table,
- 'hacker': hackers_table}, 'type',
+ 'hacker': hackers_table}, 'type',
'pjoin2')
employee_mapper = mapper(Employee, employees_table,
with_polymorphic=('*', pjoin),
polymorphic_on=pjoin2.c.type,
inherits=employee_mapper,
concrete=True,
- polymorphic_identity='engineer',
- )
+ polymorphic_identity='engineer')
hacker_mapper = mapper(Hacker, hackers_table,
inherits=engineer_mapper, concrete=True,
polymorphic_identity='hacker')
# is not rendered in the statement which is only against
# Employee's "pjoin"
- assert len(testing.db.execute(session.query(Employee).with_labels().statement).fetchall()) \
- == 3
+ assert len(testing.db.execute(session.query(
+ Employee).with_labels().statement).fetchall()) == 3
assert set([repr(x) for x in session.query(Employee)]) \
== set(['Engineer Jerry knows how to program',
- 'Manager Tom knows how to manage things',
- "Hacker Kurt 'Badass' knows how to hack"])
+ 'Manager Tom knows how to manage things',
+ "Hacker Kurt 'Badass' knows how to hack"])
assert set([repr(x) for x in session.query(Manager)]) \
== set(['Manager Tom knows how to manage things'])
assert set([repr(x) for x in session.query(Engineer)]) \
== set(['Engineer Jerry knows how to program',
- "Hacker Kurt 'Badass' knows how to hack"])
+ "Hacker Kurt 'Badass' knows how to hack"])
assert set([repr(x) for x in session.query(Hacker)]) \
== set(["Hacker Kurt 'Badass' knows how to hack"])
'manager': managers_table,
'engineer': engineers_table,
'hacker': hackers_table,
- }, 'type', 'pjoin')
+ }, 'type', 'pjoin')
pjoin2 = polymorphic_union({'engineer': engineers_table,
- 'hacker': hackers_table}, 'type',
+ 'hacker': hackers_table}, 'type',
'pjoin2')
employee_mapper = mapper(Employee, employees_table,
polymorphic_identity='employee')
hacker = Hacker('Kurt', 'Badass', 'knows how to hack')
session.add_all((jdoe, tom, jerry, hacker))
session.flush()
- eq_(len(testing.db.execute(session.query(Employee).with_polymorphic('*'
- , pjoin,
- pjoin.c.type).with_labels().statement).fetchall()), 4)
+ eq_(len(testing.db.execute(session.query(Employee).with_polymorphic(
+ '*', pjoin, pjoin.c.type).with_labels().statement).fetchall()), 4)
eq_(session.query(Employee).get(jdoe.employee_id), jdoe)
eq_(session.query(Engineer).get(jerry.employee_id), jerry)
eq_(set([repr(x) for x in
- session.query(Employee).with_polymorphic('*', pjoin,
- pjoin.c.type)]), set(['Employee Jdoe',
- 'Engineer Jerry knows how to program',
- 'Manager Tom knows how to manage things',
- "Hacker Kurt 'Badass' knows how to hack"]))
+ session.query(Employee).with_polymorphic('*', pjoin,
+ pjoin.c.type)]),
+ set(['Employee Jdoe',
+ 'Engineer Jerry knows how to program',
+ 'Manager Tom knows how to manage things',
+ "Hacker Kurt 'Badass' knows how to hack"]))
eq_(set([repr(x) for x in session.query(Manager)]),
set(['Manager Tom knows how to manage things']))
eq_(set([repr(x) for x in
- session.query(Engineer).with_polymorphic('*', pjoin2,
- pjoin2.c.type)]), set(['Engineer Jerry knows how to program'
- , "Hacker Kurt 'Badass' knows how to hack"]))
+ session.query(Engineer).with_polymorphic('*',
+ pjoin2,
+ pjoin2.c.type)]),
+ set(['Engineer Jerry knows how to program',
+ "Hacker Kurt 'Badass' knows how to hack"]))
eq_(set([repr(x) for x in session.query(Hacker)]),
set(["Hacker Kurt 'Badass' knows how to hack"]))
# test adaption of the column by wrapping the query in a
# subquery
- eq_(len(testing.db.execute(session.query(Engineer).with_polymorphic('*'
- , pjoin2,
- pjoin2.c.type).from_self().statement).fetchall()), 2)
+ eq_(len(testing.db.execute(session.query(Engineer).with_polymorphic(
+ '*', pjoin2, pjoin2.c.type).from_self().statement).fetchall()), 2)
eq_(set([repr(x) for x in
- session.query(Engineer).with_polymorphic('*', pjoin2,
- pjoin2.c.type).from_self()]),
+ session.query(Engineer)
+ .with_polymorphic('*', pjoin2, pjoin2.c.type)
+ .from_self()]),
set(['Engineer Jerry knows how to program',
- "Hacker Kurt 'Badass' knows how to hack"]))
+ "Hacker Kurt 'Badass' knows how to hack"]))
def test_relationship(self):
- pjoin = polymorphic_union({'manager': managers_table, 'engineer'
- : engineers_table}, 'type', 'pjoin')
- mapper(Company, companies, properties={'employees'
- : relationship(Employee)})
+ pjoin = polymorphic_union(
+ {'manager': managers_table, 'engineer': engineers_table},
+ 'type', 'pjoin')
+ mapper(Company, companies, properties={
+ 'employees': relationship(Employee)})
employee_mapper = mapper(Employee, pjoin,
polymorphic_on=pjoin.c.type)
manager_mapper = mapper(Manager, managers_table,
c2 = session.query(Company).get(c.id)
assert set([repr(x) for x in c2.employees]) \
== set(['Engineer Kurt knows how to hack',
- 'Manager Tom knows how to manage things'])
+ 'Manager Tom knows how to manage things'])
self.assert_sql_count(testing.db, go, 2)
session.expunge_all()
def go():
c2 = \
- session.query(Company).options(joinedload(Company.employees)).get(c.id)
+ session.query(Company).options(
+ joinedload(Company.employees)).get(c.id)
assert set([repr(x) for x in c2.employees]) \
== set(['Engineer Kurt knows how to hack',
- 'Manager Tom knows how to manage things'])
+ 'Manager Tom knows how to manage things'])
self.assert_sql_count(testing.db, go, 1)
@classmethod
def define_tables(cls, metadata):
- Table('a_table', metadata, Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ Table('a_table', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('some_dest_id', Integer, ForeignKey('dest_table.id')),
Column('aname', String(50)))
- Table('b_table', metadata, Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ Table('b_table', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('some_dest_id', Integer, ForeignKey('dest_table.id')),
Column('bname', String(50)))
- Table('c_table', metadata, Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
- Column('some_dest_id', Integer, ForeignKey('dest_table.id')),
- Column('cname', String(50)))
+ Table('c_table', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('some_dest_id', Integer, ForeignKey('dest_table.id')),
+ Column('cname', String(50)))
- Table('dest_table', metadata, Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ Table('dest_table', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(50)))
@classmethod
def test_noninherited_warning(self):
A, B, b_table, a_table, Dest, dest_table = (self.classes.A,
- self.classes.B,
- self.tables.b_table,
- self.tables.a_table,
- self.classes.Dest,
- self.tables.dest_table)
+ self.classes.B,
+ self.tables.b_table,
+ self.tables.a_table,
+ self.classes.Dest,
+ self.tables.dest_table)
mapper(A, a_table, properties={'some_dest': relationship(Dest)})
mapper(B, b_table, inherits=A, concrete=True)
def test_inheriting(self):
A, B, b_table, a_table, Dest, dest_table = (self.classes.A,
- self.classes.B,
- self.tables.b_table,
- self.tables.a_table,
- self.classes.Dest,
- self.tables.dest_table)
+ self.classes.B,
+ self.tables.b_table,
+ self.tables.a_table,
+ self.classes.Dest,
+ self.tables.dest_table)
mapper(A, a_table, properties={
- 'some_dest': relationship(Dest,back_populates='many_a')
- })
+ 'some_dest': relationship(Dest, back_populates='many_a')
+ })
mapper(B, b_table, inherits=A, concrete=True,
properties={
- 'some_dest': relationship(Dest, back_populates='many_b')
- })
+ 'some_dest': relationship(Dest, back_populates='many_b')
+ })
mapper(Dest, dest_table, properties={
- 'many_a': relationship(A,back_populates='some_dest'),
- 'many_b': relationship(B,back_populates='some_dest')
- })
+ 'many_a': relationship(A, back_populates='some_dest'),
+ 'many_b': relationship(B, back_populates='some_dest')
+ })
sess = sessionmaker()()
dest1 = Dest(name='c1')
dest2 = Dest(name='c2')
mapper(Dest, dest_table)
configure_mappers()
-
def test_polymorphic_backref(self):
"""test multiple backrefs to the same polymorphically-loading
attribute."""
- A, C, B, c_table, b_table, a_table, Dest, dest_table = (self.classes.A,
- self.classes.C,
- self.classes.B,
- self.tables.c_table,
- self.tables.b_table,
- self.tables.a_table,
- self.classes.Dest,
- self.tables.dest_table)
-
+ A, C, B, c_table, b_table, a_table, Dest, dest_table = (
+ self.classes.A, self.classes.C, self.classes.B, self.tables.
+ c_table, self.tables.b_table, self.tables.a_table, self.classes.
+ Dest, self.tables.dest_table)
- ajoin = polymorphic_union({'a': a_table, 'b': b_table, 'c':c_table},
- 'type','ajoin')
+ ajoin = polymorphic_union({'a': a_table, 'b': b_table, 'c': c_table},
+ 'type', 'ajoin')
mapper(
A,
a_table,
polymorphic_identity='a',
properties={
'some_dest': relationship(Dest, back_populates='many_a')
- },
- )
+ },
+ )
mapper(
B,
b_table,
concrete=True,
polymorphic_identity='b',
properties={
- 'some_dest': relationship(Dest, back_populates='many_a')},
- )
+ 'some_dest': relationship(Dest, back_populates='many_a')},
+ )
mapper(
C,
concrete=True,
polymorphic_identity='c',
properties={
- 'some_dest': relationship(Dest, back_populates='many_a')},
- )
+ 'some_dest': relationship(Dest, back_populates='many_a')},
+ )
mapper(Dest, dest_table, properties={
- 'many_a': relationship(A,
- back_populates='some_dest',
- order_by=ajoin.c.id)
- }
- )
+ 'many_a': relationship(A,
+ back_populates='some_dest',
+ order_by=ajoin.c.id)})
sess = sessionmaker()()
dest1 = Dest(name='c1')
eq_(
[
Dest(many_a=[A(aname='a1'),
- B(bname='b1'),
- B(bname='b2'),
- C(cname='c1')]),
+ B(bname='b1'),
+ B(bname='b2'),
+ C(cname='c1')]),
Dest(many_a=[A(aname='a2'), C(cname='c2')])],
- sess.query(Dest).options(joinedload(Dest.many_a)).order_by(Dest.id).all())
+ sess.query(Dest).options(joinedload(Dest.many_a))
+ .order_by(Dest.id).all())
self.assert_sql_count(testing.db, go, 1)
def test_merge_w_relationship(self):
- A, C, B, c_table, b_table, a_table, Dest, dest_table = (self.classes.A,
- self.classes.C,
- self.classes.B,
- self.tables.c_table,
- self.tables.b_table,
- self.tables.a_table,
- self.classes.Dest,
- self.tables.dest_table)
-
- ajoin = polymorphic_union({'a': a_table, 'b': b_table, 'c':c_table},
- 'type','ajoin')
+ A, C, B, c_table, b_table, a_table, Dest, dest_table = (
+ self.classes.A, self.classes.C, self.classes.B, self.tables.
+ c_table, self.tables.b_table, self.tables.a_table, self.classes.
+ Dest, self.tables.dest_table)
+
+ ajoin = polymorphic_union({'a': a_table, 'b': b_table, 'c': c_table},
+ 'type', 'ajoin')
mapper(
A,
a_table,
polymorphic_identity='a',
properties={
'some_dest': relationship(Dest, back_populates='many_a')
- },
- )
+ }
+ )
mapper(
B,
b_table,
concrete=True,
polymorphic_identity='b',
properties={
- 'some_dest': relationship(Dest, back_populates='many_a')},
- )
+ 'some_dest': relationship(Dest, back_populates='many_a')}
+ )
mapper(
C,
concrete=True,
polymorphic_identity='c',
properties={
- 'some_dest': relationship(Dest, back_populates='many_a')},
- )
+ 'some_dest': relationship(Dest, back_populates='many_a')}
+ )
mapper(Dest, dest_table, properties={
- 'many_a': relationship(A,
- back_populates='some_dest',
- order_by=ajoin.c.id)
- }
- )
+ 'many_a': relationship(A,
+ back_populates='some_dest',
+ order_by=ajoin.c.id)
+ })
assert C.some_dest.property.parent is class_mapper(C)
assert B.some_dest.property.parent is class_mapper(B)
eq_(merged_c1.some_dest.name, 'd2')
eq_(merged_c1.some_dest_id, c1.some_dest_id)
+
class ManyToManyTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('base', metadata, Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True))
+ test_needs_autoincrement=True))
Table('sub', metadata, Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True))
- Table('base_mtom', metadata, Column('base_id', Integer,
- ForeignKey('base.id'), primary_key=True),
+ test_needs_autoincrement=True))
+ Table('base_mtom', metadata,
+ Column('base_id', Integer, ForeignKey('base.id'),
+ primary_key=True),
Column('related_id', Integer, ForeignKey('related.id'),
- primary_key=True))
- Table('sub_mtom', metadata, Column('base_id', Integer,
- ForeignKey('sub.id'), primary_key=True),
+ primary_key=True))
+ Table('sub_mtom', metadata,
+ Column('base_id', Integer, ForeignKey('sub.id'),
+ primary_key=True),
Column('related_id', Integer, ForeignKey('related.id'),
- primary_key=True))
- Table('related', metadata, Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True))
+ primary_key=True))
+ Table('related', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True))
@classmethod
def setup_classes(cls):
class Related(cls.Comparable):
pass
-
def test_selective_relationships(self):
- sub, base_mtom, Related, Base, related, sub_mtom, base, Sub = (self.tables.sub,
- self.tables.base_mtom,
- self.classes.Related,
- self.classes.Base,
- self.tables.related,
- self.tables.sub_mtom,
- self.tables.base,
- self.classes.Sub)
-
- mapper(Base, base, properties={'related': relationship(Related,
- secondary=base_mtom, backref='bases',
- order_by=related.c.id)})
+ sub, base_mtom, Related, Base, related, sub_mtom, base, Sub = (
+ self.tables.sub, self.tables.base_mtom, self.classes.Related, self.
+ classes.Base, self.tables.related, self.tables.sub_mtom, self.
+ tables.base, self.classes.Sub)
+
+ mapper(Base, base, properties={'related': relationship(
+ Related, secondary=base_mtom, backref='bases',
+ order_by=related.c.id)})
mapper(Sub, sub, inherits=Base, concrete=True,
properties={'related': relationship(Related,
- secondary=sub_mtom, backref='subs',
- order_by=related.c.id)})
+ secondary=sub_mtom,
+ backref='subs',
+ order_by=related.c.id)})
mapper(Related, related)
sess = sessionmaker()()
b1, s1, r1, r2, r3 = Base(), Sub(), Related(), Related(), \
@classmethod
def define_tables(cls, metadata):
global offices_table, refugees_table
- refugees_table = Table('refugee', metadata, Column('refugee_fid'
- , Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('refugee_name', String(30),
- key='name'))
- offices_table = Table('office', metadata, Column('office_fid',
- Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('office_name', String(30),
- key='name'))
+ refugees_table = Table(
+ 'refugee', metadata,
+ Column(
+ 'refugee_fid', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('refugee_name', String(30),
+ key='name'))
+ offices_table = Table(
+ 'office', metadata,
+ Column(
+ 'office_fid', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('office_name', String(30),
+ key='name'))
@classmethod
def insert_data(cls):
- refugees_table.insert().execute(dict(refugee_fid=1,
- name='refugee1'), dict(refugee_fid=2, name='refugee2'
- ))
- offices_table.insert().execute(dict(office_fid=1,
- name='office1'), dict(office_fid=2, name='office2'))
+ refugees_table.insert().execute(
+ dict(refugee_fid=1, name='refugee1'),
+ dict(refugee_fid=2, name='refugee2'))
+ offices_table.insert().execute(
+ dict(office_fid=1, name='office1'),
+ dict(office_fid=2, name='office2'))
def test_keys(self):
- pjoin = polymorphic_union({'refugee': refugees_table, 'office'
- : offices_table}, 'type', 'pjoin')
+ pjoin = polymorphic_union(
+ {'refugee': refugees_table, 'office': offices_table},
+ 'type', 'pjoin')
class Location(object):
pass
class Office(Location):
pass
-
location_mapper = mapper(Location, pjoin,
polymorphic_on=pjoin.c.type,
polymorphic_identity='location')
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.schema import Table, Column
+
class BaseObject(object):
def __init__(self, *args, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
+
+
class Publication(BaseObject):
pass
+
class Issue(BaseObject):
pass
+
class Location(BaseObject):
def __repr__(self):
- return "%s(%s, %s)" % (self.__class__.__name__, str(getattr(self, 'issue_id', None)), repr(str(self._name.name)))
+ return "%s(%s, %s)" % (self.__class__.__name__,
+ str(getattr(self, 'issue_id', None)),
+ repr(str(self._name.name)))
def _get_name(self):
return self._name
def _set_name(self, name):
session = create_session()
- s = session.query(LocationName).filter(LocationName.name==name).first()
+ s = session.query(LocationName).filter(LocationName.name == name)\
+ .first()
session.expunge_all()
if s is not None:
self._name = s
break
- if found == False:
+ if found is False:
self._name = LocationName(name=name)
name = property(_get_name, _set_name)
+
class LocationName(BaseObject):
def __repr__(self):
return "%s()" % (self.__class__.__name__)
+
class PageSize(BaseObject):
def __repr__(self):
- return "%s(%sx%s, %s)" % (self.__class__.__name__, self.width, self.height, self.name)
+ return "%s(%sx%s, %s)" % (self.__class__.__name__, self.width,
+ self.height, self.name)
+
class Magazine(BaseObject):
def __repr__(self):
- return "%s(%s, %s)" % (self.__class__.__name__, repr(self.location), repr(self.size))
+ return "%s(%s, %s)" % (self.__class__.__name__, repr(self.location),
+ repr(self.size))
+
class Page(BaseObject):
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, str(self.page_no))
+
class MagazinePage(Page):
def __repr__(self):
- return "%s(%s, %s)" % (self.__class__.__name__, str(self.page_no), repr(self.magazine))
+ return "%s(%s, %s)" % (self.__class__.__name__, str(self.page_no),
+ repr(self.magazine))
+
class ClassifiedPage(MagazinePage):
pass
class MagazineTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
- global publication_table, issue_table, location_table, location_name_table, magazine_table, \
- page_table, magazine_page_table, classified_page_table, page_size_table
+ global publication_table, issue_table, location_table,\
+ location_name_table, magazine_table, page_table,\
+ magazine_page_table, classified_page_table, page_size_table
publication_table = Table('publication', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(45), default=''),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(45), default=''))
issue_table = Table('issue', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('publication_id', Integer, ForeignKey('publication.id')),
- Column('issue', Integer),
- )
- location_table = Table('location', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('publication_id', Integer,
+ ForeignKey('publication.id')),
+ Column('issue', Integer))
+ location_table = Table(
+ 'location', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('issue_id', Integer, ForeignKey('issue.id')),
Column('ref', CHAR(3), default=''),
- Column('location_name_id', Integer, ForeignKey('location_name.id')),
- )
+ Column('location_name_id', Integer,
+ ForeignKey('location_name.id')))
location_name_table = Table('location_name', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(45), default=''),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(45), default=''))
magazine_table = Table('magazine', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('location_id', Integer, ForeignKey('location.id')),
- Column('page_size_id', Integer, ForeignKey('page_size.id')),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('location_id', Integer,
+ ForeignKey('location.id')),
+ Column('page_size_id', Integer,
+ ForeignKey('page_size.id')),)
page_table = Table('page', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('page_no', Integer),
- Column('type', CHAR(1), default='p'),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('page_no', Integer),
+ Column('type', CHAR(1), default='p'))
magazine_page_table = Table('magazine_page', metadata,
- Column('page_id', Integer, ForeignKey('page.id'), primary_key=True),
- Column('magazine_id', Integer, ForeignKey('magazine.id')),
- Column('orders', Text, default=''),
- )
- classified_page_table = Table('classified_page', metadata,
- Column('magazine_page_id', Integer, ForeignKey('magazine_page.page_id'), primary_key=True),
- Column('titles', String(45), default=''),
- )
+ Column('page_id', Integer,
+ ForeignKey('page.id'),
+ primary_key=True),
+ Column('magazine_id', Integer,
+ ForeignKey('magazine.id')),
+ Column('orders', Text, default=''))
+ classified_page_table = Table(
+ 'classified_page', metadata,
+ Column('magazine_page_id', Integer,
+ ForeignKey('magazine_page.page_id'),
+ primary_key=True),
+ Column('titles', String(45),
+ default=''),)
page_size_table = Table('page_size', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('width', Integer),
- Column('height', Integer),
- Column('name', String(45), default=''),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('width', Integer),
+ Column('height', Integer),
+ Column('name', String(45), default=''))
+
def _generate_round_trip_test(use_unions=False, use_joins=False):
def test_roundtrip(self):
publication_mapper = mapper(Publication, publication_table)
- issue_mapper = mapper(Issue, issue_table, properties = {
- 'publication': relationship(Publication, backref=backref('issues', cascade="all, delete-orphan")),
- })
+ issue_mapper = mapper(
+ Issue,
+ issue_table,
+ properties={
+ 'publication': relationship(Publication,
+ backref=backref(
+ 'issues',
+ cascade="all, delete-orphan"))
+ })
location_name_mapper = mapper(LocationName, location_name_table)
- location_mapper = mapper(Location, location_table, properties = {
- 'issue': relationship(Issue, backref=backref('locations', lazy='joined', cascade="all, delete-orphan")),
- '_name': relationship(LocationName),
- })
+ location_mapper = mapper(
+ Location, location_table,
+ properties={
+ 'issue':
+ relationship(
+ Issue,
+ backref=backref(
+ 'locations', lazy='joined',
+ cascade="all, delete-orphan")),
+ '_name': relationship(LocationName), })
page_size_mapper = mapper(PageSize, page_size_table)
- magazine_mapper = mapper(Magazine, magazine_table, properties = {
- 'location': relationship(Location, backref=backref('magazine', uselist=False)),
+ magazine_mapper = mapper(Magazine, magazine_table, properties={
+ 'location': relationship(Location, backref=backref('magazine',
+ uselist=False)),
'size': relationship(PageSize),
})
if use_unions:
- page_join = polymorphic_union(
- {
- 'm': page_table.join(magazine_page_table),
- 'c': page_table.join(magazine_page_table).join(classified_page_table),
- 'p': page_table.select(page_table.c.type=='p'),
- }, None, 'page_join')
- page_mapper = mapper(Page, page_table, with_polymorphic=('*', page_join), polymorphic_on=page_join.c.type, polymorphic_identity='p')
+ page_join = polymorphic_union({'m': page_table.join(
+ magazine_page_table),
+ 'c': page_table.join(
+ magazine_page_table).join(
+ classified_page_table),
+ 'p': page_table.select(
+ page_table.c.type == 'p'), },
+ None, 'page_join')
+ page_mapper = mapper(Page, page_table,
+ with_polymorphic=('*', page_join),
+ polymorphic_on=page_join.c.type,
+ polymorphic_identity='p')
elif use_joins:
- page_join = page_table.outerjoin(magazine_page_table).outerjoin(classified_page_table)
- page_mapper = mapper(Page, page_table, with_polymorphic=('*', page_join), polymorphic_on=page_table.c.type, polymorphic_identity='p')
+ page_join = page_table.outerjoin(
+ magazine_page_table).outerjoin(classified_page_table)
+ page_mapper = mapper(Page, page_table,
+ with_polymorphic=('*', page_join),
+ polymorphic_on=page_table.c.type,
+ polymorphic_identity='p')
else:
- page_mapper = mapper(Page, page_table, polymorphic_on=page_table.c.type, polymorphic_identity='p')
+ page_mapper = mapper(
+ Page, page_table, polymorphic_on=page_table.c.type,
+ polymorphic_identity='p')
if use_unions:
- magazine_join = polymorphic_union(
- {
- 'm': page_table.join(magazine_page_table),
- 'c': page_table.join(magazine_page_table).join(classified_page_table),
- }, None, 'page_join')
- magazine_page_mapper = mapper(MagazinePage, magazine_page_table, with_polymorphic=('*', magazine_join), inherits=page_mapper, polymorphic_identity='m', properties={
- 'magazine': relationship(Magazine, backref=backref('pages', order_by=magazine_join.c.page_no))
- })
+ magazine_join = polymorphic_union({'m': page_table.join(
+ magazine_page_table),
+ 'c': page_table.join(
+ magazine_page_table).join(
+ classified_page_table), },
+ None, 'page_join')
+ magazine_page_mapper = mapper(
+ MagazinePage, magazine_page_table,
+ with_polymorphic=('*', magazine_join),
+ inherits=page_mapper, polymorphic_identity='m',
+ properties={
+ 'magazine':
+ relationship(
+ Magazine,
+ backref=backref(
+ 'pages',
+ order_by=magazine_join.c.page_no))})
elif use_joins:
- magazine_join = page_table.join(magazine_page_table).outerjoin(classified_page_table)
- magazine_page_mapper = mapper(MagazinePage, magazine_page_table, with_polymorphic=('*', magazine_join), inherits=page_mapper, polymorphic_identity='m', properties={
- 'magazine': relationship(Magazine, backref=backref('pages', order_by=page_table.c.page_no))
- })
+ magazine_join = page_table.join(
+ magazine_page_table).outerjoin(classified_page_table)
+ magazine_page_mapper = mapper(
+ MagazinePage, magazine_page_table,
+ with_polymorphic=('*', magazine_join),
+ inherits=page_mapper, polymorphic_identity='m',
+ properties={
+ 'magazine':
+ relationship(
+ Magazine,
+ backref=backref(
+ 'pages', order_by=page_table.c.page_no))})
else:
- magazine_page_mapper = mapper(MagazinePage, magazine_page_table, inherits=page_mapper, polymorphic_identity='m', properties={
- 'magazine': relationship(Magazine, backref=backref('pages', order_by=page_table.c.page_no))
- })
+ magazine_page_mapper = mapper(
+ MagazinePage, magazine_page_table, inherits=page_mapper,
+ polymorphic_identity='m',
+ properties={
+ 'magazine':
+ relationship(
+ Magazine,
+ backref=backref(
+ 'pages', order_by=page_table.c.page_no))})
classified_page_mapper = mapper(ClassifiedPage,
- classified_page_table,
- inherits=magazine_page_mapper,
- polymorphic_identity='c',
- primary_key=[page_table.c.id])
-
+ classified_page_table,
+ inherits=magazine_page_mapper,
+ polymorphic_identity='c',
+ primary_key=[page_table.c.id])
session = create_session()
pub = Publication(name='Test')
- issue = Issue(issue=46,publication=pub)
- location = Location(ref='ABC',name='London',issue=issue)
+ issue = Issue(issue=46, publication=pub)
+ location = Location(ref='ABC', name='London', issue=issue)
- page_size = PageSize(name='A4',width=210,height=297)
+ page_size = PageSize(name='A4', width=210, height=297)
- magazine = Magazine(location=location,size=page_size)
+ magazine = Magazine(location=location, size=page_size)
- page = ClassifiedPage(magazine=magazine,page_no=1)
- page2 = MagazinePage(magazine=magazine,page_no=2)
- page3 = ClassifiedPage(magazine=magazine,page_no=3)
+ page = ClassifiedPage(magazine=magazine, page_no=1)
+ page2 = MagazinePage(magazine=magazine, page_no=2)
+ page3 = ClassifiedPage(magazine=magazine, page_no=3)
session.add(pub)
-
session.flush()
print([x for x in session])
session.expunge_all()
session.flush()
session.expunge_all()
- p = session.query(Publication).filter(Publication.name=="Test").one()
+ p = session.query(Publication).filter(Publication.name == "Test").one()
print(p.issues[0].locations[0].magazine.pages)
print([page, page2, page3])
- assert repr(p.issues[0].locations[0].magazine.pages) == repr([page, page2, page3]), repr(p.issues[0].locations[0].magazine.pages)
+ assert repr(
+ p.issues[0].locations[0].magazine.pages) == repr(
+ [page, page2, page3]), repr(
+ p.issues[0].locations[0].magazine.pages)
test_roundtrip = function_named(
- test_roundtrip, "test_%s" % (not use_union and (use_joins and "joins" or "select") or "unions"))
+ test_roundtrip, "test_%s" %
+ (not use_union and (use_joins and "joins" or "select") or "unions"))
setattr(MagazineTest, test_roundtrip.__name__, test_roundtrip)
+
for (use_union, use_join) in [(True, False), (False, True), (False, False)]:
_generate_round_trip_test(use_union, use_join)
-
-
global user_group_map
principals = Table('principals', metadata,
- Column('principal_id', Integer,
- Sequence('principal_id_seq', optional=False),
- primary_key=True),
- Column('name', String(50), nullable=False))
+ Column('principal_id', Integer,
+ Sequence('principal_id_seq', optional=False),
+ primary_key=True),
+ Column('name', String(50), nullable=False))
users = Table('prin_users', metadata,
- Column('principal_id', Integer,
- ForeignKey('principals.principal_id'), primary_key=True),
- Column('password', String(50), nullable=False),
- Column('email', String(50), nullable=False),
- Column('login_id', String(50), nullable=False))
+ Column('principal_id', Integer,
+ ForeignKey('principals.principal_id'),
+ primary_key=True),
+ Column('password', String(50), nullable=False),
+ Column('email', String(50), nullable=False),
+ Column('login_id', String(50), nullable=False))
groups = Table('prin_groups', metadata,
- Column('principal_id', Integer,
- ForeignKey('principals.principal_id'), primary_key=True))
-
- user_group_map = Table('prin_user_group_map', metadata,
- Column('user_id', Integer, ForeignKey( "prin_users.principal_id"),
- primary_key=True ),
- Column('group_id', Integer, ForeignKey( "prin_groups.principal_id"),
- primary_key=True ),
- )
+ Column(
+ 'principal_id', Integer,
+ ForeignKey('principals.principal_id'),
+ primary_key=True))
+
+ user_group_map = Table(
+ 'prin_user_group_map', metadata,
+ Column(
+ 'user_id', Integer, ForeignKey("prin_users.principal_id"),
+ primary_key=True),
+ Column(
+ 'group_id', Integer, ForeignKey("prin_groups.principal_id"),
+ primary_key=True),)
def test_basic(self):
class Principal(object):
mapper(Group, groups, inherits=Principal, properties={
'users': relationship(User, secondary=user_group_map,
- lazy='select', backref="groups")
- })
+ lazy='select', backref="groups")
+ })
g = Group(name="group1")
- g.users.append(User(name="user1", password="pw", email="foo@bar.com", login_id="lg1"))
+ g.users.append(
+ User(
+ name="user1", password="pw", email="foo@bar.com",
+ login_id="lg1"))
sess = create_session()
sess.add(g)
sess.flush()
# TODO: put an assertion
+
class InheritTest2(fixtures.MappedTest):
"""deals with inheritance and many-to-many relationships"""
@classmethod
def define_tables(cls, metadata):
global foo, bar, foo_bar
foo = Table('foo', metadata,
- Column('id', Integer, Sequence('foo_id_seq', optional=True),
- primary_key=True),
- Column('data', String(20)),
- )
+ Column('id', Integer,
+ Sequence('foo_id_seq', optional=True),
+ primary_key=True),
+ Column('data', String(20)))
bar = Table('bar', metadata,
- Column('bid', Integer, ForeignKey('foo.id'), primary_key=True),
- #Column('fid', Integer, ForeignKey('foo.id'), )
- )
+ Column('bid', Integer, ForeignKey('foo.id'),
+ primary_key=True))
foo_bar = Table('foo_bar', metadata,
- Column('foo_id', Integer, ForeignKey('foo.id')),
- Column('bar_id', Integer, ForeignKey('bar.bid')))
+ Column('foo_id', Integer, ForeignKey('foo.id')),
+ Column('bar_id', Integer, ForeignKey('bar.bid')))
def test_get(self):
class Foo(object):
def __init__(self, data=None):
self.data = data
- class Bar(Foo):pass
+
+ class Bar(Foo):
+ pass
mapper(Foo, foo)
mapper(Bar, bar, inherits=Foo)
self.data = data
mapper(Foo, foo)
+
class Bar(Foo):
pass
sess.flush()
sess.expunge_all()
- l = sess.query(Bar).all()
- print(l[0])
- print(l[0].foos)
- self.assert_unordered_result(l, Bar,
-# {'id':1, 'data':'barfoo', 'bid':1, 'foos':(Foo, [{'id':2,'data':'subfoo1'}, {'id':3,'data':'subfoo2'}])},
- {'id':b.id, 'data':'barfoo', 'foos':(Foo, [{'id':f1.id,'data':'subfoo1'}, {'id':f2.id,'data':'subfoo2'}])},
- )
+ result = sess.query(Bar).all()
+ print(result[0])
+ print(result[0].foos)
+ self.assert_unordered_result(result, Bar,
+ {'id': b.id,
+ 'data': 'barfoo',
+ 'foos': (
+ Foo, [{'id': f1.id,
+ 'data': 'subfoo1'},
+ {'id': f2.id,
+ 'data': 'subfoo2'}])})
+
class InheritTest3(fixtures.MappedTest):
"""deals with inheritance and many-to-many relationships"""
def define_tables(cls, metadata):
global foo, bar, blub, bar_foo, blub_bar, blub_foo
- # the 'data' columns are to appease SQLite which cant handle a blank INSERT
+ # the 'data' columns are to appease SQLite which can't handle a blank
+ # INSERT
foo = Table('foo', metadata,
- Column('id', Integer, Sequence('foo_seq', optional=True),
- primary_key=True),
- Column('data', String(20)))
+ Column('id', Integer, Sequence('foo_seq', optional=True),
+ primary_key=True),
+ Column('data', String(20)))
- bar = Table('bar', metadata,
- Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
- Column('bar_data', String(20)))
+ bar = Table('bar', metadata, Column('id', Integer, ForeignKey(
+ 'foo.id'), primary_key=True), Column('bar_data', String(20)))
blub = Table('blub', metadata,
- Column('id', Integer, ForeignKey('bar.id'), primary_key=True),
- Column('blub_data', String(20)))
+ Column('id', Integer, ForeignKey('bar.id'),
+ primary_key=True),
+ Column('blub_data', String(20)))
bar_foo = Table('bar_foo', metadata,
- Column('bar_id', Integer, ForeignKey('bar.id')),
- Column('foo_id', Integer, ForeignKey('foo.id')))
+ Column('bar_id', Integer, ForeignKey('bar.id')),
+ Column('foo_id', Integer, ForeignKey('foo.id')))
blub_bar = Table('bar_blub', metadata,
- Column('blub_id', Integer, ForeignKey('blub.id')),
- Column('bar_id', Integer, ForeignKey('bar.id')))
+ Column('blub_id', Integer, ForeignKey('blub.id')),
+ Column('bar_id', Integer, ForeignKey('bar.id')))
blub_foo = Table('blub_foo', metadata,
- Column('blub_id', Integer, ForeignKey('blub.id')),
- Column('foo_id', Integer, ForeignKey('foo.id')))
+ Column('blub_id', Integer, ForeignKey('blub.id')),
+ Column('foo_id', Integer, ForeignKey('foo.id')))
def test_basic(self):
class Foo(object):
def __init__(self, data=None):
self.data = data
+
def __repr__(self):
return "Foo id %d, data %s" % (self.id, self.data)
mapper(Foo, foo)
return "Bar id %d, data %s" % (self.id, self.data)
mapper(Bar, bar, inherits=Foo, properties={
- 'foos' :relationship(Foo, secondary=bar_foo, lazy='select')
+ 'foos': relationship(Foo, secondary=bar_foo, lazy='select')
})
sess = create_session()
sess.flush()
compare = [repr(b)] + sorted([repr(o) for o in b.foos])
sess.expunge_all()
- l = sess.query(Bar).all()
- print(repr(l[0]) + repr(l[0].foos))
- found = [repr(l[0])] + sorted([repr(o) for o in l[0].foos])
+ result = sess.query(Bar).all()
+ print(repr(result[0]) + repr(result[0].foos))
+ found = [repr(result[0])] + sorted([repr(o) for o in result[0].foos])
eq_(found, compare)
def test_advanced(self):
class Foo(object):
def __init__(self, data=None):
self.data = data
+
def __repr__(self):
return "Foo id %d, data %s" % (self.id, self.data)
mapper(Foo, foo)
class Blub(Bar):
def __repr__(self):
return "Blub id %d, data %s, bars %s, foos %s" % (
- self.id, self.data, repr([b for b in self.bars]),
- repr([f for f in self.foos]))
+ self.id, self.data, repr([b for b in self.bars]),
+ repr([f for f in self.foos]))
mapper(Blub, blub, inherits=Bar, properties={
'bars': relationship(Bar, secondary=blub_bar, lazy='joined'),
blubid = bl1.id
sess.expunge_all()
- l = sess.query(Blub).all()
- print(l)
- self.assert_(repr(l[0]) == compare)
+ result = sess.query(Blub).all()
+ print(result)
+ self.assert_(repr(result[0]) == compare)
sess.expunge_all()
x = sess.query(Blub).filter_by(id=blubid).one()
print(x)
self.assert_(repr(x) == compare)
-
-
@classmethod
def define_tables(cls, metadata):
global Table1, Table1B, Table2, Table3, Data
- table1 = Table('table1', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('related_id', Integer, ForeignKey('table1.id'), nullable=True),
- Column('type', String(30)),
- Column('name', String(30))
- )
-
- table2 = Table('table2', metadata,
- Column('id', Integer, ForeignKey('table1.id'), primary_key=True),
- )
-
- table3 = Table('table3', metadata,
- Column('id', Integer, ForeignKey('table1.id'), primary_key=True),
- )
-
- data = Table('data', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ table1 = Table(
+ 'table1', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column(
+ 'related_id', Integer, ForeignKey('table1.id'),
+ nullable=True),
+ Column('type', String(30)),
+ Column('name', String(30)))
+
+ table2 = Table(
+ 'table2', metadata,
+ Column(
+ 'id', Integer, ForeignKey('table1.id'),
+ primary_key=True),)
+
+ table3 = Table(
+ 'table3', metadata,
+ Column(
+ 'id', Integer, ForeignKey('table1.id'),
+ primary_key=True),)
+
+ data = Table(
+ 'data', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('node_id', Integer, ForeignKey('table1.id')),
- Column('data', String(30))
- )
+ Column('data', String(30)))
- #join = polymorphic_union(
- # {
- # 'table3' : table1.join(table3),
- # 'table2' : table1.join(table2),
- # 'table1' : table1.select(table1.c.type.in_(['table1', 'table1b'])),
- # }, None, 'pjoin')
+ # join = polymorphic_union(
+ # {
+ # 'table3' : table1.join(table3),
+ # 'table2' : table1.join(table2),
+ # 'table1' : table1.select(table1.c.type.in_(['table1', 'table1b'])),
+ # }, None, 'pjoin')
join = table1.outerjoin(table2).outerjoin(table3).alias('pjoin')
- #join = None
+ # join = None
class Table1(object):
def __init__(self, name, data=None):
self.name = name
if data is not None:
self.data = data
+
def __repr__(self):
- return "%s(%s, %s, %s)" % (self.__class__.__name__, self.id, repr(str(self.name)), repr(self.data))
+ return "%s(%s, %s, %s)" % (
+ self.__class__.__name__, self.id, repr(str(self.name)),
+ repr(self.data))
class Table1B(Table1):
pass
class Data(object):
def __init__(self, data):
self.data = data
+
def __repr__(self):
- return "%s(%s, %s)" % (self.__class__.__name__, self.id, repr(str(self.data)))
+ return "%s(%s, %s)" % (
+ self.__class__.__name__, self.id, repr(str(self.data)))
try:
- # this is how the mapping used to work. ensure that this raises an error now
- table1_mapper = mapper(Table1, table1,
- select_table=join,
- polymorphic_on=table1.c.type,
- polymorphic_identity='table1',
- properties={
- 'nxt': relationship(Table1,
- backref=backref('prev', foreignkey=join.c.id, uselist=False),
- uselist=False, primaryjoin=join.c.id==join.c.related_id),
- 'data':relationship(mapper(Data, data))
- })
+ # this is how the mapping used to work. ensure that this raises an
+ # error now
+ table1_mapper = mapper(
+ Table1, table1, select_table=join,
+ polymorphic_on=table1.c.type,
+ polymorphic_identity='table1',
+ properties={
+ 'nxt': relationship(
+ Table1,
+ backref=backref('prev',
+ foreignkey=join.c.id,
+ uselist=False),
+ uselist=False,
+ primaryjoin=join.c.id == join.c.related_id),
+ 'data': relationship(mapper(Data, data))
+ })
configure_mappers()
assert False
- except:
+ except Exception:
assert True
clear_mappers()
# currently, the "eager" relationships degrade to lazy relationships
# due to the polymorphic load.
- # the "nxt" relationship used to have a "lazy='joined'" on it, but the EagerLoader raises the "self-referential"
- # exception now. since eager loading would never work for that relationship anyway, its better that the user
+ # the "nxt" relationship used to have a "lazy='joined'" on it, but the
+ # EagerLoader raises the "self-referential"
+ # exception now. since eager loading would never work for that
+ # relationship anyway, its better that the user
# gets an exception instead of it silently not eager loading.
- # NOTE: using "nxt" instead of "next" to avoid 2to3 turning it into __next__() for some reason.
- table1_mapper = mapper(Table1, table1,
- #select_table=join,
- polymorphic_on=table1.c.type,
- polymorphic_identity='table1',
- properties={
- 'nxt': relationship(Table1,
- backref=backref('prev', remote_side=table1.c.id, uselist=False),
- uselist=False, primaryjoin=table1.c.id==table1.c.related_id),
- 'data':relationship(mapper(Data, data), lazy='joined', order_by=data.c.id)
- }
- )
-
- table1b_mapper = mapper(Table1B, inherits=table1_mapper, polymorphic_identity='table1b')
+ # NOTE: using "nxt" instead of "next" to avoid 2to3 turning it into
+ # __next__() for some reason.
+ table1_mapper = mapper(
+ Table1, table1,
+ # select_table=join,
+ polymorphic_on=table1.c.type,
+ polymorphic_identity='table1',
+ properties={
+ 'nxt': relationship(
+ Table1,
+ backref=backref(
+ 'prev', remote_side=table1.c.id, uselist=False),
+ uselist=False,
+ primaryjoin=table1.c.id == table1.c.related_id),
+ 'data': relationship(mapper(Data, data), lazy='joined',
+ order_by=data.c.id)
+ }
+ )
+
+ table1b_mapper = mapper(
+ Table1B, inherits=table1_mapper, polymorphic_identity='table1b')
table2_mapper = mapper(Table2, table2,
inherits=table1_mapper,
polymorphic_identity='table2')
- table3_mapper = mapper(Table3, table3, inherits=table1_mapper, polymorphic_identity='table3')
+ table3_mapper = mapper(
+ Table3, table3, inherits=table1_mapper,
+ polymorphic_identity='table3')
configure_mappers()
- assert table1_mapper.primary_key == (table1.c.id,), table1_mapper.primary_key
+ assert table1_mapper.primary_key == (
+ table1.c.id,), table1_mapper.primary_key
def test_one(self):
self._testlist([Table1, Table2, Table1, Table2])
self._testlist([Table3])
def test_three(self):
- self._testlist([Table2, Table1, Table1B, Table3, Table3, Table1B, Table1B, Table2, Table1])
+ self._testlist([Table2, Table1, Table1B, Table3,
+ Table3, Table1B, Table1B, Table2, Table1])
def test_four(self):
self._testlist([
- Table2('t2', [Data('data1'), Data('data2')]),
- Table1('t1', []),
- Table3('t3', [Data('data3')]),
- Table1B('t1b', [Data('data4'), Data('data5')])
- ])
+ Table2('t2', [Data('data1'), Data('data2')]),
+ Table1('t1', []),
+ Table3('t3', [Data('data3')]),
+ Table1B('t1b', [Data('data4'), Data('data5')])
+ ])
def _testlist(self, classes):
- sess = create_session( )
+ sess = create_session()
# create objects in a linked list
count = 1
node = n
original = repr(assertlist)
-
# clear and query forwards
sess.expunge_all()
node = sess.query(Table1).order_by(Table1.id).\
- filter(Table1.id==t.id).first()
+ filter(Table1.id == t.id).first()
assertlist = []
while (node):
assertlist.append(node)
# clear and query backwards
sess.expunge_all()
node = sess.query(Table1).order_by(Table1.id).\
- filter(Table1.id==obj.id).first()
+ filter(Table1.id == obj.id).first()
assertlist = []
while (node):
assertlist.insert(0, node)
# everything should match !
assert original == forwards == backwards
-
from test.orm import _fixtures
from sqlalchemy.testing import fixtures
+
class Person(fixtures.ComparableEntity):
pass
+
+
class Engineer(Person):
pass
+
+
class Manager(Person):
pass
+
+
class Boss(Manager):
pass
+
+
class Company(fixtures.ComparableEntity):
pass
+
class PolymorphTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global companies, people, engineers, managers, boss
companies = Table('companies', metadata,
- Column('company_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)))
-
- people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('company_id', Integer, ForeignKey('companies.company_id'),
- nullable=False),
- Column('name', String(50)),
- Column('type', String(30)))
-
- engineers = Table('engineers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('status', String(30)),
- Column('engineer_name', String(50)),
- Column('primary_language', String(50)),
- )
-
- managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('status', String(30)),
- Column('manager_name', String(50))
- )
-
- boss = Table('boss', metadata,
- Column('boss_id', Integer, ForeignKey('managers.person_id'),
- primary_key=True),
- Column('golf_swing', String(30)),
- )
+ Column('company_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)))
+
+ people = Table(
+ 'people', metadata,
+ Column(
+ 'person_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column(
+ 'company_id', Integer, ForeignKey('companies.company_id'),
+ nullable=False),
+ Column('name', String(50)),
+ Column('type', String(30)))
+
+ engineers = Table(
+ 'engineers', metadata,
+ Column(
+ 'person_id', Integer, ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('status', String(30)),
+ Column('engineer_name', String(50)),
+ Column('primary_language', String(50)))
+
+ managers = Table(
+ 'managers', metadata,
+ Column(
+ 'person_id', Integer, ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('status', String(30)),
+ Column('manager_name', String(50)))
+
+ boss = Table(
+ 'boss', metadata,
+ Column(
+ 'boss_id', Integer, ForeignKey('managers.person_id'),
+ primary_key=True),
+ Column('golf_swing', String(30)))
metadata.create_all()
+
class InsertOrderTest(PolymorphTest):
def test_insert_order(self):
"""test that classes of multiple types mix up mapper inserts
person_join = polymorphic_union(
{
- 'engineer':people.join(engineers),
- 'manager':people.join(managers),
- 'person':people.select(people.c.type=='person'),
+ 'engineer': people.join(engineers),
+ 'manager': people.join(managers),
+ 'person': people.select(people.c.type == 'person'),
}, None, 'pjoin')
person_mapper = mapper(Person, people,
- with_polymorphic=('*', person_join),
- polymorphic_on=person_join.c.type,
- polymorphic_identity='person')
+ with_polymorphic=('*', person_join),
+ polymorphic_on=person_join.c.type,
+ polymorphic_identity='person')
mapper(Engineer, engineers, inherits=person_mapper,
- polymorphic_identity='engineer')
+ polymorphic_identity='engineer')
mapper(Manager, managers, inherits=person_mapper,
- polymorphic_identity='manager')
+ polymorphic_identity='manager')
mapper(Company, companies, properties={
'employees': relationship(Person,
- backref='company',
- order_by=person_join.c.person_id)
+ backref='company',
+ order_by=person_join.c.person_id)
})
session = create_session()
c = Company(name='company1')
- c.employees.append(Manager(status='AAB', manager_name='manager1'
- , name='pointy haired boss'))
+ c.employees.append(
+ Manager(
+ status='AAB', manager_name='manager1',
+ name='pointy haired boss'))
c.employees.append(Engineer(status='BBA',
- engineer_name='engineer1',
- primary_language='java', name='dilbert'))
+ engineer_name='engineer1',
+ primary_language='java', name='dilbert'))
c.employees.append(Person(status='HHH', name='joesmith'))
c.employees.append(Engineer(status='CGG',
- engineer_name='engineer2',
- primary_language='python', name='wally'))
- c.employees.append(Manager(status='ABA', manager_name='manager2'
- , name='jsmith'))
+ engineer_name='engineer2',
+ primary_language='python', name='wally'))
+ c.employees.append(
+ Manager(
+ status='ABA', manager_name='manager2',
+ name='jsmith'))
session.add(c)
session.flush()
session.expunge_all()
eq_(session.query(Company).get(c.company_id), c)
+
class RoundTripTest(PolymorphTest):
pass
+
def _generate_round_trip_test(include_base, lazy_relationship,
- redefine_colprop, with_polymorphic):
+ redefine_colprop, with_polymorphic):
"""generates a round trip test.
include_base - whether or not to include the base 'person' type in
use_literal_join - primary join condition is explicitly specified
"""
+
def test_roundtrip(self):
if with_polymorphic == 'unions':
if include_base:
person_join = polymorphic_union(
{
- 'engineer':people.join(engineers),
- 'manager':people.join(managers),
- 'person':people.select(people.c.type=='person'),
+ 'engineer': people.join(engineers),
+ 'manager': people.join(managers),
+ 'person': people.select(people.c.type == 'person'),
}, None, 'pjoin')
else:
person_join = polymorphic_union(
{
- 'engineer':people.join(engineers),
- 'manager':people.join(managers),
+ 'engineer': people.join(engineers),
+ 'manager': people.join(managers),
}, None, 'pjoin')
manager_join = people.join(managers).outerjoin(boss)
manager_with_polymorphic = ['*', manager_join]
elif with_polymorphic == 'joins':
person_join = people.outerjoin(engineers).outerjoin(managers).\
- outerjoin(boss)
+ outerjoin(boss)
manager_join = people.join(managers).outerjoin(boss)
person_with_polymorphic = ['*', person_join]
manager_with_polymorphic = ['*', manager_join]
if redefine_colprop:
person_mapper = mapper(Person, people,
- with_polymorphic=person_with_polymorphic,
- polymorphic_on=people.c.type,
- polymorphic_identity='person',
- properties= {'person_name':people.c.name})
+ with_polymorphic=person_with_polymorphic,
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person',
+ properties={'person_name': people.c.name})
else:
person_mapper = mapper(Person, people,
- with_polymorphic=person_with_polymorphic,
- polymorphic_on=people.c.type,
- polymorphic_identity='person')
+ with_polymorphic=person_with_polymorphic,
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person')
mapper(Engineer, engineers, inherits=person_mapper,
- polymorphic_identity='engineer')
+ polymorphic_identity='engineer')
mapper(Manager, managers, inherits=person_mapper,
- with_polymorphic=manager_with_polymorphic,
- polymorphic_identity='manager')
+ with_polymorphic=manager_with_polymorphic,
+ polymorphic_identity='manager')
mapper(Boss, boss, inherits=Manager, polymorphic_identity='boss')
mapper(Company, companies, properties={
'employees': relationship(Person, lazy=lazy_relationship,
- cascade="all, delete-orphan",
- backref="company", order_by=people.c.person_id
- )
+ cascade="all, delete-orphan",
+ backref="company",
+ order_by=people.c.person_id)
})
if redefine_colprop:
person_attribute_name = 'name'
employees = [
- Manager(status='AAB', manager_name='manager1',
- **{person_attribute_name:'pointy haired boss'}),
- Engineer(status='BBA', engineer_name='engineer1',
- primary_language='java',
- **{person_attribute_name:'dilbert'}),
- ]
+ Manager(status='AAB', manager_name='manager1',
+ **{person_attribute_name: 'pointy haired boss'}),
+ Engineer(status='BBA', engineer_name='engineer1',
+ primary_language='java',
+ **{person_attribute_name: 'dilbert'}),
+ ]
if include_base:
- employees.append(Person(**{person_attribute_name:'joesmith'}))
+ employees.append(Person(**{person_attribute_name: 'joesmith'}))
employees += [
Engineer(status='CGG', engineer_name='engineer2',
primary_language='python',
- **{person_attribute_name:'wally'}),
+ **{person_attribute_name: 'wally'}),
Manager(status='ABA', manager_name='manager2',
- **{person_attribute_name:'jsmith'})
+ **{person_attribute_name: 'jsmith'})
]
pointy = employees[0]
session.expunge_all()
eq_(session.query(Person).filter(
- Person.person_id==dilbert.person_id).one(),
- dilbert)
+ Person.person_id == dilbert.person_id).one(),
+ dilbert)
session.expunge_all()
def go():
# the "people" selectable should be adapted to be "person_join"
eq_(
session.query(Person).filter(
- getattr(Person, person_attribute_name)=='dilbert'
- ).first(),
+ getattr(Person, person_attribute_name) == 'dilbert'
+ ).first(),
dilbert
)
assert session.query(Person).filter(
- getattr(Person, person_attribute_name)=='dilbert'
- ).first().person_id
+ getattr(Person, person_attribute_name) == 'dilbert'
+ ).first().person_id
eq_(
session.query(Engineer).filter(
- getattr(Person, person_attribute_name)=='dilbert'
- ).first(),
+ getattr(Person, person_attribute_name) == 'dilbert'
+ ).first(),
dilbert
)
session.expunge_all()
def go():
- session.query(Person).filter(getattr(Person,
- person_attribute_name)=='dilbert').first()
+ session.query(Person).filter(
+ getattr(Person, person_attribute_name) == 'dilbert').first()
self.assert_sql_count(testing.db, go, 1)
session.expunge_all()
- dilbert = session.query(Person).filter(getattr(Person,
- person_attribute_name)=='dilbert').first()
+ dilbert = session.query(Person).filter(
+ getattr(Person, person_attribute_name) == 'dilbert').first()
+
def go():
# assert that only primary table is queried for
# already-present-in-session
- d = session.query(Person).filter(getattr(Person,
- person_attribute_name)=='dilbert').first()
+ d = session.query(Person).filter(
+ getattr(Person, person_attribute_name) == 'dilbert').first()
self.assert_sql_count(testing.db, go, 1)
# test standalone orphans
daboss = Boss(status='BBB',
- manager_name='boss',
- golf_swing='fore',
- **{person_attribute_name:'daboss'})
+ manager_name='boss',
+ golf_swing='fore',
+ **{person_attribute_name: 'daboss'})
session.add(daboss)
assert_raises(sa_exc.DBAPIError, session.flush)
c = session.query(Company).first()
daboss.company = c
manager_list = [e for e in c.employees
- if isinstance(e, Manager)]
+ if isinstance(e, Manager)]
session.flush()
session.expunge_all()
eq_(session.query(Manager).order_by(Manager.person_id).all(),
- manager_list)
+ manager_list)
c = session.query(Company).first()
session.delete(c)
test_roundtrip = function_named(
test_roundtrip, "test_%s%s%s_%s" % (
- (lazy_relationship and "lazy" or "eager"),
- (include_base and "_inclbase" or ""),
- (redefine_colprop and "_redefcol" or ""),
- with_polymorphic))
+ (lazy_relationship and "lazy" or "eager"),
+ (include_base and "_inclbase" or ""),
+ (redefine_colprop and "_redefcol" or ""),
+ with_polymorphic))
setattr(RoundTripTest, test_roundtrip.__name__, test_roundtrip)
+
for lazy_relationship in [True, False]:
for redefine_colprop in [True, False]:
for with_polymorphic in ['unions', 'joins', 'auto', 'none']:
if with_polymorphic == 'unions':
for include_base in [True, False]:
- _generate_round_trip_test(include_base,
- lazy_relationship,
- redefine_colprop, with_polymorphic)
+ _generate_round_trip_test(
+ include_base, lazy_relationship, redefine_colprop,
+ with_polymorphic)
else:
_generate_round_trip_test(False,
- lazy_relationship,
- redefine_colprop, with_polymorphic)
-
+ lazy_relationship,
+ redefine_colprop, with_polymorphic)
from sqlalchemy import func, desc, select
-from sqlalchemy.orm import interfaces, create_session, joinedload, joinedload_all, \
- subqueryload, subqueryload_all, aliased,\
- class_mapper, with_polymorphic
+from sqlalchemy.orm import (interfaces, create_session, joinedload,
+ joinedload_all, subqueryload, subqueryload_all,
+ aliased, class_mapper, with_polymorphic)
from sqlalchemy import exc as sa_exc
from sqlalchemy import testing
_PolymorphicPolymorphic, _PolymorphicUnions, _PolymorphicJoins,\
_PolymorphicAliasedJoins
+
class _PolymorphicTestBase(object):
__backend__ = True
global companies, paperwork, machines
people, engineers, managers, boss,\
companies, paperwork, machines = \
- cls.tables.people, cls.tables.engineers, \
+ cls.tables.people, cls.tables.engineers, \
cls.tables.managers, cls.tables.boss,\
cls.tables.companies, cls.tables.paperwork, cls.tables.machines
global c1, c2, e1, e2, e3, b1, m1
c1, c2, all_employees, c1_employees, c2_employees = \
cls.c1, cls.c2, cls.all_employees, \
- cls.c1_employees, cls.c2_employees
+ cls.c1_employees, cls.c2_employees
e1, e2, e3, b1, m1 = \
cls.e1, cls.e2, cls.e3, cls.b1, cls.m1
-
def test_loads_at_once(self):
"""
Test that all objects load from the full query, when
"""
sess = create_session()
+
def go():
eq_(
sess.query(Person).order_by(Person.person_id).all(),
all_employees)
- count = {'':14, 'Polymorphic':9}.get(self.select_type, 10)
+ count = {'': 14, 'Polymorphic': 9}.get(self.select_type, 10)
self.assert_sql_count(testing.db, go, count)
def test_primary_eager_aliasing_one(self):
# not loading the subclass table, the joinedload doesn't happen.
sess = create_session()
+
def go():
eq_(sess.query(Person).order_by(Person.person_id)
.options(joinedload(Engineer.machines))[1:3],
all_employees[1:3])
- count = {'':6, 'Polymorphic':3}.get(self.select_type, 4)
+ count = {'': 6, 'Polymorphic': 3}.get(self.select_type, 4)
self.assert_sql_count(testing.db, go, count)
def test_primary_eager_aliasing_two(self):
sess = create_session()
+
def go():
eq_(sess.query(Person).order_by(Person.person_id)
.options(subqueryload(Engineer.machines)).all(),
all_employees)
- count = {'':14, 'Polymorphic':7}.get(self.select_type, 8)
+ count = {'': 14, 'Polymorphic': 7}.get(self.select_type, 8)
self.assert_sql_count(testing.db, go, count)
def test_primary_eager_aliasing_three(self):
# assert the JOINs don't over JOIN
sess = create_session()
+
def go():
eq_(sess.query(Person).with_polymorphic('*')
.order_by(Person.person_id)
sess.query(Person).with_polymorphic('*')
.options(joinedload(Engineer.machines))
.limit(2).offset(1).with_labels().subquery()
- ).scalar(),
- 2)
+ ).scalar(), 2)
def test_get_one(self):
"""
.filter(Paperwork.description.like('%#2%')).all(),
[m1])
-
def test_join_from_with_polymorphic_aliased_one(self):
sess = create_session()
eq_(sess.query(Person)
.all(),
expected)
-
def test_subclass_option_pathing(self):
from sqlalchemy.orm import defer
sess = create_session()
dilbert = sess.query(Person).\
- options(defer(Engineer.machines, Machine.name)).\
- filter(Person.name == 'dilbert').first()
+ options(defer(Engineer.machines, Machine.name)).\
+ filter(Person.name == 'dilbert').first()
m = dilbert.machines[0]
assert 'name' not in m.__dict__
eq_(m.name, 'IBM ThinkPad')
def test_with_polymorphic_one(self):
sess = create_session()
+
def go():
eq_(sess.query(Person)
.with_polymorphic(Engineer)
self._emps_wo_relationships_fixture()[0:1])
self.assert_sql_count(testing.db, go, 1)
-
def test_with_polymorphic_two(self):
sess = create_session()
+
def go():
eq_(sess.query(Person)
.with_polymorphic('*').order_by(Person.person_id).all(),
def test_with_polymorphic_three(self):
sess = create_session()
+
def go():
eq_(sess.query(Person)
.with_polymorphic(Engineer).
- order_by(Person.person_id).all(),
+ order_by(Person.person_id).all(),
self._emps_wo_relationships_fixture())
self.assert_sql_count(testing.db, go, 3)
def test_with_polymorphic_four(self):
sess = create_session()
+
def go():
eq_(sess.query(Person)
.with_polymorphic(
def test_with_polymorphic_five(self):
sess = create_session()
+
def go():
# limit the polymorphic join down to just "Person",
# overriding select_table
sess = create_session()
assert_raises(sa_exc.InvalidRequestError,
- sess.query(Person).with_polymorphic, Paperwork)
+ sess.query(Person).with_polymorphic, Paperwork)
assert_raises(sa_exc.InvalidRequestError,
- sess.query(Engineer).with_polymorphic, Boss)
+ sess.query(Engineer).with_polymorphic, Boss)
assert_raises(sa_exc.InvalidRequestError,
- sess.query(Engineer).with_polymorphic, Person)
+ sess.query(Engineer).with_polymorphic, Person)
def test_with_polymorphic_seven(self):
sess = create_session()
order_by(Person.person_id).all(),
self._emps_wo_relationships_fixture())
-
def test_relationship_to_polymorphic_one(self):
expected = self._company_with_emps_machines_fixture()
sess = create_session()
+
def go():
# test load Companies with lazy load to 'employees'
eq_(sess.query(Company).all(), expected)
- count = {'':10, 'Polymorphic':5}.get(self.select_type, 6)
+ count = {'': 10, 'Polymorphic': 5}.get(self.select_type, 6)
self.assert_sql_count(testing.db, go, count)
def test_relationship_to_polymorphic_two(self):
expected = self._company_with_emps_machines_fixture()
sess = create_session()
+
def go():
# with #2438, of_type() is recognized. This
# overrides the with_polymorphic of the mapper
expected)
# in the old case, we would get this
- #count = {'':7, 'Polymorphic':1}.get(self.select_type, 2)
+ # count = {'':7, 'Polymorphic':1}.get(self.select_type, 2)
# query one is company->Person/Engineer->Machines
# query two is managers + boss for row #3
sess = create_session()
sess = create_session()
+
def go():
eq_(sess.query(Company)
.options(subqueryload_all(
# the old case where subqueryload_all
# didn't work with of_tyoe
- #count = { '':8, 'Joins':4, 'Unions':4, 'Polymorphic':3,
+ # count = { '':8, 'Joins':4, 'Unions':4, 'Polymorphic':3,
# 'AliasedJoins':4}[self.select_type]
# query one is company->Person/Engineer->Machines
count = 5
self.assert_sql_count(testing.db, go, count)
-
def test_joinedload_on_subclass(self):
sess = create_session()
expected = [
.with_polymorphic('*')
.options(joinedload(Engineer.machines))
.filter(Person.name == 'dilbert').all(),
- expected)
+ expected)
self.assert_sql_count(testing.db, go, 1)
def test_subqueryload_on_subclass(self):
machines=[
Machine(name="IBM ThinkPad"),
Machine(name="IPhone")])]
+
def go():
# test load People with subqueryload to engineers + machines
eq_(sess.query(Person)
.with_polymorphic('*')
.options(subqueryload(Engineer.machines))
.filter(Person.name == 'dilbert').all(),
- expected)
+ expected)
self.assert_sql_count(testing.db, go, 2)
def test_query_subclass_join_to_base_relationship(self):
.filter(Machine.name.ilike("%thinkpad%")).all(),
[c1])
-
def test_join_to_subclass_eight(self):
sess = create_session()
eq_(sess.query(Person)
.filter(Paperwork.description.like('%#%')).all(),
[c1, c2])
-
def test_explicit_polymorphic_join_one(self):
sess = create_session()
primary_language='java',
person_id=1,
type='engineer'),
- 'MegaCorp, Inc.'),
+ 'MegaCorp, Inc.'),
(Engineer(
status='regular engineer',
engineer_name='wally',
primary_language='c++',
person_id=2,
type='engineer'),
- 'MegaCorp, Inc.'),
+ 'MegaCorp, Inc.'),
(Engineer(
status='elbonian engineer',
engineer_name='vlad',
primary_language='cobol',
person_id=5,
type='engineer'),
- 'Elbonia, Inc.')]
+ 'Elbonia, Inc.')]
eq_(sess.query(Engineer, Company.name)
.join(Company.employees)
.order_by(Person.person_id)
.order_by(Company.name).all(),
expected)
- #def test_mixed_entities(self):
+ # def test_mixed_entities(self):
# sess = create_session()
# TODO: I think raise error on these for now. different
# inheritance/loading schemes have different results here,
# sess.query(Person.name, Engineer.primary_language).all(),
# [])
- #def test_mixed_entities(self):
+ # def test_mixed_entities(self):
# sess = create_session()
# eq_(sess.query(
# Person.name,
correlate(paliased).as_scalar() == "Elbonia, Inc.").all(),
[(e3.name, )])
+
class PolymorphicTest(_PolymorphicTestBase, _Polymorphic):
def test_join_to_subclass_four(self):
sess = create_session()
pass
-class PolymorphicPolymorphicTest(_PolymorphicTestBase, _PolymorphicPolymorphic):
+class PolymorphicPolymorphicTest(
+ _PolymorphicTestBase, _PolymorphicPolymorphic):
__dialect__ = 'default'
def test_aliased_not_polluted_by_join(self):
sess = create_session()
palias = aliased(Person)
self.assert_compile(
- sess.query(palias, Company.name)
- .order_by(palias.person_id)
- .join(Person, Company.employees)
- .filter(palias.name == 'dilbert'),
+ sess.query(palias, Company.name).order_by(palias.person_id).
+ join(Person, Company.employees).filter(palias.name == 'dilbert'),
"SELECT anon_1.people_person_id AS anon_1_people_person_id, "
"anon_1.people_company_id AS anon_1_people_company_id, "
"anon_1.people_name AS anon_1_people_name, "
"anon_1.people_type AS anon_1_people_type, "
"anon_1.engineers_person_id AS anon_1_engineers_person_id, "
"anon_1.engineers_status AS anon_1_engineers_status, "
- "anon_1.engineers_engineer_name AS anon_1_engineers_engineer_name, "
+ "anon_1.engineers_engineer_name AS anon_1_engineers_engineer_name, " # noqa
"anon_1.engineers_primary_language AS "
- "anon_1_engineers_primary_language, "
+ "anon_1_engineers_primary_language, "
"anon_1.managers_person_id AS anon_1_managers_person_id, "
"anon_1.managers_status AS anon_1_managers_status, "
"anon_1.managers_manager_name AS anon_1_managers_manager_name, "
"anon_1.boss_golf_swing AS anon_1_boss_golf_swing, "
"companies.name AS companies_name "
"FROM (SELECT people.person_id AS people_person_id, "
- "people.company_id AS people_company_id, "
- "people.name AS people_name, people.type AS people_type, "
- "engineers.person_id AS engineers_person_id, "
- "engineers.status AS engineers_status, "
- "engineers.engineer_name AS engineers_engineer_name, "
- "engineers.primary_language AS engineers_primary_language, "
- "managers.person_id AS managers_person_id, "
- "managers.status AS managers_status, "
- "managers.manager_name AS managers_manager_name, "
- "boss.boss_id AS boss_boss_id, "
- "boss.golf_swing AS boss_golf_swing "
- "FROM people LEFT OUTER JOIN engineers "
- "ON people.person_id = engineers.person_id "
- "LEFT OUTER JOIN managers "
- "ON people.person_id = managers.person_id LEFT OUTER JOIN boss "
- "ON managers.person_id = boss.boss_id) AS anon_1, "
- "companies JOIN "
- "(people LEFT OUTER JOIN engineers "
- "ON people.person_id = engineers.person_id "
- "LEFT OUTER JOIN managers "
- "ON people.person_id = managers.person_id "
- "LEFT OUTER JOIN boss ON managers.person_id = boss.boss_id) "
- "ON companies.company_id = people.company_id "
- "WHERE anon_1.people_name = :people_name_1 "
- "ORDER BY anon_1.people_person_id"
- )
+ "people.company_id AS people_company_id, "
+ "people.name AS people_name, people.type AS people_type, "
+ "engineers.person_id AS engineers_person_id, "
+ "engineers.status AS engineers_status, "
+ "engineers.engineer_name AS engineers_engineer_name, "
+ "engineers.primary_language AS engineers_primary_language, "
+ "managers.person_id AS managers_person_id, "
+ "managers.status AS managers_status, "
+ "managers.manager_name AS managers_manager_name, "
+ "boss.boss_id AS boss_boss_id, "
+ "boss.golf_swing AS boss_golf_swing "
+ "FROM people LEFT OUTER JOIN engineers "
+ "ON people.person_id = engineers.person_id "
+ "LEFT OUTER JOIN managers "
+ "ON people.person_id = managers.person_id LEFT OUTER JOIN boss "
+ "ON managers.person_id = boss.boss_id) AS anon_1, "
+ "companies JOIN "
+ "(people LEFT OUTER JOIN engineers "
+ "ON people.person_id = engineers.person_id "
+ "LEFT OUTER JOIN managers "
+ "ON people.person_id = managers.person_id "
+ "LEFT OUTER JOIN boss ON managers.person_id = boss.boss_id) "
+ "ON companies.company_id = people.company_id "
+ "WHERE anon_1.people_name = :people_name_1 "
+ "ORDER BY anon_1.people_person_id")
def test_flat_aliased_w_select_from(self):
sess = create_session()
palias = aliased(Person, flat=True)
self.assert_compile(
- sess.query(palias, Company.name)
- .select_from(palias)
- .order_by(palias.person_id)
- .join(Person, Company.employees)
- .filter(palias.name == 'dilbert'),
+ sess.query(palias, Company.name).
+ select_from(palias).order_by(palias.person_id).join(
+ Person, Company.employees).filter(palias.name == 'dilbert'),
"SELECT people_1.person_id AS people_1_person_id, "
"people_1.company_id AS people_1_company_id, "
"people_1.name AS people_1_name, people_1.type AS people_1_type, "
"ON managers_1.person_id = boss_1.boss_id, "
"companies JOIN (people LEFT OUTER JOIN engineers "
"ON people.person_id = engineers.person_id "
- "LEFT OUTER JOIN managers ON people.person_id = managers.person_id "
+ "LEFT OUTER JOIN managers "
+ "ON people.person_id = managers.person_id "
"LEFT OUTER JOIN boss ON managers.person_id = boss.boss_id) "
"ON companies.company_id = people.company_id "
- "WHERE people_1.name = :name_1 ORDER BY people_1.person_id"
- )
+ "WHERE people_1.name = :name_1 ORDER BY people_1.person_id")
class PolymorphicUnionsTest(_PolymorphicTestBase, _PolymorphicUnions):
super(PolymorphicUnionsTest, self).test_correlation_one()
-class PolymorphicAliasedJoinsTest(_PolymorphicTestBase, _PolymorphicAliasedJoins):
+class PolymorphicAliasedJoinsTest(
+ _PolymorphicTestBase, _PolymorphicAliasedJoins):
@testing.fails()
def test_correlation_one(self):
super(PolymorphicAliasedJoinsTest, self).test_correlation_one()
+
class PolymorphicJoinsTest(_PolymorphicTestBase, _PolymorphicJoins):
pass
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.schema import Table, Column
+
class InheritTest(fixtures.MappedTest):
- """tests some various inheritance round trips involving a particular set of polymorphic inheritance relationships"""
+ """tests some various inheritance round trips involving a particular set of
+ polymorphic inheritance relationships"""
@classmethod
def define_tables(cls, metadata):
global products_table, specification_table, documents_table
global Product, Detail, Assembly, SpecLine, Document, RasterDocument
- products_table = Table('products', metadata,
- Column('product_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('product_type', String(128)),
- Column('name', String(128)),
- Column('mark', String(128)),
- )
+ products_table = Table(
+ 'products', metadata,
+ Column('product_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('product_type', String(128)),
+ Column('name', String(128)),
+ Column('mark', String(128)),)
- specification_table = Table('specification', metadata,
- Column('spec_line_id', Integer, primary_key=True, test_needs_autoincrement=True),
+ specification_table = Table(
+ 'specification', metadata,
+ Column('spec_line_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('master_id', Integer, ForeignKey("products.product_id"),
- nullable=True),
+ nullable=True),
Column('slave_id', Integer, ForeignKey("products.product_id"),
- nullable=True),
- Column('quantity', Float, default=1.),
- )
+ nullable=True),
+ Column('quantity', Float, default=1.))
- documents_table = Table('documents', metadata,
- Column('document_id', Integer, primary_key=True, test_needs_autoincrement=True),
+ documents_table = Table(
+ 'documents', metadata,
+ Column('document_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('document_type', String(128)),
Column('product_id', Integer, ForeignKey('products.product_id')),
- Column('create_date', DateTime, default=lambda:datetime.now()),
- Column('last_updated', DateTime, default=lambda:datetime.now(),
- onupdate=lambda:datetime.now()),
+ Column('create_date', DateTime, default=lambda: datetime.now()),
+ Column('last_updated', DateTime, default=lambda: datetime.now(),
+ onupdate=lambda: datetime.now()),
Column('name', String(128)),
Column('data', LargeBinary),
- Column('size', Integer, default=0),
- )
+ Column('size', Integer, default=0))
class Product(object):
def __init__(self, name, mark=''):
self.name = name
self.mark = mark
+
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.name)
class Assembly(Product):
def __repr__(self):
- return Product.__repr__(self) + " " + " ".join([x + "=" + repr(getattr(self, x, None)) for x in ['specification', 'documents']])
+ return Product.__repr__(self) + " " + " ".join(
+ [x + "=" + repr(getattr(self, x, None))
+ for x in ['specification', 'documents']])
class SpecLine(object):
def __init__(self, master=None, slave=None, quantity=1):
return '<%s %.01f %s>' % (
self.__class__.__name__,
self.quantity or 0.,
- repr(self.slave)
- )
+ repr(self.slave))
class Document(object):
def __init__(self, name, data=None):
self.name = name
self.data = data
+
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.name)
def test_one(self):
product_mapper = mapper(Product, products_table,
- polymorphic_on=products_table.c.product_type,
- polymorphic_identity='product')
+ polymorphic_on=products_table.c.product_type,
+ polymorphic_identity='product')
detail_mapper = mapper(Detail, inherits=product_mapper,
- polymorphic_identity='detail')
+ polymorphic_identity='detail')
assembly_mapper = mapper(Assembly, inherits=product_mapper,
- polymorphic_identity='assembly')
+ polymorphic_identity='assembly')
- specification_mapper = mapper(SpecLine, specification_table,
+ specification_mapper = mapper(
+ SpecLine, specification_table,
properties=dict(
- master=relationship(Assembly,
- foreign_keys=[specification_table.c.master_id],
- primaryjoin=specification_table.c.master_id==products_table.c.product_id,
- lazy='select', backref=backref('specification'),
+ master=relationship(
+ Assembly, foreign_keys=[specification_table.c.master_id],
+ primaryjoin=specification_table.c.master_id ==
+ products_table.c.product_id, lazy='select',
+ backref=backref('specification'),
uselist=False),
- slave=relationship(Product,
- foreign_keys=[specification_table.c.slave_id],
- primaryjoin=specification_table.c.slave_id==products_table.c.product_id,
- lazy='select', uselist=False),
- quantity=specification_table.c.quantity,
- )
- )
+ slave=relationship(
+ Product, foreign_keys=[specification_table.c.slave_id],
+ primaryjoin=specification_table.c.slave_id ==
+ products_table.c.product_id, lazy='select', uselist=False),
+ quantity=specification_table.c.quantity))
- session = create_session( )
+ session = create_session()
a1 = Assembly(name='a1')
new = repr(a1)
print(orig)
print(new)
- assert orig == new == '<Assembly a1> specification=[<SpecLine 1.0 <Product p1>>, <SpecLine 1.0 <Detail d1>>] documents=None'
+ assert orig == new == '<Assembly a1> specification=[<SpecLine 1.0 ' \
+ '<Product p1>>, <SpecLine 1.0 <Detail d1>>] documents=None'
def test_two(self):
product_mapper = mapper(Product, products_table,
- polymorphic_on=products_table.c.product_type,
- polymorphic_identity='product')
+ polymorphic_on=products_table.c.product_type,
+ polymorphic_identity='product')
detail_mapper = mapper(Detail, inherits=product_mapper,
- polymorphic_identity='detail')
+ polymorphic_identity='detail')
- specification_mapper = mapper(SpecLine, specification_table,
+ specification_mapper = mapper(
+ SpecLine, specification_table,
properties=dict(
- slave=relationship(Product,
- foreign_keys=[specification_table.c.slave_id],
- primaryjoin=specification_table.c.slave_id==products_table.c.product_id,
- lazy='select', uselist=False),
- )
- )
+ slave=relationship(
+ Product, foreign_keys=[specification_table.c.slave_id],
+ primaryjoin=specification_table.c.slave_id ==
+ products_table.c.product_id, lazy='select',
+ uselist=False)))
- session = create_session( )
+ session = create_session()
s = SpecLine(slave=Product(name='p1'))
s2 = SpecLine(slave=Detail(name='d1'))
new = repr(session.query(SpecLine).all())
print(orig)
print(new)
- assert orig == new == '[<SpecLine 1.0 <Product p1>>, <SpecLine 1.0 <Detail d1>>]'
+ assert orig == new == '[<SpecLine 1.0 <Product p1>>, ' \
+ '<SpecLine 1.0 <Detail d1>>]'
def test_three(self):
product_mapper = mapper(Product, products_table,
- polymorphic_on=products_table.c.product_type,
- polymorphic_identity='product')
+ polymorphic_on=products_table.c.product_type,
+ polymorphic_identity='product')
detail_mapper = mapper(Detail, inherits=product_mapper,
- polymorphic_identity='detail')
+ polymorphic_identity='detail')
assembly_mapper = mapper(Assembly, inherits=product_mapper,
- polymorphic_identity='assembly')
+ polymorphic_identity='assembly')
- specification_mapper = mapper(SpecLine, specification_table,
+ specification_mapper = mapper(
+ SpecLine, specification_table,
properties=dict(
- master=relationship(Assembly, lazy='joined', uselist=False,
+ master=relationship(
+ Assembly, lazy='joined', uselist=False,
foreign_keys=[specification_table.c.master_id],
- primaryjoin=specification_table.c.master_id==products_table.c.product_id,
- backref=backref('specification', cascade="all, delete-orphan"),
- ),
- slave=relationship(Product, lazy='joined', uselist=False,
+ primaryjoin=specification_table.c.master_id ==
+ products_table.c.product_id,
+ backref=backref(
+ 'specification', cascade="all, delete-orphan")),
+ slave=relationship(
+ Product, lazy='joined', uselist=False,
foreign_keys=[specification_table.c.slave_id],
- primaryjoin=specification_table.c.slave_id==products_table.c.product_id,
- ),
- quantity=specification_table.c.quantity,
- )
- )
+ primaryjoin=specification_table.c.slave_id ==
+ products_table.c.product_id,),
+ quantity=specification_table.c.quantity))
- document_mapper = mapper(Document, documents_table,
+ document_mapper = mapper(
+ Document, documents_table,
polymorphic_on=documents_table.c.document_type,
polymorphic_identity='document',
properties=dict(
name=documents_table.c.name,
data=deferred(documents_table.c.data),
- product=relationship(Product, lazy='select', backref=backref('documents', cascade="all, delete-orphan")),
- ),
- )
- raster_document_mapper = mapper(RasterDocument, inherits=document_mapper,
+ product=relationship(
+ Product, lazy='select',
+ backref=backref(
+ 'documents', cascade="all, delete-orphan"))))
+ raster_document_mapper = mapper(
+ RasterDocument, inherits=document_mapper,
polymorphic_identity='raster_document')
session = create_session()
new = repr(a1)
print(orig)
print(new)
- assert orig == new == '<Assembly a1> specification=[<SpecLine 1.0 <Detail d1>>] documents=[<Document doc1>, <RasterDocument doc2>]'
+ assert orig == new == '<Assembly a1> specification=' \
+ '[<SpecLine 1.0 <Detail d1>>] ' \
+ 'documents=[<Document doc1>, <RasterDocument doc2>]'
def test_four(self):
- """this tests the RasterDocument being attached to the Assembly, but *not* the Document. this means only
- a "sub-class" task, i.e. corresponding to an inheriting mapper but not the base mapper, is created. """
+ """this tests the RasterDocument being attached to the Assembly, but
+ *not* the Document. this means only a "sub-class" task, i.e.
+ corresponding to an inheriting mapper but not the base mapper,
+ is created. """
product_mapper = mapper(Product, products_table,
- polymorphic_on=products_table.c.product_type,
- polymorphic_identity='product')
+ polymorphic_on=products_table.c.product_type,
+ polymorphic_identity='product')
detail_mapper = mapper(Detail, inherits=product_mapper,
- polymorphic_identity='detail')
+ polymorphic_identity='detail')
assembly_mapper = mapper(Assembly, inherits=product_mapper,
- polymorphic_identity='assembly')
+ polymorphic_identity='assembly')
- document_mapper = mapper(Document, documents_table,
+ document_mapper = mapper(
+ Document, documents_table,
polymorphic_on=documents_table.c.document_type,
polymorphic_identity='document',
properties=dict(
name=documents_table.c.name,
data=deferred(documents_table.c.data),
- product=relationship(Product, lazy='select', backref=backref('documents', cascade="all, delete-orphan")),
- ),
- )
- raster_document_mapper = mapper(RasterDocument, inherits=document_mapper,
+ product=relationship(
+ Product, lazy='select',
+ backref=backref(
+ 'documents', cascade="all, delete-orphan"))))
+ raster_document_mapper = mapper(
+ RasterDocument, inherits=document_mapper,
polymorphic_identity='raster_document')
- session = create_session( )
+ session = create_session()
a1 = Assembly(name='a1')
a1.documents.append(RasterDocument('doc2'))
new = repr(a1)
print(orig)
print(new)
- assert orig == new == '<Assembly a1> specification=None documents=[<RasterDocument doc2>]'
+ assert orig == new == '<Assembly a1> specification=None documents=' \
+ '[<RasterDocument doc2>]'
del a1.documents[0]
session.flush()
def test_five(self):
"""tests the late compilation of mappers"""
- specification_mapper = mapper(SpecLine, specification_table,
+ specification_mapper = mapper(
+ SpecLine, specification_table,
properties=dict(
- master=relationship(Assembly, lazy='joined', uselist=False,
+ master=relationship(
+ Assembly, lazy='joined', uselist=False,
foreign_keys=[specification_table.c.master_id],
- primaryjoin=specification_table.c.master_id==products_table.c.product_id,
- backref=backref('specification'),
- ),
- slave=relationship(Product, lazy='joined', uselist=False,
+ primaryjoin=specification_table.c.master_id ==
+ products_table.c.product_id,
+ backref=backref('specification')),
+ slave=relationship(
+ Product, lazy='joined', uselist=False,
foreign_keys=[specification_table.c.slave_id],
- primaryjoin=specification_table.c.slave_id==products_table.c.product_id,
- ),
- quantity=specification_table.c.quantity,
- )
- )
+ primaryjoin=specification_table.c.slave_id ==
+ products_table.c.product_id,),
+ quantity=specification_table.c.quantity))
- product_mapper = mapper(Product, products_table,
+ product_mapper = mapper(
+ Product, products_table,
polymorphic_on=products_table.c.product_type,
polymorphic_identity='product', properties={
- 'documents' : relationship(Document, lazy='select',
- backref='product', cascade='all, delete-orphan'),
- })
+ 'documents': relationship(Document, lazy='select',
+ backref='product',
+ cascade='all, delete-orphan')})
detail_mapper = mapper(Detail, inherits=Product,
- polymorphic_identity='detail')
+ polymorphic_identity='detail')
- document_mapper = mapper(Document, documents_table,
+ document_mapper = mapper(
+ Document, documents_table,
polymorphic_on=documents_table.c.document_type,
polymorphic_identity='document',
properties=dict(
name=documents_table.c.name,
- data=deferred(documents_table.c.data),
- ),
- )
+ data=deferred(documents_table.c.data)))
raster_document_mapper = mapper(RasterDocument, inherits=Document,
- polymorphic_identity='raster_document')
+ polymorphic_identity='raster_document')
assembly_mapper = mapper(Assembly, inherits=Product,
- polymorphic_identity='assembly')
+ polymorphic_identity='assembly')
session = create_session()
new = repr(a1)
print(orig)
print(new)
- assert orig == new == '<Assembly a1> specification=[<SpecLine 1.0 <Detail d1>>] documents=[<Document doc1>, <RasterDocument doc2>]'
-
+ assert orig == new == '<Assembly a1> specification=' \
+ '[<SpecLine 1.0 <Detail d1>>] documents=[<Document doc1>, ' \
+ '<RasterDocument doc2>]'
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.testing import assert_raises, eq_, is_
+
class Company(fixtures.ComparableEntity):
pass
+
+
class Person(fixtures.ComparableEntity):
pass
+
+
class Engineer(Person):
pass
+
+
class Manager(Person):
pass
+
+
class Boss(Manager):
pass
+
+
class Machine(fixtures.ComparableEntity):
pass
+
+
class Paperwork(fixtures.ComparableEntity):
pass
+
class SelfReferentialTestJoinedToBase(fixtures.MappedTest):
run_setup_mappers = 'once'
@classmethod
def define_tables(cls, metadata):
Table('people', metadata,
- Column('person_id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('type', String(30)))
+ Column('person_id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('type', String(30)))
Table('engineers', metadata,
- Column('person_id', Integer,
- ForeignKey('people.person_id'),
- primary_key=True),
- Column('primary_language', String(50)),
- Column('reports_to_id', Integer,
- ForeignKey('people.person_id')))
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('primary_language', String(50)),
+ Column('reports_to_id', Integer,
+ ForeignKey('people.person_id')))
@classmethod
def setup_mappers(cls):
engineers, people = cls.tables.engineers, cls.tables.people
mapper(Person, people,
- polymorphic_on=people.c.type,
- polymorphic_identity='person')
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person')
mapper(Engineer, engineers,
- inherits=Person,
- inherit_condition=engineers.c.person_id == people.c.person_id,
- polymorphic_identity='engineer',
- properties={
- 'reports_to':relationship(
- Person,
- primaryjoin=
- people.c.person_id == engineers.c.reports_to_id)})
+ inherits=Person,
+ inherit_condition=engineers.c.person_id == people.c.person_id,
+ polymorphic_identity='engineer',
+ properties={
+ 'reports_to': relationship(
+ Person,
+ primaryjoin=(
+ people.c.person_id == engineers.c.reports_to_id))})
def test_has(self):
p1 = Person(name='dogbert')
sess.flush()
eq_(sess.query(Engineer)
.filter(Engineer.reports_to
- .of_type(Engineer)
- .has(Engineer.name == 'dilbert'))
+ .of_type(Engineer)
+ .has(Engineer.name == 'dilbert'))
.first(),
e2)
.filter(Person.name == 'dogbert').first(),
Engineer(name='dilbert'))
+
class SelfReferentialJ2JTest(fixtures.MappedTest):
run_setup_mappers = 'once'
@classmethod
def define_tables(cls, metadata):
people = Table('people', metadata,
- Column('person_id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('type', String(30)))
+ Column('person_id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('type', String(30)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer,
- ForeignKey('people.person_id'),
- primary_key=True),
- Column('primary_language', String(50)),
- Column('reports_to_id', Integer,
- ForeignKey('managers.person_id'))
- )
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('primary_language', String(50)),
+ Column('reports_to_id', Integer,
+ ForeignKey('managers.person_id')))
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- )
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),)
@classmethod
def setup_mappers(cls):
people = cls.tables.people
mapper(Person, people,
- polymorphic_on=people.c.type,
- polymorphic_identity='person')
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person')
mapper(Manager, managers,
- inherits=Person,
- polymorphic_identity='manager')
+ inherits=Person,
+ polymorphic_identity='manager')
mapper(Engineer, engineers,
- inherits=Person,
- polymorphic_identity='engineer',
- properties={
- 'reports_to':relationship(
- Manager,
- primaryjoin=
- managers.c.person_id == engineers.c.reports_to_id,
- backref='engineers')})
-
+ inherits=Person,
+ polymorphic_identity='engineer',
+ properties={
+ 'reports_to': relationship(
+ Manager,
+ primaryjoin=(
+ managers.c.person_id == engineers.c.reports_to_id),
+ backref='engineers')})
def test_has(self):
m1 = Manager(name='dogbert')
eq_(sess.query(Manager)
.join(Manager.engineers)
- .filter(Engineer.reports_to == None).all(),
+ .filter(Engineer.reports_to == None).all(), # noqa
[])
eq_(sess.query(Manager)
.filter(Engineer.reports_to == m1).all(),
[m1])
+
class SelfReferentialJ2JSelfTest(fixtures.MappedTest):
run_setup_mappers = 'once'
@classmethod
def define_tables(cls, metadata):
people = Table('people', metadata,
- Column('person_id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('type', String(30)))
+ Column('person_id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('type', String(30)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer,
- ForeignKey('people.person_id'),
- primary_key=True),
- Column('reports_to_id', Integer,
- ForeignKey('engineers.person_id')))
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('reports_to_id', Integer,
+ ForeignKey('engineers.person_id')))
@classmethod
def setup_mappers(cls):
people = cls.tables.people
mapper(Person, people,
- polymorphic_on=people.c.type,
- polymorphic_identity='person')
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person')
mapper(Engineer, engineers,
- inherits=Person,
- polymorphic_identity='engineer',
- properties={
- 'reports_to':relationship(
- Engineer,
- primaryjoin=
- engineers.c.person_id == engineers.c.reports_to_id,
- backref='engineers',
- remote_side=engineers.c.person_id)})
+ inherits=Person,
+ polymorphic_identity='engineer',
+ properties={
+ 'reports_to': relationship(
+ Engineer,
+ primaryjoin=(
+ engineers.c.person_id == engineers.c.reports_to_id),
+ backref='engineers',
+ remote_side=engineers.c.person_id)})
def _two_obj_fixture(self):
e1 = Engineer(name='wally')
eq_(sess.query(Engineer)
.join(Engineer.engineers, aliased=True)
- .filter(Engineer.reports_to == None).all(),
+ .filter(Engineer.reports_to == None).all(), # noqa
[])
eq_(sess.query(Engineer)
eq_(sess.query(Engineer)
.join(Engineer.engineers, aliased=True)
- .filter(Engineer.reports_to != None).all(),
+ .filter(Engineer.reports_to != None).all(), # noqa
[e1, e2])
+
class M2MFilterTest(fixtures.MappedTest):
run_setup_mappers = 'once'
@classmethod
def define_tables(cls, metadata):
organizations = Table('organizations', metadata,
- Column('id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)))
+ Column('id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)))
engineers_to_org = Table('engineers_to_org', metadata,
- Column('org_id', Integer,
- ForeignKey('organizations.id')),
- Column('engineer_id', Integer,
- ForeignKey('engineers.person_id')))
+ Column('org_id', Integer,
+ ForeignKey('organizations.id')),
+ Column('engineer_id', Integer,
+ ForeignKey('engineers.person_id')))
people = Table('people', metadata,
- Column('person_id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('type', String(30)))
+ Column('person_id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('type', String(30)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer,
- ForeignKey('people.person_id'),
- primary_key=True),
- Column('primary_language', String(50)))
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('primary_language', String(50)))
@classmethod
def setup_mappers(cls):
pass
mapper(Organization, organizations,
- properties={
- 'engineers':relationship(
- Engineer,
- secondary=engineers_to_org,
- backref='organizations')})
+ properties={
+ 'engineers': relationship(
+ Engineer,
+ secondary=engineers_to_org,
+ backref='organizations')})
mapper(Person, people,
- polymorphic_on=people.c.type,
- polymorphic_identity='person')
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person')
mapper(Engineer, engineers,
- inherits=Person,
- polymorphic_identity='engineer')
+ inherits=Person,
+ polymorphic_identity='engineer')
@classmethod
def insert_data(cls):
eq_(sess.query(Organization)
.filter(~Organization.engineers
- .of_type(Engineer)
- .contains(e1))
+ .of_type(Engineer)
+ .contains(e1))
.all(),
[Organization(name='org2')])
# this had a bug
eq_(sess.query(Organization)
.filter(~Organization.engineers
- .contains(e1))
- .all(),
+ .contains(e1))
+ .all(),
[Organization(name='org2')])
def test_any(self):
eq_(sess.query(Organization)
.filter(Organization.engineers
- .of_type(Engineer)
- .any(Engineer.name == 'e1'))
+ .of_type(Engineer)
+ .any(Engineer.name == 'e1'))
.all(),
[Organization(name='org1')])
eq_(sess.query(Organization)
.filter(Organization.engineers
- .any(Engineer.name == 'e1'))
+ .any(Engineer.name == 'e1'))
.all(),
[Organization(name='org1')])
+
class SelfReferentialM2MTest(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = "default"
@classmethod
def define_tables(cls, metadata):
Table('secondary', metadata,
- Column('left_id', Integer,
- ForeignKey('parent.id'),
- nullable=False),
- Column('right_id', Integer,
- ForeignKey('parent.id'),
- nullable=False))
+ Column('left_id', Integer,
+ ForeignKey('parent.id'),
+ nullable=False),
+ Column('right_id', Integer,
+ ForeignKey('parent.id'),
+ nullable=False))
Table('parent', metadata,
- Column('id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('cls', String(50)))
+ Column('id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('cls', String(50)))
Table('child1', metadata,
- Column('id', Integer,
- ForeignKey('parent.id'),
- primary_key=True))
+ Column('id', Integer,
+ ForeignKey('parent.id'),
+ primary_key=True))
Table('child2', metadata,
- Column('id', Integer,
- ForeignKey('parent.id'),
- primary_key=True))
+ Column('id', Integer,
+ ForeignKey('parent.id'),
+ primary_key=True))
@classmethod
def setup_classes(cls):
class Parent(cls.Basic):
pass
+
class Child1(Parent):
pass
+
class Child2(Parent):
pass
secondary = cls.tables.secondary
mapper(Parent, parent,
- polymorphic_on=parent.c.cls)
+ polymorphic_on=parent.c.cls)
mapper(Child1, child1,
- inherits=Parent,
- polymorphic_identity='child1',
- properties={
- 'left_child2':relationship(
- Child2,
- secondary=secondary,
- primaryjoin=parent.c.id == secondary.c.right_id,
- secondaryjoin=parent.c.id == secondary.c.left_id,
- uselist=False,
- backref="right_children")})
+ inherits=Parent,
+ polymorphic_identity='child1',
+ properties={
+ 'left_child2': relationship(
+ Child2,
+ secondary=secondary,
+ primaryjoin=parent.c.id == secondary.c.right_id,
+ secondaryjoin=parent.c.id == secondary.c.left_id,
+ uselist=False,
+ backref="right_children")})
mapper(Child2, child2,
- inherits=Parent,
- polymorphic_identity='child2')
+ inherits=Parent,
+ polymorphic_identity='child2')
def test_query_crit(self):
Child1, Child2 = self.classes.Child1, self.classes.Child2
# test the same again
self.assert_compile(
- sess.query(Child2)
- .join(Child2.right_children)
- .filter(Child1.left_child2 == c22)
- .with_labels().statement,
+ sess.query(Child2).join(Child2.right_children).
+ filter(Child1.left_child2 == c22).with_labels().statement,
"SELECT child2.id AS child2_id, parent.id AS parent_id, "
"parent.cls AS parent_cls FROM secondary AS secondary_1, "
"parent JOIN child2 ON parent.id = child2.id JOIN secondary AS "
"secondary_2 ON parent.id = secondary_2.left_id JOIN "
- "(parent AS parent_1 JOIN child1 AS child1_1 ON parent_1.id = child1_1.id) "
+ "(parent AS parent_1 JOIN child1 AS child1_1 "
+ "ON parent_1.id = child1_1.id) "
"ON parent_1.id = secondary_2.right_id WHERE "
"parent_1.id = secondary_1.right_id AND :param_1 = "
- "secondary_1.left_id"
- )
+ "secondary_1.left_id")
def test_eager_join(self):
Child1, Child2 = self.classes.Child1, self.classes.Child2
for row in query_.all():
assert row.left_child2
+
class EagerToSubclassTest(fixtures.MappedTest):
"""Test eager loads to subclass mappers"""
@classmethod
def define_tables(cls, metadata):
Table('parent', metadata,
- Column('id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(10)))
+ Column('id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(10)))
Table('base', metadata,
- Column('id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(10)),
- Column('related_id', Integer,
- ForeignKey('related.id')))
+ Column('id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(10)),
+ Column('related_id', Integer,
+ ForeignKey('related.id')))
Table('sub', metadata,
- Column('id', Integer,
- ForeignKey('base.id'),
- primary_key=True),
- Column('data', String(10)),
- Column('parent_id', Integer,
- ForeignKey('parent.id'),
- nullable=False))
+ Column('id', Integer,
+ ForeignKey('base.id'),
+ primary_key=True),
+ Column('data', String(10)),
+ Column('parent_id', Integer,
+ ForeignKey('parent.id'),
+ nullable=False))
Table('related', metadata,
- Column('id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(10)))
+ Column('id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(10)))
@classmethod
def setup_classes(cls):
class Parent(cls.Comparable):
pass
+
class Base(cls.Comparable):
pass
+
class Sub(Base):
pass
+
class Related(cls.Comparable):
pass
Related = cls.classes.Related
mapper(Parent, parent,
- properties={'children':relationship(Sub, order_by=sub.c.data)})
+ properties={'children': relationship(Sub, order_by=sub.c.data)})
mapper(Base, base,
- polymorphic_on=base.c.type,
- polymorphic_identity='b',
- properties={'related':relationship(Related)})
+ polymorphic_on=base.c.type,
+ polymorphic_identity='b',
+ properties={'related': relationship(Related)})
mapper(Sub, sub,
- inherits=Base,
- polymorphic_identity='s')
+ inherits=Base,
+ polymorphic_identity='s')
mapper(Related, related)
def test_joinedload(self):
Parent = self.classes.Parent
sess = Session()
+
def go():
eq_(sess.query(Parent)
.options(joinedload(Parent.children)).all(),
Parent = self.classes.Parent
Sub = self.classes.Sub
sess = Session()
+
def go():
eq_(sess.query(Parent)
.join(Parent.children)
[p1, p2])
self.assert_sql_count(testing.db, go, 3)
+
class SubClassEagerToSubClassTest(fixtures.MappedTest):
"""Test joinedloads from subclass to subclass mappers"""
@classmethod
def define_tables(cls, metadata):
Table('parent', metadata,
- Column('id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(10)),
- )
+ Column('id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(10)))
Table('subparent', metadata,
- Column('id', Integer,
- ForeignKey('parent.id'),
- primary_key=True),
- Column('data', String(10)),
- )
+ Column('id', Integer,
+ ForeignKey('parent.id'),
+ primary_key=True),
+ Column('data', String(10)))
Table('base', metadata,
- Column('id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(10)),
- )
+ Column('id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(10)))
Table('sub', metadata,
- Column('id', Integer,
- ForeignKey('base.id'),
- primary_key=True),
- Column('data', String(10)),
- Column('subparent_id', Integer,
- ForeignKey('subparent.id'),
- nullable=False)
- )
+ Column('id', Integer,
+ ForeignKey('base.id'),
+ primary_key=True),
+ Column('data', String(10)),
+ Column('subparent_id', Integer,
+ ForeignKey('subparent.id'),
+ nullable=False))
@classmethod
def setup_classes(cls):
class Parent(cls.Comparable):
pass
+
class Subparent(Parent):
pass
+
class Base(cls.Comparable):
pass
+
class Sub(Base):
pass
Subparent = cls.classes.Subparent
mapper(Parent, parent,
- polymorphic_on=parent.c.type,
- polymorphic_identity='b')
+ polymorphic_on=parent.c.type,
+ polymorphic_identity='b')
mapper(Subparent, subparent,
- inherits=Parent,
- polymorphic_identity='s',
- properties={
- 'children':relationship(Sub, order_by=base.c.id)})
+ inherits=Parent,
+ polymorphic_identity='s',
+ properties={
+ 'children': relationship(Sub, order_by=base.c.id)})
mapper(Base, base,
- polymorphic_on=base.c.type,
- polymorphic_identity='b')
+ polymorphic_on=base.c.type,
+ polymorphic_identity='b')
mapper(Sub, sub,
- inherits=Base,
- polymorphic_identity='s')
+ inherits=Base,
+ polymorphic_identity='s')
@classmethod
def insert_data(cls):
Subparent = self.classes.Subparent
sess = create_session()
+
def go():
eq_(sess.query(Subparent)
.options(joinedload(Subparent.children)).all(),
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
+
def go():
eq_(sess.query(Subparent)
.options(joinedload("children")).all(),
Subparent = self.classes.Subparent
sess = create_session()
+
def go():
eq_(sess.query(Subparent)
.join(Subparent.children)
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
+
def go():
eq_(sess.query(Subparent)
.join(Subparent.children)
Subparent = self.classes.Subparent
sess = create_session()
+
def go():
eq_(sess.query(Subparent)
.options(subqueryload(Subparent.children)).all(),
self.assert_sql_count(testing.db, go, 2)
sess.expunge_all()
+
def go():
eq_(sess.query(Subparent)
.options(subqueryload("children")).all(),
[p1, p2])
self.assert_sql_count(testing.db, go, 2)
+
class SameNamedPropTwoPolymorphicSubClassesTest(fixtures.MappedTest):
"""test pathing when two subclasses contain a different property
for the same name, and polymorphic loading is used.
@classmethod
def define_tables(cls, metadata):
Table('a', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(10))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(10)))
Table('b', metadata,
- Column('id', Integer, ForeignKey('a.id'), primary_key=True)
- )
+ Column('id', Integer, ForeignKey('a.id'), primary_key=True))
Table('btod', metadata,
- Column('bid', Integer, ForeignKey('b.id'), nullable=False),
- Column('did', Integer, ForeignKey('d.id'), nullable=False)
- )
+ Column('bid', Integer, ForeignKey('b.id'), nullable=False),
+ Column('did', Integer, ForeignKey('d.id'), nullable=False)
+ )
Table('c', metadata,
- Column('id', Integer, ForeignKey('a.id'), primary_key=True)
- )
+ Column('id', Integer, ForeignKey('a.id'), primary_key=True))
Table('ctod', metadata,
- Column('cid', Integer, ForeignKey('c.id'), nullable=False),
- Column('did', Integer, ForeignKey('d.id'), nullable=False)
- )
+ Column('cid', Integer, ForeignKey('c.id'), nullable=False),
+ Column('did', Integer, ForeignKey('d.id'), nullable=False))
Table('d', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True)
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True))
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
+
class B(A):
pass
+
class C(A):
pass
+
class D(cls.Comparable):
pass
mapper(A, cls.tables.a, polymorphic_on=cls.tables.a.c.type)
mapper(B, cls.tables.b, inherits=A, polymorphic_identity='b',
- properties={
- 'related': relationship(D, secondary=cls.tables.btod)
- })
+ properties={
+ 'related': relationship(D, secondary=cls.tables.btod)
+ })
mapper(C, cls.tables.c, inherits=A, polymorphic_identity='c',
- properties={
- 'related': relationship(D, secondary=cls.tables.ctod)
- })
+ properties={
+ 'related': relationship(D, secondary=cls.tables.ctod)
+ })
mapper(D, cls.tables.d)
-
@classmethod
def insert_data(cls):
B = cls.classes.B
session = Session()
d = session.query(D).one()
a_poly = with_polymorphic(A, [B, C])
+
def go():
for a in session.query(a_poly).\
- options(
- subqueryload(a_poly.B.related),
- subqueryload(a_poly.C.related)):
+ options(
+ subqueryload(a_poly.B.related),
+ subqueryload(a_poly.C.related)):
eq_(a.related, [d])
self.assert_sql_count(testing.db, go, 3)
session = Session()
d = session.query(D).one()
+
def go():
for a in session.query(A).with_polymorphic([B, C]).\
- options(subqueryload(B.related), subqueryload(C.related)):
+ options(subqueryload(B.related), subqueryload(C.related)):
eq_(a.related, [d])
self.assert_sql_count(testing.db, go, 3)
session = Session()
d = session.query(D).one()
a_poly = with_polymorphic(A, [B, C])
+
def go():
for a in session.query(a_poly).\
- options(
- joinedload(a_poly.B.related),
- joinedload(a_poly.C.related)):
+ options(
+ joinedload(a_poly.B.related),
+ joinedload(a_poly.C.related)):
eq_(a.related, [d])
self.assert_sql_count(testing.db, go, 1)
session = Session()
d = session.query(D).one()
+
def go():
for a in session.query(A).with_polymorphic([B, C]).\
- options(joinedload(B.related), joinedload(C.related)):
+ options(joinedload(B.related), joinedload(C.related)):
eq_(a.related, [d])
self.assert_sql_count(testing.db, go, 1)
@classmethod
def define_tables(cls, metadata):
Table('z', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True)
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True))
Table('a', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(10)),
- Column('z_id', Integer, ForeignKey('z.id'))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(10)),
+ Column('z_id', Integer, ForeignKey('z.id')))
Table('b', metadata,
- Column('id', Integer, ForeignKey('a.id'), primary_key=True)
- )
+ Column('id', Integer, ForeignKey('a.id'), primary_key=True))
Table('d', metadata,
- Column('id', Integer, ForeignKey('a.id'), primary_key=True),
- Column('b_id', Integer, ForeignKey('b.id'))
- )
+ Column('id', Integer, ForeignKey('a.id'), primary_key=True),
+ Column('b_id', Integer, ForeignKey('b.id')))
@classmethod
def setup_classes(cls):
class Z(cls.Comparable):
pass
+
class A(cls.Comparable):
pass
+
class B(A):
pass
+
class D(A):
pass
mapper(Z, cls.tables.z)
mapper(A, cls.tables.a, polymorphic_on=cls.tables.a.c.type,
- with_polymorphic='*',
- properties={
- 'zs': relationship(Z, lazy="subquery")
- })
+ with_polymorphic='*',
+ properties={
+ 'zs': relationship(Z, lazy="subquery")
+ })
mapper(B, cls.tables.b, inherits=A, polymorphic_identity='b',
- properties={
- 'related': relationship(D, lazy="subquery",
- primaryjoin=cls.tables.d.c.b_id ==
- cls.tables.b.c.id)
- })
+ properties={
+ 'related': relationship(D, lazy="subquery",
+ primaryjoin=cls.tables.d.c.b_id ==
+ cls.tables.b.c.id)
+ })
mapper(D, cls.tables.d, inherits=A, polymorphic_identity='d')
-
@classmethod
def insert_data(cls):
B = cls.classes.B
def test_2617(self):
A = self.classes.A
session = Session()
+
def go():
a1 = session.query(A).first()
eq_(a1.related, [])
@classmethod
def define_tables(cls, metadata):
Table('parent', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)))
Table('base1', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)))
Table('sub1', metadata,
- Column('id', Integer, ForeignKey('base1.id'), primary_key=True),
- Column('parent_id', ForeignKey('parent.id')),
- Column('subdata', String(30))
- )
+ Column('id', Integer, ForeignKey('base1.id'), primary_key=True),
+ Column('parent_id', ForeignKey('parent.id')),
+ Column('subdata', String(30)))
Table('base2', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('base1_id', ForeignKey('base1.id')),
- Column('data', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('base1_id', ForeignKey('base1.id')),
+ Column('data', String(30)))
Table('sub2', metadata,
- Column('id', Integer, ForeignKey('base2.id'), primary_key=True),
- Column('subdata', String(30))
- )
+ Column('id', Integer, ForeignKey('base2.id'), primary_key=True),
+ Column('subdata', String(30)))
Table('ep1', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('base2_id', Integer, ForeignKey('base2.id')),
- Column('data', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('base2_id', Integer, ForeignKey('base2.id')),
+ Column('data', String(30)))
Table('ep2', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('base2_id', Integer, ForeignKey('base2.id')),
- Column('data', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('base2_id', Integer, ForeignKey('base2.id')),
+ Column('data', String(30)))
@classmethod
def setup_classes(cls):
class Parent(cls.Comparable):
pass
+
class Base1(cls.Comparable):
pass
+
class Sub1(Base1):
pass
+
class Base2(cls.Comparable):
pass
+
class Sub2(Base2):
pass
+
class EP1(cls.Comparable):
pass
+
class EP2(cls.Comparable):
pass
Parent, Base1, Base2, Sub1, Sub2, EP1, EP2 = cls._classes()
mapper(Parent, cls.tables.parent, properties={
- 'sub1': relationship(Sub1)
- })
+ 'sub1': relationship(Sub1)
+ })
mapper(Base1, cls.tables.base1, properties={
- 'sub2': relationship(Sub2)
- })
+ 'sub2': relationship(Sub2)
+ })
mapper(Sub1, cls.tables.sub1, inherits=Base1)
mapper(Base2, cls.tables.base2, properties={
- 'ep1': relationship(EP1),
- 'ep2': relationship(EP2)
- })
+ 'ep1': relationship(EP1),
+ 'ep2': relationship(EP2)
+ })
mapper(Sub2, cls.tables.sub2, inherits=Base2)
mapper(EP1, cls.tables.ep1)
mapper(EP2, cls.tables.ep2)
s = Session()
self.assert_compile(
s.query(Parent).join(Parent.sub1, Sub1.sub2).
- join(Sub2.ep1).
- join(Sub2.ep2),
+ join(Sub2.ep1).
+ join(Sub2.ep2),
"SELECT parent.id AS parent_id, parent.data AS parent_data "
"FROM parent JOIN (base1 JOIN sub1 ON base1.id = sub1.id) "
"ON parent.id = sub1.parent_id JOIN "
s = Session()
self.assert_compile(
s.query(Parent).join(Parent.sub1).
- join(s2a, Sub1.sub2),
+ join(s2a, Sub1.sub2),
"SELECT parent.id AS parent_id, parent.data AS parent_data "
"FROM parent JOIN (base1 JOIN sub1 ON base1.id = sub1.id) "
"ON parent.id = sub1.parent_id JOIN "
s = Session()
self.assert_compile(
s.query(Base1).join(Base1.sub2).
- join(Sub2.ep1).\
- join(Sub2.ep2),
+ join(Sub2.ep1).
+ join(Sub2.ep2),
"SELECT base1.id AS base1_id, base1.data AS base1_data "
"FROM base1 JOIN (base2 JOIN sub2 "
"ON base2.id = sub2.id) ON base1.id = "
s = Session()
self.assert_compile(
s.query(Sub2).join(Base1, Base1.id == Sub2.base1_id).
- join(Sub2.ep1).\
- join(Sub2.ep2),
+ join(Sub2.ep1).
+ join(Sub2.ep2),
"SELECT sub2.id AS sub2_id, base2.id AS base2_id, "
"base2.base1_id AS base2_base1_id, base2.data AS base2_data, "
"sub2.subdata AS sub2_subdata "
s = Session()
self.assert_compile(
s.query(Sub2).join(Sub1, Sub1.id == Sub2.base1_id).
- join(Sub2.ep1).\
- join(Sub2.ep2),
+ join(Sub2.ep1).
+ join(Sub2.ep2),
"SELECT sub2.id AS sub2_id, base2.id AS base2_id, "
"base2.base1_id AS base2_base1_id, base2.data AS base2_data, "
"sub2.subdata AS sub2_subdata "
s = Session()
self.assert_compile(
- s.query(Sub2).from_self().\
- join(Sub2.ep1).
- join(Sub2.ep2),
+ s.query(Sub2).from_self().
+ join(Sub2.ep1).
+ join(Sub2.ep2),
"SELECT anon_1.sub2_id AS anon_1_sub2_id, "
"anon_1.base2_id AS anon_1_base2_id, "
"anon_1.base2_base1_id AS anon_1_base2_base1_id, "
# otherwise the joins for Sub2.ep1/ep2 don't have columns
# to latch onto. Can't really make it better than this
s.query(Parent, Sub2).join(Parent.sub1).\
- join(Sub1.sub2).from_self().\
- join(Sub2.ep1).
- join(Sub2.ep2),
+ join(Sub1.sub2).from_self().\
+ join(Sub2.ep1).
+ join(Sub2.ep2),
"SELECT anon_1.parent_id AS anon_1_parent_id, "
"anon_1.parent_data AS anon_1_parent_data, "
"anon_1.sub2_id AS anon_1_sub2_id, "
"parent_1.owner_id AS parent_1_owner_id, "
"owner_1.id AS owner_1_id, owner_1.type AS owner_1_type "
"FROM link LEFT OUTER JOIN parent AS parent_1 "
- "ON link.child_id = parent_1.id " + extra +
+ "ON link.child_id = parent_1.id " + extra +
"LEFT OUTER JOIN owner AS owner_1 "
"ON owner_1.id = parent_1.owner_id"
)
class JoinAcrossJoinedInhMultiPath(fixtures.DeclarativeMappedTest,
- testing.AssertsCompiledSQL):
+ testing.AssertsCompiledSQL):
"""test long join paths with a joined-inh in the middle, where we go multiple
times across the same joined-inh to the same target but with other classes
in the middle. E.g. test [ticket:2908]
"""
-
run_setup_mappers = 'once'
__dialect__ = 'default'
def test_join(self):
Root, Intermediate, Sub1, Target = \
- self.classes.Root, self.classes.Intermediate, \
- self.classes.Sub1, self.classes.Target
+ self.classes.Root, self.classes.Intermediate, \
+ self.classes.Sub1, self.classes.Target
s1_alias = aliased(Sub1)
s2_alias = aliased(Sub1)
t1_alias = aliased(Target)
sess = Session()
q = sess.query(Root).\
- join(s1_alias, Root.sub1).join(t1_alias, s1_alias.target).\
- join(Root.intermediate).join(s2_alias, Intermediate.sub1).\
- join(t2_alias, s2_alias.target)
- self.assert_compile(q,
+ join(s1_alias, Root.sub1).join(t1_alias, s1_alias.target).\
+ join(Root.intermediate).join(s2_alias, Intermediate.sub1).\
+ join(t2_alias, s2_alias.target)
+ self.assert_compile(
+ q,
"SELECT root.id AS root_id, root.sub1_id AS root_sub1_id "
"FROM root "
"JOIN (SELECT parent.id AS parent_id, sub1.id AS sub1_id "
- "FROM parent JOIN sub1 ON parent.id = sub1.id) AS anon_1 "
- "ON anon_1.sub1_id = root.sub1_id "
+ "FROM parent JOIN sub1 ON parent.id = sub1.id) AS anon_1 "
+ "ON anon_1.sub1_id = root.sub1_id "
"JOIN target AS target_1 ON anon_1.sub1_id = target_1.sub1_id "
"JOIN intermediate ON root.id = intermediate.root_id "
"JOIN (SELECT parent.id AS parent_id, sub1.id AS sub1_id "
- "FROM parent JOIN sub1 ON parent.id = sub1.id) AS anon_2 "
- "ON anon_2.sub1_id = intermediate.sub1_id "
+ "FROM parent JOIN sub1 ON parent.id = sub1.id) AS anon_2 "
+ "ON anon_2.sub1_id = intermediate.sub1_id "
"JOIN target AS target_2 ON anon_2.sub1_id = target_2.sub1_id")
def test_join_flat(self):
Root, Intermediate, Sub1, Target = \
- self.classes.Root, self.classes.Intermediate, \
- self.classes.Sub1, self.classes.Target
+ self.classes.Root, self.classes.Intermediate, \
+ self.classes.Sub1, self.classes.Target
s1_alias = aliased(Sub1, flat=True)
s2_alias = aliased(Sub1, flat=True)
t1_alias = aliased(Target)
sess = Session()
q = sess.query(Root).\
- join(s1_alias, Root.sub1).join(t1_alias, s1_alias.target).\
- join(Root.intermediate).join(s2_alias, Intermediate.sub1).\
- join(t2_alias, s2_alias.target)
- self.assert_compile(q,
+ join(s1_alias, Root.sub1).join(t1_alias, s1_alias.target).\
+ join(Root.intermediate).join(s2_alias, Intermediate.sub1).\
+ join(t2_alias, s2_alias.target)
+ self.assert_compile(
+ q,
"SELECT root.id AS root_id, root.sub1_id AS root_sub1_id "
"FROM root "
- "JOIN (parent AS parent_1 JOIN sub1 AS sub1_1 ON parent_1.id = sub1_1.id) "
- "ON sub1_1.id = root.sub1_id "
+ "JOIN (parent AS parent_1 JOIN sub1 AS sub1_1 "
+ "ON parent_1.id = sub1_1.id) "
+ "ON sub1_1.id = root.sub1_id "
"JOIN target AS target_1 ON sub1_1.id = target_1.sub1_id "
"JOIN intermediate ON root.id = intermediate.root_id "
- "JOIN (parent AS parent_2 JOIN sub1 AS sub1_2 ON parent_2.id = sub1_2.id) "
- "ON sub1_2.id = intermediate.sub1_id "
- "JOIN target AS target_2 ON sub1_2.id = target_2.sub1_id"
- )
+ "JOIN (parent AS parent_2 JOIN sub1 AS sub1_2 "
+ "ON parent_2.id = sub1_2.id) "
+ "ON sub1_2.id = intermediate.sub1_id "
+ "JOIN target AS target_2 ON sub1_2.id = target_2.sub1_id")
def test_joinedload(self):
Root, Intermediate, Sub1, Target = \
- self.classes.Root, self.classes.Intermediate, \
- self.classes.Sub1, self.classes.Target
+ self.classes.Root, self.classes.Intermediate, \
+ self.classes.Sub1, self.classes.Target
sess = Session()
q = sess.query(Root).\
- options(
- joinedload(Root.sub1).joinedload(Sub1.target),
- joinedload(Root.intermediate).joinedload(Intermediate.sub1).\
- joinedload(Sub1.target),
- )
- self.assert_compile(q,
+ options(
+ joinedload(Root.sub1).joinedload(Sub1.target),
+ joinedload(Root.intermediate).joinedload(Intermediate.sub1).
+ joinedload(Sub1.target))
+ self.assert_compile(
+ q,
"SELECT root.id AS root_id, root.sub1_id AS root_sub1_id, "
- "target_1.id AS target_1_id, target_1.sub1_id AS target_1_sub1_id, "
+ "target_1.id AS target_1_id, "
+ "target_1.sub1_id AS target_1_sub1_id, "
"sub1_1.id AS sub1_1_id, parent_1.id AS parent_1_id, "
"intermediate_1.id AS intermediate_1_id, "
"intermediate_1.sub1_id AS intermediate_1_sub1_id, "
"intermediate_1.root_id AS intermediate_1_root_id, "
- "target_2.id AS target_2_id, target_2.sub1_id AS target_2_sub1_id, "
+ "target_2.id AS target_2_id, "
+ "target_2.sub1_id AS target_2_sub1_id, "
"sub1_2.id AS sub1_2_id, parent_2.id AS parent_2_id "
"FROM root "
"LEFT OUTER JOIN intermediate AS intermediate_1 "
- "ON root.id = intermediate_1.root_id "
+ "ON root.id = intermediate_1.root_id "
"LEFT OUTER JOIN (parent AS parent_1 JOIN sub1 AS sub1_1 "
- "ON parent_1.id = sub1_1.id) ON sub1_1.id = intermediate_1.sub1_id "
- "LEFT OUTER JOIN target AS target_1 ON sub1_1.id = target_1.sub1_id "
+ "ON parent_1.id = sub1_1.id) "
+ "ON sub1_1.id = intermediate_1.sub1_id "
+ "LEFT OUTER JOIN target AS target_1 "
+ "ON sub1_1.id = target_1.sub1_id "
"LEFT OUTER JOIN (parent AS parent_2 JOIN sub1 AS sub1_2 "
- "ON parent_2.id = sub1_2.id) ON sub1_2.id = root.sub1_id "
- "LEFT OUTER JOIN target AS target_2 ON sub1_2.id = target_2.sub1_id")
+ "ON parent_2.id = sub1_2.id) ON sub1_2.id = root.sub1_id "
+ "LEFT OUTER JOIN target AS target_2 "
+ "ON sub1_2.id = target_2.sub1_id")
-class MultipleAdaptUsesEntityOverTableTest(AssertsCompiledSQL, fixtures.MappedTest):
+class MultipleAdaptUsesEntityOverTableTest(
+ AssertsCompiledSQL, fixtures.MappedTest):
__dialect__ = 'default'
run_create_tables = None
run_deletes = None
@classmethod
def define_tables(cls, metadata):
Table('a', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String)
- )
+ Column('id', Integer, primary_key=True),
+ Column('name', String))
Table('b', metadata,
- Column('id', Integer, ForeignKey('a.id'), primary_key=True)
- )
+ Column('id', Integer, ForeignKey('a.id'), primary_key=True))
Table('c', metadata,
- Column('id', Integer, ForeignKey('a.id'), primary_key=True),
- Column('bid', Integer, ForeignKey('b.id'))
- )
+ Column('id', Integer, ForeignKey('a.id'), primary_key=True),
+ Column('bid', Integer, ForeignKey('b.id')))
Table('d', metadata,
- Column('id', Integer, ForeignKey('a.id'), primary_key=True),
- Column('cid', Integer, ForeignKey('c.id'))
- )
+ Column('id', Integer, ForeignKey('a.id'), primary_key=True),
+ Column('cid', Integer, ForeignKey('c.id')))
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
+
class B(A):
pass
+
class C(A):
pass
+
class D(A):
pass
mapper(D, d, inherits=A)
def _two_join_fixture(self):
- A, B, C, D = self.classes.A, self.classes.B, self.classes.C, self.classes.D
+ A, B, C, D = (self.classes.A, self.classes.B, self.classes.C,
+ self.classes.D)
s = Session()
return s.query(B.name, C.name, D.name).select_from(B).\
- join(C, C.bid == B.id).\
- join(D, D.cid == C.id)
+ join(C, C.bid == B.id).\
+ join(D, D.cid == C.id)
def test_two_joins_adaption(self):
a, b, c, d = self.tables.a, self.tables.b, self.tables.c, self.tables.d
def test_two_joins_sql(self):
q = self._two_join_fixture()
- self.assert_compile(q,
+ self.assert_compile(
+ q,
"SELECT a.name AS a_name, a_1.name AS a_1_name, "
"a_2.name AS a_2_name "
"FROM a JOIN b ON a.id = b.id JOIN "
"(a AS a_1 JOIN c AS c_1 ON a_1.id = c_1.id) ON c_1.bid = b.id "
"JOIN (a AS a_2 JOIN d AS d_1 ON a_2.id = d_1.id) "
- "ON d_1.cid = c_1.id"
- )
+ "ON d_1.cid = c_1.id")
class SameNameOnJoined(fixtures.MappedTest):
"seen AS seen_1 ON people.id = seen_1.id LEFT OUTER JOIN "
"seen AS seen_2 ON people_1.id = seen_2.id"
)
-
from sqlalchemy.testing import fixtures, eq_
from sqlalchemy.testing.schema import Table, Column
+
class InheritingSelectablesTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
class Foo(fixtures.ComparableEntity):
pass
+
class Bar(Foo):
pass
+
class Baz(Foo):
pass
mapper(Foo, foo, polymorphic_on=foo.c.b)
mapper(Baz, baz,
- with_polymorphic=('*', foo.join(baz, foo.c.b == 'baz').alias('baz')),
- inherits=Foo,
- inherit_condition=(foo.c.a == baz.c.a),
- inherit_foreign_keys=[baz.c.a],
- polymorphic_identity='baz')
+ with_polymorphic=('*',
+ foo.join(baz, foo.c.b == 'baz').alias('baz')),
+ inherits=Foo, inherit_condition=(foo.c.a == baz.c.a),
+ inherit_foreign_keys=[baz.c.a],
+ polymorphic_identity='baz')
mapper(Bar, bar,
- with_polymorphic=('*', foo.join(bar, foo.c.b == 'bar').alias('bar')),
- inherits=Foo,
- inherit_condition=(foo.c.a == bar.c.a),
- inherit_foreign_keys=[bar.c.a],
- polymorphic_identity='bar')
+ with_polymorphic=('*',
+ foo.join(bar, foo.c.b == 'bar').alias('bar')),
+ inherits=Foo, inherit_condition=(foo.c.a == bar.c.a),
+ inherit_foreign_keys=[bar.c.a],
+ polymorphic_identity='bar')
s = Session()
- assert [Baz(), Baz(), Bar(), Bar()] == s.query(Foo).order_by(Foo.b.desc()).all()
+ assert [Baz(), Baz(), Bar(), Bar()] == s.query(
+ Foo).order_by(Foo.b.desc()).all()
assert [Bar(), Bar()] == s.query(Bar).all()
@classmethod
def define_tables(cls, metadata):
Table('base', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(50)))
Table('child', metadata,
- # 1. name of column must be different, so that we rely on
- # mapper._table_to_equated to link the two cols
- Column('child_id', Integer, ForeignKey('base.id'), primary_key=True),
- Column('name', String(50))
- )
+ # 1. name of column must be different, so that we rely on
+ # mapper._table_to_equated to link the two cols
+ Column('child_id', Integer, ForeignKey(
+ 'base.id'), primary_key=True),
+ Column('name', String(50)))
@classmethod
def setup_classes(cls):
class Base(cls.Comparable):
pass
+
class Child(Base):
pass
base_select = select([base]).alias()
mapper(Base, base_select, polymorphic_on=base_select.c.type,
- polymorphic_identity='base')
+ polymorphic_identity='base')
mapper(Child, child, inherits=Base,
- polymorphic_identity='child')
+ polymorphic_identity='child')
sess = Session()
@classmethod
def define_tables(cls, metadata):
Table('employees', metadata,
- Column('employee_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('manager_data', String(50)),
- Column('engineer_info', String(50)),
- Column('type', String(20)))
+ Column('employee_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('manager_data', String(50)),
+ Column('engineer_info', String(50)),
+ Column('type', String(20)))
Table('reports', metadata,
- Column('report_id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('report_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('employee_id', ForeignKey('employees.employee_id')),
- Column('name', String(50)),
- )
+ Column('name', String(50)),)
@classmethod
def setup_classes(cls):
global Employee, Manager, Engineer, JuniorEngineer
+
class Employee(cls.Comparable):
pass
+
class Manager(Employee):
pass
+
class Engineer(Employee):
pass
+
class JuniorEngineer(Engineer):
pass
@classmethod
def setup_mappers(cls):
- Employee, Manager, JuniorEngineer, employees, Engineer = (cls.classes.Employee,
- cls.classes.Manager,
- cls.classes.JuniorEngineer,
- cls.tables.employees,
- cls.classes.Engineer)
+ Employee, Manager, JuniorEngineer, employees, Engineer = (
+ cls.classes.Employee, cls.classes.Manager, cls.classes.
+ JuniorEngineer, cls.tables.employees, cls.classes.Engineer)
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Manager, inherits=Employee, polymorphic_identity='manager')
mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
- mapper(JuniorEngineer, inherits=Engineer, polymorphic_identity='juniorengineer')
+ mapper(JuniorEngineer, inherits=Engineer,
+ polymorphic_identity='juniorengineer')
def _fixture_one(self):
Employee, JuniorEngineer, Manager, Engineer = (
session.expire(m1, ['manager_data'])
eq_(m1.manager_data, "knows how to manage things")
- row = session.query(Engineer.name, Engineer.employee_id).filter(Engineer.name=='Kurt').first()
+ row = session.query(Engineer.name, Engineer.employee_id).filter(
+ Engineer.name == 'Kurt').first()
assert row.name == 'Kurt'
assert row.employee_id == e1.employee_id
[("Tom", "Kurt"), ("Tom", "Ed")]
)
- eq_(
- session.query(func.upper(Manager.name), func.upper(ealias.name)).all(),
- [("TOM", "KURT"), ("TOM", "ED")]
- )
+ eq_(session.query(func.upper(Manager.name),
+ func.upper(ealias.name)).all(),
+ [("TOM", "KURT"), ("TOM", "ED")])
eq_(
session.query(Manager).add_entity(ealias).all(),
# TODO: I think raise error on this for now
# self.assertEquals(
- # session.query(Employee.name, Manager.manager_data, Engineer.engineer_info).all(),
+ # session.query(Employee.name, Manager.manager_data,
+ # Engineer.engineer_info).all(),
# []
# )
"anon_1_employees_employee_id, "
"anon_1.employees_name AS anon_1_employees_name, "
"anon_1.employees_manager_data AS anon_1_employees_manager_data, "
- "anon_1.employees_engineer_info AS anon_1_employees_engineer_info, "
+ "anon_1.employees_engineer_info AS anon_1_employees_engineer_info, " # noqa
"anon_1.employees_type AS anon_1_employees_type "
"FROM (SELECT employees.employee_id AS employees_employee_id, "
"employees.name AS employees_name, "
"employees.engineer_info AS employees_engineer_info, "
"employees.type AS employees_type FROM employees "
"WHERE employees.manager_data = :manager_data_1 "
- "AND employees.type IN (:type_3)) AS anon_1"
- )
+ "AND employees.type IN (:type_3)) AS anon_1")
for meth, token in [
(q1.union, "UNION"),
)
def test_select_from(self):
- Manager, JuniorEngineer, employees, Engineer = (self.classes.Manager,
- self.classes.JuniorEngineer,
- self.tables.employees,
- self.classes.Engineer)
+ Manager = self.classes.Manager
+ JuniorEngineer = self.classes.JuniorEngineer
+ employees = self.tables.employees
+ Engineer = self.classes.Engineer
sess = create_session()
m1 = Manager(name='Tom', manager_data='data1')
sess.add_all([m1, m2, e1, e2])
sess.flush()
- eq_(
- sess.query(Manager).select_from(employees.select().limit(10)).all(),
- [m1, m2]
- )
+ eq_(sess.query(Manager).select_from(
+ employees.select().limit(10)).all(), [m1, m2])
def test_count(self):
- Employee, JuniorEngineer, Manager, Engineer = (self.classes.Employee,
- self.classes.JuniorEngineer,
- self.classes.Manager,
- self.classes.Engineer)
+ Employee = self.classes.Employee
+ JuniorEngineer = self.classes.JuniorEngineer
+ Manager = self.classes.Manager
+ Engineer = self.classes.Engineer
sess = create_session()
m1 = Manager(name='Tom', manager_data='data1')
def test_type_filtering(self):
Employee, Manager, reports, Engineer = (self.classes.Employee,
- self.classes.Manager,
- self.tables.reports,
- self.classes.Engineer)
+ self.classes.Manager,
+ self.tables.reports,
+ self.classes.Engineer)
class Report(fixtures.ComparableEntity):
pass
sess.flush()
rq = sess.query(Report)
- assert len(rq.filter(Report.employee.of_type(Manager).has()).all()) == 1
- assert len(rq.filter(Report.employee.of_type(Engineer).has()).all()) == 0
+ assert len(rq.filter(Report.employee.of_type(Manager).has())
+ .all()) == 1
+ assert len(rq.filter(Report.employee.of_type(Engineer).has())
+ .all()) == 0
def test_type_joins(self):
Employee, Manager, reports, Engineer = (self.classes.Employee,
- self.classes.Manager,
- self.tables.reports,
- self.classes.Engineer)
+ self.classes.Manager,
+ self.tables.reports,
+ self.classes.Engineer)
class Report(fixtures.ComparableEntity):
pass
assert len(rq.join(Report.employee.of_type(Manager)).all()) == 1
assert len(rq.join(Report.employee.of_type(Engineer)).all()) == 0
-class RelationshipFromSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
+
+class RelationshipFromSingleTest(
+ testing.AssertsCompiledSQL, fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('employee', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('type', String(20)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('type', String(20)))
Table('employee_stuff', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('employee_id', Integer, ForeignKey('employee.id')),
- Column('name', String(50)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('employee_id', Integer, ForeignKey('employee.id')),
+ Column('name', String(50)))
@classmethod
def setup_classes(cls):
class Employee(cls.Comparable):
pass
+
class Manager(Employee):
pass
+
class Stuff(cls.Comparable):
pass
def test_subquery_load(self):
- employee, employee_stuff, Employee, Stuff, Manager = (self.tables.employee,
- self.tables.employee_stuff,
- self.classes.Employee,
- self.classes.Stuff,
- self.classes.Manager)
-
- mapper(Employee, employee, polymorphic_on=employee.c.type, polymorphic_identity='employee')
- mapper(Manager, inherits=Employee, polymorphic_identity='manager', properties={
- 'stuff':relationship(Stuff)
- })
+ employee, employee_stuff, Employee, Stuff, Manager = (
+ self.tables.employee, self.tables.employee_stuff, self.classes.
+ Employee, self.classes.Stuff, self.classes.Manager)
+
+ mapper(Employee, employee, polymorphic_on=employee.c.type,
+ polymorphic_identity='employee')
+ mapper(Manager, inherits=Employee, polymorphic_identity='manager',
+ properties={'stuff': relationship(Stuff)})
mapper(Stuff, employee_stuff)
sess = create_session()
- context = sess.query(Manager).options(subqueryload('stuff'))._compile_context()
- subq = context.attributes[('subquery',
- (class_mapper(Manager), class_mapper(Manager).attrs.stuff))]
+ context = sess.query(Manager).options(
+ subqueryload('stuff'))._compile_context()
+ subq = context.attributes[('subquery', (class_mapper(
+ Manager), class_mapper(Manager).attrs.stuff))]
self.assert_compile(subq,
'SELECT employee_stuff.id AS '
'JOIN employee_stuff ON anon_1.employee_id '
'= employee_stuff.employee_id ORDER BY '
'anon_1.employee_id',
- use_default_dialect=True
- )
+ use_default_dialect=True)
-class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
+
+class RelationshipToSingleTest(
+ testing.AssertsCompiledSQL, fixtures.MappedTest):
__dialect__ = 'default'
@classmethod
def define_tables(cls, metadata):
Table('employees', metadata,
- Column('employee_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('manager_data', String(50)),
- Column('engineer_info', String(50)),
- Column('type', String(20)),
- Column('company_id', Integer, ForeignKey('companies.company_id'))
- )
+ Column('employee_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('manager_data', String(50)),
+ Column('engineer_info', String(50)),
+ Column('type', String(20)),
+ Column('company_id', Integer,
+ ForeignKey('companies.company_id')))
Table('companies', metadata,
- Column('company_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(50)),
- )
+ Column('company_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),)
@classmethod
def setup_classes(cls):
class Employee(cls.Comparable):
pass
+
class Manager(Employee):
pass
+
class Engineer(Employee):
pass
+
class JuniorEngineer(Engineer):
pass
def test_of_type(self):
JuniorEngineer, Company, companies, Manager,\
- Employee, employees, Engineer = (self.classes.JuniorEngineer,
- self.classes.Company,
- self.tables.companies,
- self.classes.Manager,
- self.classes.Employee,
- self.tables.employees,
- self.classes.Engineer)
+ Employee, employees, Engineer = (self.classes.JuniorEngineer,
+ self.classes.Company,
+ self.tables.companies,
+ self.classes.Manager,
+ self.classes.Employee,
+ self.tables.employees,
+ self.classes.Engineer)
mapper(Company, companies, properties={
- 'employees':relationship(Employee, backref='company')
+ 'employees': relationship(Employee, backref='company')
})
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Manager, inherits=Employee, polymorphic_identity='manager')
mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
- mapper(JuniorEngineer, inherits=Engineer, polymorphic_identity='juniorengineer')
+ mapper(JuniorEngineer, inherits=Engineer,
+ polymorphic_identity='juniorengineer')
sess = sessionmaker()()
c1 = Company(name='c1')
m1 = Manager(name='Tom', manager_data='data1', company=c1)
m2 = Manager(name='Tom2', manager_data='data2', company=c2)
- e1 = Engineer(name='Kurt', engineer_info='knows how to hack', company=c2)
+ e1 = Engineer(name='Kurt', engineer_info='knows how to hack',
+ company=c2)
e2 = JuniorEngineer(name='Ed', engineer_info='oh that ed', company=c1)
sess.add_all([c1, c2, m1, m2, e1, e2])
sess.commit()
sess.expunge_all()
- eq_(
- sess.query(Company).filter(Company.employees.of_type(JuniorEngineer).any()).all(),
- [
- Company(name='c1'),
- ]
- )
+ eq_(sess.query(Company).filter(Company.employees.of_type(
+ JuniorEngineer).any()).all(), [Company(name='c1'), ])
- eq_(
- sess.query(Company).join(Company.employees.of_type(JuniorEngineer)).all(),
- [
- Company(name='c1'),
- ]
- )
+ eq_(sess.query(Company).join(Company.employees.of_type(
+ JuniorEngineer)).all(), [Company(name='c1'), ])
def test_of_type_aliased_fromjoinpoint(self):
- Company, Employee, Engineer = self.classes.Company,\
- self.classes.Employee,\
- self.classes.Engineer
+ Company, Employee, Engineer = (self.classes.Company,
+ self.classes.Employee,
+ self.classes.Engineer)
companies, employees = self.tables.companies, self.tables.employees
mapper(Company, companies, properties={
- 'employee':relationship(Employee)
+ 'employee': relationship(Employee)
})
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
)
def test_outer_join_prop(self):
- Company, Employee, Engineer = self.classes.Company,\
- self.classes.Employee,\
- self.classes.Engineer
+ Company, Employee, Engineer = (self.classes.Company,
+ self.classes.Employee,
+ self.classes.Engineer)
companies, employees = self.tables.companies, self.tables.employees
mapper(Company, companies, properties={
- 'engineers':relationship(Engineer)
+ 'engineers': relationship(Engineer)
})
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
self.assert_compile(
sess.query(Company, Engineer.name).outerjoin("engineers"),
"SELECT companies.company_id AS companies_company_id, "
- "companies.name AS companies_name, employees.name AS employees_name "
+ "companies.name AS companies_name, "
+ "employees.name AS employees_name "
"FROM companies LEFT OUTER JOIN employees ON companies.company_id "
- "= employees.company_id AND employees.type IN (:type_1)"
- )
+ "= employees.company_id AND employees.type IN (:type_1)")
def test_outer_join_prop_alias(self):
- Company, Employee, Engineer = self.classes.Company,\
- self.classes.Employee,\
- self.classes.Engineer
+ Company, Employee, Engineer = (self.classes.Company,
+ self.classes.Employee,
+ self.classes.Engineer)
companies, employees = self.tables.companies, self.tables.employees
mapper(Company, companies, properties={
- 'engineers':relationship(Engineer)
+ 'engineers': relationship(Engineer)
})
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
eng_alias = aliased(Engineer)
sess = create_session()
self.assert_compile(
- sess.query(Company, eng_alias.name).outerjoin(eng_alias, Company.engineers),
+ sess.query(Company, eng_alias.name).outerjoin(
+ eng_alias, Company.engineers),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name, employees_1.name AS "
"employees_1_name FROM companies LEFT OUTER "
"JOIN employees AS employees_1 ON companies.company_id "
- "= employees_1.company_id AND employees_1.type IN (:type_1)"
- )
-
+ "= employees_1.company_id AND employees_1.type IN (:type_1)")
def test_outer_join_literal_onclause(self):
- Company, Employee, Engineer = self.classes.Company,\
- self.classes.Employee,\
- self.classes.Engineer
+ Company, Employee, Engineer = (self.classes.Company,
+ self.classes.Employee,
+ self.classes.Engineer)
companies, employees = self.tables.companies, self.tables.employees
mapper(Company, companies, properties={
- 'engineers':relationship(Engineer)
+ 'engineers': relationship(Engineer)
})
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
)
def test_outer_join_literal_onclause_alias(self):
- Company, Employee, Engineer = self.classes.Company,\
- self.classes.Employee,\
- self.classes.Engineer
+ Company, Employee, Engineer = (self.classes.Company,
+ self.classes.Employee,
+ self.classes.Engineer)
companies, employees = self.tables.companies, self.tables.employees
mapper(Company, companies, properties={
- 'engineers':relationship(Engineer)
+ 'engineers': relationship(Engineer)
})
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
)
def test_outer_join_no_onclause(self):
- Company, Employee, Engineer = self.classes.Company,\
- self.classes.Employee,\
- self.classes.Engineer
+ Company, Employee, Engineer = (self.classes.Company,
+ self.classes.Employee,
+ self.classes.Engineer)
companies, employees = self.tables.companies, self.tables.employees
mapper(Company, companies, properties={
- 'engineers':relationship(Engineer)
+ 'engineers': relationship(Engineer)
})
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
)
def test_outer_join_no_onclause_alias(self):
- Company, Employee, Engineer = self.classes.Company,\
- self.classes.Employee,\
- self.classes.Engineer
+ Company, Employee, Engineer = (self.classes.Company,
+ self.classes.Employee,
+ self.classes.Engineer)
companies, employees = self.tables.companies, self.tables.employees
mapper(Company, companies, properties={
- 'engineers':relationship(Engineer)
+ 'engineers': relationship(Engineer)
})
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
def test_no_aliasing_from_overlap(self):
# test [ticket:3233]
- Company, Employee, Engineer, Manager = self.classes.Company,\
- self.classes.Employee,\
- self.classes.Engineer,\
- self.classes.Manager
+ Company, Employee, Engineer, Manager = (self.classes.Company,
+ self.classes.Employee,
+ self.classes.Engineer,
+ self.classes.Manager)
companies, employees = self.tables.companies, self.tables.employees
def test_relationship_to_subclass(self):
JuniorEngineer, Company, companies, Manager, \
- Employee, employees, Engineer = (self.classes.JuniorEngineer,
- self.classes.Company,
- self.tables.companies,
- self.classes.Manager,
- self.classes.Employee,
- self.tables.employees,
- self.classes.Engineer)
+ Employee, employees, Engineer = (self.classes.JuniorEngineer,
+ self.classes.Company,
+ self.tables.companies,
+ self.classes.Manager,
+ self.classes.Employee,
+ self.tables.employees,
+ self.classes.Engineer)
mapper(Company, companies, properties={
- 'engineers':relationship(Engineer)
- })
- mapper(Employee, employees, polymorphic_on=employees.c.type, properties={
- 'company':relationship(Company)
+ 'engineers': relationship(Engineer)
})
+ mapper(Employee, employees, polymorphic_on=employees.c.type,
+ properties={'company': relationship(Company)})
mapper(Manager, inherits=Employee, polymorphic_identity='manager')
mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
- mapper(JuniorEngineer, inherits=Engineer, polymorphic_identity='juniorengineer')
+ mapper(JuniorEngineer, inherits=Engineer,
+ polymorphic_identity='juniorengineer')
sess = sessionmaker()()
c1 = Company(name='c1')
m1 = Manager(name='Tom', manager_data='data1', company=c1)
m2 = Manager(name='Tom2', manager_data='data2', company=c2)
- e1 = Engineer(name='Kurt', engineer_info='knows how to hack', company=c2)
+ e1 = Engineer(name='Kurt', engineer_info='knows how to hack',
+ company=c2)
e2 = JuniorEngineer(name='Ed', engineer_info='oh that ed', company=c1)
sess.add_all([c1, c2, m1, m2, e1, e2])
sess.commit()
eq_(sess.query(Company).order_by(Company.name).all(),
[
Company(name='c1', engineers=[JuniorEngineer(name='Ed')]),
- Company(name='c2', engineers=[Engineer(name='Kurt')])
- ]
- )
+ Company(name='c2', engineers=[Engineer(name='Kurt')])])
# eager load join should limit to only "Engineer"
sess.expunge_all()
- eq_(sess.query(Company).options(joinedload('engineers')).order_by(Company.name).all(),
- [
- Company(name='c1', engineers=[JuniorEngineer(name='Ed')]),
- Company(name='c2', engineers=[Engineer(name='Kurt')])
- ]
- )
+ eq_(sess.query(Company).options(joinedload('engineers')).
+ order_by(Company.name).all(),
+ [Company(name='c1', engineers=[JuniorEngineer(name='Ed')]),
+ Company(name='c2', engineers=[Engineer(name='Kurt')])])
# join() to Company.engineers, Employee as the requested entity
sess.expunge_all()
- eq_(sess.query(Company, Employee).join(Company.engineers).order_by(Company.name).all(),
- [
- (Company(name='c1'), JuniorEngineer(name='Ed')),
- (Company(name='c2'), Engineer(name='Kurt'))
- ]
- )
+ eq_(sess.query(Company, Employee)
+ .join(Company.engineers)
+ .order_by(Company.name)
+ .all(),
+ [(Company(name='c1'), JuniorEngineer(name='Ed')),
+ (Company(name='c2'), Engineer(name='Kurt'))])
# join() to Company.engineers, Engineer as the requested entity.
- # this actually applies the IN criterion twice which is less than ideal.
+ # this actually applies the IN criterion twice which is less than
+ # ideal.
sess.expunge_all()
- eq_(sess.query(Company, Engineer).join(Company.engineers).order_by(Company.name).all(),
- [
- (Company(name='c1'), JuniorEngineer(name='Ed')),
- (Company(name='c2'), Engineer(name='Kurt'))
- ]
- )
+ eq_(sess.query(Company, Engineer)
+ .join(Company.engineers)
+ .order_by(Company.name)
+ .all(),
+ [(Company(name='c1'), JuniorEngineer(name='Ed')),
+ (Company(name='c2'), Engineer(name='Kurt'))])
# join() to Company.engineers without any Employee/Engineer entity
sess.expunge_all()
- eq_(sess.query(Company).join(Company.engineers).filter(Engineer.name.in_(['Tom', 'Kurt'])).all(),
- [
- Company(name='c2')
- ]
- )
-
- # this however fails as it does not limit the subtypes to just "Engineer".
- # with joins constructed by filter(), we seem to be following a policy where
- # we don't try to make decisions on how to join to the target class, whereas when using join() we
- # seem to have a lot more capabilities.
- # we might want to document "advantages of join() vs. straight filtering", or add a large
- # section to "inheritance" laying out all the various behaviors Query has.
+ eq_(sess.query(Company).join(Company.engineers).filter(
+ Engineer.name.in_(['Tom', 'Kurt'])).all(), [Company(name='c2')])
+
+ # this however fails as it does not limit the subtypes to just
+ # "Engineer". with joins constructed by filter(), we seem to be
+ # following a policy where we don't try to make decisions on how to
+ # join to the target class, whereas when using join() we seem to have
+ # a lot more capabilities. we might want to document
+ # "advantages of join() vs. straight filtering", or add a large
+ # section to "inheritance" laying out all the various behaviors Query
+ # has.
@testing.fails_on_everything_except()
def go():
sess.expunge_all()
- eq_(sess.query(Company).\
- filter(Company.company_id==Engineer.company_id).filter(Engineer.name.in_(['Tom', 'Kurt'])).all(),
- [
- Company(name='c2')
- ]
- )
+ eq_(sess.query(Company).filter(
+ Company.company_id == Engineer.company_id).filter(
+ Engineer.name.in_(['Tom', 'Kurt'])).all(),
+ [Company(name='c2')])
go()
@classmethod
def define_tables(cls, metadata):
Table('parent', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True)
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True))
Table('m2m', metadata,
- Column('parent_id', Integer,
- ForeignKey('parent.id'), primary_key=True),
- Column('child_id', Integer,
- ForeignKey('child.id'), primary_key=True),
- )
+ Column('parent_id', Integer,
+ ForeignKey('parent.id'), primary_key=True),
+ Column('child_id', Integer,
+ ForeignKey('child.id'), primary_key=True))
Table('child', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('discriminator', String(20)),
- Column('name', String(20))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('discriminator', String(20)),
+ Column('name', String(20)))
@classmethod
def setup_classes(cls):
@classmethod
def setup_mappers(cls):
mapper(cls.classes.Parent, cls.tables.parent, properties={
- "s1": relationship(cls.classes.SubChild1,
- secondary=cls.tables.m2m,
- uselist=False),
- "s2": relationship(cls.classes.SubChild2,
- secondary=cls.tables.m2m)
- })
+ "s1": relationship(cls.classes.SubChild1,
+ secondary=cls.tables.m2m,
+ uselist=False),
+ "s2": relationship(cls.classes.SubChild2,
+ secondary=cls.tables.m2m)
+ })
mapper(cls.classes.Child, cls.tables.child,
- polymorphic_on=cls.tables.child.c.discriminator)
+ polymorphic_on=cls.tables.child.c.discriminator)
mapper(cls.classes.SubChild1, inherits=cls.classes.Child,
- polymorphic_identity='sub1')
+ polymorphic_identity='sub1')
mapper(cls.classes.SubChild2, inherits=cls.classes.Child,
- polymorphic_identity='sub2')
+ polymorphic_identity='sub2')
@classmethod
def insert_data(cls):
SubChild2 = cls.classes.SubChild2
s = Session()
s.add_all([
- Parent(s1=SubChild1(name='sc1_1'),
- s2=[SubChild2(name="sc2_1"), SubChild2(name="sc2_2")]
- ),
- ])
+ Parent(s1=SubChild1(name='sc1_1'),
+ s2=[SubChild2(name="sc2_1"), SubChild2(name="sc2_2")])])
s.commit()
def test_eager_join(self):
"IN (:discriminator_1)) ON parent.id = m2m_1.parent_id"
)
+
class SingleOnJoinedTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global persons_table, employees_table
persons_table = Table('persons', metadata,
- Column('person_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('type', String(20), nullable=False)
- )
+ Column('person_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('type', String(20), nullable=False))
employees_table = Table('employees', metadata,
- Column('person_id', Integer, ForeignKey('persons.person_id'),primary_key=True),
- Column('employee_data', String(50)),
- Column('manager_data', String(50)),
- )
+ Column('person_id', Integer,
+ ForeignKey('persons.person_id'),
+ primary_key=True),
+ Column('employee_data', String(50)),
+ Column('manager_data', String(50)),)
def test_single_on_joined(self):
class Person(fixtures.ComparableEntity):
pass
+
class Employee(Person):
pass
+
class Manager(Employee):
pass
mapper(Person, persons_table, polymorphic_on=persons_table.c.type,
- polymorphic_identity='person')
- mapper(Employee, employees_table, inherits=Person,polymorphic_identity='engineer')
- mapper(Manager, inherits=Employee,polymorphic_identity='manager')
+ polymorphic_identity='person')
+ mapper(Employee, employees_table, inherits=Person,
+ polymorphic_identity='engineer')
+ mapper(Manager, inherits=Employee, polymorphic_identity='manager')
sess = create_session()
sess.add(Person(name='p1'))
sess.expunge_all()
def go():
- eq_(sess.query(Person).with_polymorphic('*').order_by(Person.person_id).all(), [
- Person(name='p1'),
- Employee(name='e1', employee_data='ed1'),
- Manager(name='m1', employee_data='ed2', manager_data='md1')
- ])
+ eq_(sess.query(Person).with_polymorphic('*').order_by(
+ Person.person_id).all(),
+ [Person(name='p1'),
+ Employee(name='e1', employee_data='ed1'),
+ Manager(
+ name='m1', employee_data='ed2', manager_data='md1')])
self.assert_sql_count(testing.db, go, 1)
-
_PolymorphicPolymorphic, _PolymorphicUnions, _PolymorphicJoins,\
_PolymorphicAliasedJoins
+
class _WithPolymorphicBase(_PolymorphicFixtureBase):
def test_join_base_to_sub(self):
sess = create_session()
sess = create_session()
pa = with_polymorphic(Person, [Engineer, Manager])
- eq_(
- sess.query(pa.name, pa.Engineer.primary_language, pa.Manager.manager_name).\
- filter(or_(pa.Engineer.primary_language=='java',
- pa.Manager.manager_name=='dogbert')).\
- order_by(pa.Engineer.type).all(),
- [
- ('dilbert', 'java', None),
- ('dogbert', None, 'dogbert'),
- ]
- )
-
+ eq_(sess.query(
+ pa.name, pa.Engineer.primary_language,
+ pa.Manager.manager_name).filter(
+ or_(
+ pa.Engineer.primary_language == 'java', pa.Manager.
+ manager_name
+ == 'dogbert')).order_by(pa.Engineer.type).all(),
+ [('dilbert', 'java', None),
+ ('dogbert', None, 'dogbert'), ])
def test_join_to_join_entities(self):
sess = create_session()
[(p1.name, type(p1), p2.name, type(p2)) for (p1, p2) in sess.query(
pa, pa_alias
).join(pa_alias,
- or_(
- pa.Engineer.primary_language==\
- pa_alias.Engineer.primary_language,
- and_(
- pa.Engineer.primary_language == None,
- pa_alias.Engineer.primary_language == None,
- pa.person_id > pa_alias.person_id
- )
- )
- ).order_by(pa.name, pa_alias.name)],
+ or_(
+ pa.Engineer.primary_language ==
+ pa_alias.Engineer.primary_language,
+ and_(
+ pa.Engineer.primary_language == None, # noqa
+ pa_alias.Engineer.primary_language == None,
+ pa.person_id > pa_alias.person_id
+ ))
+ ).order_by(pa.name, pa_alias.name)],
[
('dilbert', Engineer, 'dilbert', Engineer),
('dogbert', Manager, 'pointy haired boss', Boss),
pa.name, pa.Engineer.primary_language,
pa_alias.name, pa_alias.Engineer.primary_language
).join(pa_alias,
- or_(
- pa.Engineer.primary_language==\
- pa_alias.Engineer.primary_language,
- and_(
- pa.Engineer.primary_language == None,
- pa_alias.Engineer.primary_language == None,
- pa.person_id > pa_alias.person_id
- )
- )
- ).order_by(pa.name, pa_alias.name)],
+ or_(
+ pa.Engineer.primary_language ==
+ pa_alias.Engineer.primary_language,
+ and_(
+ pa.Engineer.primary_language == None, # noqa
+ pa_alias.Engineer.primary_language == None,
+ pa.person_id > pa_alias.person_id
+ ))
+ ).order_by(pa.name, pa_alias.name)],
[
('dilbert', 'java', 'dilbert', 'java'),
('dogbert', None, 'pointy haired boss', None),
]
)
+
class PolymorphicTest(_WithPolymorphicBase, _Polymorphic):
pass
-class PolymorphicPolymorphicTest(_WithPolymorphicBase, _PolymorphicPolymorphic):
+
+class PolymorphicPolymorphicTest(_WithPolymorphicBase,
+ _PolymorphicPolymorphic):
pass
+
class PolymorphicUnionsTest(_WithPolymorphicBase, _PolymorphicUnions):
pass
-class PolymorphicAliasedJoinsTest(_WithPolymorphicBase, _PolymorphicAliasedJoins):
+
+class PolymorphicAliasedJoinsTest(_WithPolymorphicBase,
+ _PolymorphicAliasedJoins):
pass
+
class PolymorphicJoinsTest(_WithPolymorphicBase, _PolymorphicJoins):
pass
@classmethod
def define_tables(cls, metadata):
Table('items', metadata,
- Column('item_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(40)))
+ Column('item_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(40)))
Table('item_keywords', metadata,
- Column('item_id', Integer, ForeignKey('items.item_id')),
- Column('keyword_id', Integer, ForeignKey('keywords.keyword_id')),
- Column('data', String(40)))
+ Column('item_id', Integer, ForeignKey('items.item_id')),
+ Column('keyword_id', Integer, ForeignKey('keywords.keyword_id')),
+ Column('data', String(40)))
Table('keywords', metadata,
- Column('keyword_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(40)))
+ Column('keyword_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(40)))
@classmethod
def setup_classes(cls):
class Item(cls.Basic):
def __init__(self, name):
self.name = name
+
def __repr__(self):
return "Item id=%d name=%s keywordassoc=%r" % (
self.item_id, self.name, self.keywords)
class Keyword(cls.Basic):
def __init__(self, name):
self.name = name
+
def __repr__(self):
return "Keyword id=%d name=%s" % (self.keyword_id, self.name)
def __init__(self, keyword, data):
self.keyword = keyword
self.data = data
+
def __repr__(self):
return "KeywordAssociation itemid=%d keyword=%r data=%s" % (
self.item_id, self.keyword, self.data)
@classmethod
def setup_mappers(cls):
KeywordAssociation, Item, Keyword = (cls.classes.KeywordAssociation,
- cls.classes.Item,
- cls.classes.Keyword)
+ cls.classes.Item,
+ cls.classes.Keyword)
items, item_keywords, keywords = cls.tables.get_all(
'items', 'item_keywords', 'keywords')
mapper(Keyword, keywords)
mapper(KeywordAssociation, item_keywords, properties={
- 'keyword':relationship(Keyword, lazy='joined')},
- primary_key=
- [item_keywords.c.item_id, item_keywords.c.keyword_id])
+ 'keyword': relationship(Keyword, lazy='joined')},
+ primary_key=[item_keywords.c.item_id, item_keywords.c.keyword_id])
mapper(Item, items, properties={
- 'keywords' : relationship(KeywordAssociation,
- order_by=item_keywords.c.data,
- cascade="all, delete-orphan")
+ 'keywords': relationship(KeywordAssociation,
+ order_by=item_keywords.c.data,
+ cascade="all, delete-orphan")
})
def test_insert(self):
KeywordAssociation, Item, Keyword = (self.classes.KeywordAssociation,
- self.classes.Item,
- self.classes.Keyword)
+ self.classes.Item,
+ self.classes.Keyword)
sess = create_session()
item1 = Item('item1')
item2 = Item('item2')
- item1.keywords.append(KeywordAssociation(Keyword('blue'), 'blue_assoc'))
+ item1.keywords.append(KeywordAssociation(
+ Keyword('blue'), 'blue_assoc'))
item1.keywords.append(KeywordAssociation(Keyword('red'), 'red_assoc'))
- item2.keywords.append(KeywordAssociation(Keyword('green'), 'green_assoc'))
+ item2.keywords.append(KeywordAssociation(
+ Keyword('green'), 'green_assoc'))
sess.add_all((item1, item2))
sess.flush()
saved = repr([item1, item2])
sess.expunge_all()
- l = sess.query(Item).all()
- loaded = repr(l)
+ result = sess.query(Item).all()
+ loaded = repr(result)
eq_(saved, loaded)
def test_replace(self):
KeywordAssociation, Item, Keyword = (self.classes.KeywordAssociation,
- self.classes.Item,
- self.classes.Keyword)
+ self.classes.Item,
+ self.classes.Keyword)
sess = create_session()
item1 = Item('item1')
- item1.keywords.append(KeywordAssociation(Keyword('blue'), 'blue_assoc'))
+ item1.keywords.append(KeywordAssociation(
+ Keyword('blue'), 'blue_assoc'))
item1.keywords.append(KeywordAssociation(Keyword('red'), 'red_assoc'))
sess.add(item1)
sess.flush()
sess.flush()
saved = repr([item1])
sess.expunge_all()
- l = sess.query(Item).all()
- loaded = repr(l)
+ result = sess.query(Item).all()
+ loaded = repr(result)
eq_(saved, loaded)
def test_modify(self):
KeywordAssociation, Item, Keyword = (self.classes.KeywordAssociation,
- self.classes.Item,
- self.classes.Keyword)
+ self.classes.Item,
+ self.classes.Keyword)
sess = create_session()
item1 = Item('item1')
item2 = Item('item2')
- item1.keywords.append(KeywordAssociation(Keyword('blue'), 'blue_assoc'))
+ item1.keywords.append(KeywordAssociation(
+ Keyword('blue'), 'blue_assoc'))
item1.keywords.append(KeywordAssociation(Keyword('red'), 'red_assoc'))
- item2.keywords.append(KeywordAssociation(Keyword('green'), 'green_assoc'))
+ item2.keywords.append(KeywordAssociation(
+ Keyword('green'), 'green_assoc'))
sess.add_all((item1, item2))
sess.flush()
del item1.keywords[0]
purple_keyword = Keyword('purple')
item1.keywords.append(KeywordAssociation(red_keyword, 'new_red_assoc'))
- item2.keywords.append(KeywordAssociation(purple_keyword, 'purple_item2_assoc'))
- item1.keywords.append(KeywordAssociation(purple_keyword, 'purple_item1_assoc'))
- item1.keywords.append(KeywordAssociation(Keyword('yellow'), 'yellow_assoc'))
+ item2.keywords.append(KeywordAssociation(
+ purple_keyword, 'purple_item2_assoc'))
+ item1.keywords.append(KeywordAssociation(
+ purple_keyword, 'purple_item1_assoc'))
+ item1.keywords.append(KeywordAssociation(
+ Keyword('yellow'), 'yellow_assoc'))
sess.flush()
saved = repr([item1, item2])
sess.expunge_all()
- l = sess.query(Item).all()
- loaded = repr(l)
+ result = sess.query(Item).all()
+ loaded = repr(result)
eq_(saved, loaded)
def test_delete(self):
- KeywordAssociation, Item, item_keywords, Keyword = (self.classes.KeywordAssociation,
- self.classes.Item,
- self.tables.item_keywords,
- self.classes.Keyword)
+ KeywordAssociation = self.classes.KeywordAssociation
+ Item = self.classes.Item
+ item_keywords = self.tables.item_keywords
+ Keyword = self.classes.Keyword
sess = create_session()
item1 = Item('item1')
item2 = Item('item2')
- item1.keywords.append(KeywordAssociation(Keyword('blue'), 'blue_assoc'))
+ item1.keywords.append(KeywordAssociation(
+ Keyword('blue'), 'blue_assoc'))
item1.keywords.append(KeywordAssociation(Keyword('red'), 'red_assoc'))
- item2.keywords.append(KeywordAssociation(Keyword('green'), 'green_assoc'))
+ item2.keywords.append(KeywordAssociation(
+ Keyword('green'), 'green_assoc'))
sess.add_all((item1, item2))
sess.flush()
eq_(select([func.count('*')]).select_from(item_keywords).scalar(), 3)
sess.delete(item2)
sess.flush()
eq_(select([func.count('*')]).select_from(item_keywords).scalar(), 0)
-
-
cls.other['false'] = false
- Table('owners', metadata ,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Table('owners', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(30)))
Table('categories', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(20)))
- Table('tests', metadata ,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Table('tests', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('owner_id', Integer, ForeignKey('owners.id'),
nullable=False),
Column('category_id', Integer, ForeignKey('categories.id'),
nullable=False))
- Table('options', metadata ,
- Column('test_id', Integer, ForeignKey('tests.id'), primary_key=True),
- Column('owner_id', Integer, ForeignKey('owners.id'), primary_key=True),
- Column('someoption', sa.Boolean, server_default=false, nullable=False))
+ Table('options', metadata,
+ Column('test_id', Integer, ForeignKey('tests.id'),
+ primary_key=True),
+ Column('owner_id', Integer, ForeignKey('owners.id'),
+ primary_key=True),
+ Column('someoption', sa.Boolean, server_default=false,
+ nullable=False))
@classmethod
def setup_classes(cls):
@classmethod
def setup_mappers(cls):
- Category, owners, Option, tests, Thing, Owner, options, categories = (cls.classes.Category,
- cls.tables.owners,
- cls.classes.Option,
- cls.tables.tests,
- cls.classes.Thing,
- cls.classes.Owner,
- cls.tables.options,
- cls.tables.categories)
+ Category, owners, Option, tests, Thing, Owner, options, categories = (
+ cls.classes.Category,
+ cls.tables.owners,
+ cls.classes.Option,
+ cls.tables.tests,
+ cls.classes.Thing,
+ cls.classes.Owner,
+ cls.tables.options,
+ cls.tables.categories)
mapper(Owner, owners)
mapper(Thing, tests, properties=dict(
owner=relationship(Owner, backref='tests'),
category=relationship(Category),
- owner_option=relationship(Option,
- primaryjoin=sa.and_(tests.c.id == options.c.test_id,
- tests.c.owner_id == options.c.owner_id),
+ owner_option=relationship(
+ Option, primaryjoin=sa.and_(
+ tests.c.id == options.c.test_id,
+ tests.c.owner_id == options.c.owner_id),
foreign_keys=[options.c.test_id, options.c.owner_id],
uselist=False)))
@classmethod
def insert_data(cls):
Owner, Category, Option, Thing = (cls.classes.Owner,
- cls.classes.Category,
- cls.classes.Option,
- cls.classes.Thing)
+ cls.classes.Category,
+ cls.classes.Option,
+ cls.classes.Thing)
session = create_session()
"""test the control case"""
tests, options, categories = (self.tables.tests,
- self.tables.options,
- self.tables.categories)
+ self.tables.options,
+ self.tables.categories)
# I want to display a list of tests owned by owner 1
# if someoption is false or they haven't specified it yet (null)
# not orm style correct query
print("Obtaining correct results without orm")
result = sa.select(
- [tests.c.id,categories.c.name],
+ [tests.c.id, categories.c.name],
sa.and_(tests.c.owner_id == 1,
- sa.or_(options.c.someoption==None,
- options.c.someoption==False)),
+ sa.or_(options.c.someoption == None, # noqa
+ options.c.someoption == False)),
order_by=[tests.c.id],
from_obj=[tests.join(categories).outerjoin(options, sa.and_(
tests.c.id == options.c.test_id,
tests.c.owner_id == options.c.owner_id))]
- ).execute().fetchall()
+ ).execute().fetchall()
eq_(result, [(1, 'Some Category'), (3, 'Some Category')])
def test_withoutjoinedload(self):
Thing, tests, options = (self.classes.Thing,
- self.tables.tests,
- self.tables.options)
+ self.tables.tests,
+ self.tables.options)
s = create_session()
- l = (s.query(Thing).
- select_from(tests.outerjoin(options,
- sa.and_(tests.c.id == options.c.test_id,
- tests.c.owner_id ==
- options.c.owner_id))).
- filter(sa.and_(tests.c.owner_id==1,
- sa.or_(options.c.someoption==None,
- options.c.someoption==False))))
-
- result = ["%d %s" % ( t.id,t.category.name ) for t in l]
- eq_(result, ['1 Some Category', '3 Some Category'])
+ result = (s.query(Thing)
+ .select_from(tests.outerjoin(
+ options,
+ sa.and_(tests.c.id == options.c.test_id,
+ tests.c.owner_id == options.c.owner_id)))
+ .filter(sa.and_(
+ tests.c.owner_id == 1,
+ sa.or_(options.c.someoption == None, # noqa
+ options.c.someoption == False))))
+
+ result_str = ["%d %s" % (t.id, t.category.name) for t in result]
+ eq_(result_str, ['1 Some Category', '3 Some Category'])
def test_withjoinedload(self):
"""
"""
Thing, tests, options = (self.classes.Thing,
- self.tables.tests,
- self.tables.options)
+ self.tables.tests,
+ self.tables.options)
s = create_session()
- q=s.query(Thing).options(sa.orm.joinedload('category'))
+ q = s.query(Thing).options(sa.orm.joinedload('category'))
- l=(q.select_from(tests.outerjoin(options,
- sa.and_(tests.c.id ==
- options.c.test_id,
- tests.c.owner_id ==
- options.c.owner_id))).
- filter(sa.and_(tests.c.owner_id == 1,
- sa.or_(options.c.someoption==None,
- options.c.someoption==False))))
+ result = (q.select_from(tests.outerjoin(options,
+ sa.and_(tests.c.id ==
+ options.c.test_id,
+ tests.c.owner_id ==
+ options.c.owner_id))).
+ filter(sa.and_(tests.c.owner_id == 1,
+ sa.or_(options.c.someoption == None, # noqa
+ options.c.someoption == False))))
- result = ["%d %s" % ( t.id,t.category.name ) for t in l]
- eq_(result, ['1 Some Category', '3 Some Category'])
+ result_str = ["%d %s" % (t.id, t.category.name) for t in result]
+ eq_(result_str, ['1 Some Category', '3 Some Category'])
def test_dslish(self):
"""test the same as withjoinedload except using generative"""
Thing, tests, options = (self.classes.Thing,
- self.tables.tests,
- self.tables.options)
+ self.tables.tests,
+ self.tables.options)
s = create_session()
q = s.query(Thing).options(sa.orm.joinedload('category'))
- l = q.filter (
+ result = q.filter(
sa.and_(tests.c.owner_id == 1,
- sa.or_(options.c.someoption == None,
+ sa.or_(options.c.someoption == None, # noqa
options.c.someoption == False))
- ).outerjoin('owner_option')
+ ).outerjoin('owner_option')
- result = ["%d %s" % ( t.id,t.category.name ) for t in l]
- eq_(result, ['1 Some Category', '3 Some Category'])
+ result_str = ["%d %s" % (t.id, t.category.name) for t in result]
+ eq_(result_str, ['1 Some Category', '3 Some Category'])
@testing.crashes('sybase', 'FIXME: unknown, verify not fails_on')
def test_without_outerjoin_literal(self):
Thing, tests, false = (self.classes.Thing,
- self.tables.tests,
- self.other.false)
+ self.tables.tests,
+ self.other.false)
s = create_session()
q = s.query(Thing).options(sa.orm.joinedload('category'))
- l = (q.filter(
- (tests.c.owner_id==1) &
- text('options.someoption is null or options.someoption=%s' % false)).
- join('owner_option'))
+ result = (q.filter(
+ (tests.c.owner_id == 1) &
+ text(
+ 'options.someoption is null or options.someoption=%s' %
+ false)).join('owner_option'))
- result = ["%d %s" % ( t.id,t.category.name ) for t in l]
- eq_(result, ['3 Some Category'])
+ result_str = ["%d %s" % (t.id, t.category.name) for t in result]
+ eq_(result_str, ['3 Some Category'])
def test_withoutouterjoin(self):
Thing, tests, options = (self.classes.Thing,
- self.tables.tests,
- self.tables.options)
+ self.tables.tests,
+ self.tables.options)
s = create_session()
q = s.query(Thing).options(sa.orm.joinedload('category'))
- l = q.filter(
- (tests.c.owner_id==1) &
- ((options.c.someoption==None) | (options.c.someoption==False))
- ).join('owner_option')
+ result = q.filter(
+ (tests.c.owner_id == 1) &
+ ((options.c.someoption == None) | (options.c.someoption == False)) # noqa
+ ).join('owner_option')
- result = ["%d %s" % ( t.id,t.category.name ) for t in l]
- eq_(result, ['3 Some Category'])
+ result_str = ["%d %s" % (t.id, t.category.name) for t in result]
+ eq_(result_str, ['3 Some Category'])
class EagerTest2(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('left', metadata,
- Column('id', Integer, ForeignKey('middle.id'), primary_key=True),
- Column('data', String(50), primary_key=True))
+ Column('id', Integer, ForeignKey('middle.id'), primary_key=True),
+ Column('data', String(50), primary_key=True))
Table('middle', metadata,
- Column('id', Integer, primary_key = True, test_needs_autoincrement=True),
- Column('data', String(50)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)))
Table('right', metadata,
- Column('id', Integer, ForeignKey('middle.id'), primary_key=True),
- Column('data', String(50), primary_key=True))
+ Column('id', Integer, ForeignKey('middle.id'), primary_key=True),
+ Column('data', String(50), primary_key=True))
@classmethod
def setup_classes(cls):
@classmethod
def setup_mappers(cls):
Right, Middle, middle, right, left, Left = (cls.classes.Right,
- cls.classes.Middle,
- cls.tables.middle,
- cls.tables.right,
- cls.tables.left,
- cls.classes.Left)
+ cls.classes.Middle,
+ cls.tables.middle,
+ cls.tables.right,
+ cls.tables.left,
+ cls.classes.Left)
# set up bi-directional eager loads
mapper(Left, left)
mapper(Right, right)
mapper(Middle, middle, properties=dict(
left=relationship(Left,
- lazy='joined',
- backref=backref('middle',lazy='joined')),
+ lazy='joined',
+ backref=backref('middle', lazy='joined')),
right=relationship(Right,
- lazy='joined',
- backref=backref('middle', lazy='joined')))),
+ lazy='joined',
+ backref=backref('middle', lazy='joined')))),
def test_eager_terminate(self):
"""Eager query generation does not include the same mapper's table twice.
"""
Middle, Right, Left = (self.classes.Middle,
- self.classes.Right,
- self.classes.Left)
+ self.classes.Right,
+ self.classes.Left)
p = Middle('m1')
p.left.append(Left('l1'))
class EagerTest3(fixtures.MappedTest):
- """Eager loading combined with nested SELECT statements, functions, and aggregates."""
+ """Eager loading combined with nested SELECT statements, functions, and
+ aggregates."""
@classmethod
def define_tables(cls, metadata):
Table('datas', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('a', Integer, nullable=False))
Table('foo', metadata,
- Column('data_id', Integer, ForeignKey('datas.id'),primary_key=True),
+ Column('data_id', Integer, ForeignKey('datas.id'),
+ primary_key=True),
Column('bar', Integer))
Table('stats', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data_id', Integer, ForeignKey('datas.id')),
- Column('somedata', Integer, nullable=False ))
+ Column('somedata', Integer, nullable=False))
@classmethod
def setup_classes(cls):
def test_nesting_with_functions(self):
Stat, Foo, stats, foo, Data, datas = (self.classes.Stat,
- self.classes.Foo,
- self.tables.stats,
- self.tables.foo,
- self.classes.Data,
- self.tables.datas)
+ self.classes.Foo,
+ self.tables.stats,
+ self.tables.foo,
+ self.classes.Data,
+ self.tables.datas)
mapper(Data, datas)
mapper(Foo, foo, properties={
- 'data': relationship(Data,backref=backref('foo',uselist=False))})
+ 'data': relationship(Data, backref=backref('foo', uselist=False))})
mapper(Stat, stats, properties={
- 'data':relationship(Data)})
+ 'data': relationship(Data)})
session = create_session()
arb_result = arb_data.execute().fetchall()
# order the result list descending based on 'max'
- arb_result.sort(key = lambda a: a['max'], reverse=True)
+ arb_result.sort(key=lambda a: a['max'], reverse=True)
# extract just the "data_id" from it
arb_result = [row['data_id'] for row in arb_result]
# "order by max desc" separately
q = (session.query(Data).
options(sa.orm.joinedload('foo')).
- select_from(datas.join(arb_data, arb_data.c.data_id == datas.c.id)).
+ select_from(datas.join(arb_data,
+ arb_data.c.data_id == datas.c.id)).
order_by(sa.desc(arb_data.c.max)).
limit(10))
eq_(verify_result, arb_result)
+
class EagerTest4(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('departments', metadata,
- Column('department_id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('department_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(50)))
Table('employees', metadata,
- Column('person_id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('person_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(50)),
Column('department_id', Integer,
ForeignKey('departments.department_id')))
pass
def test_basic(self):
- Department, Employee, employees, departments = (self.classes.Department,
- self.classes.Employee,
- self.tables.employees,
- self.tables.departments)
+ Department, Employee, employees, departments = (
+ self.classes.Department, self.classes.Employee,
+ self.tables.employees, self.tables.departments)
mapper(Employee, employees)
mapper(Department, departments, properties=dict(
employees=relationship(Employee,
- lazy='joined',
- backref='department')))
+ lazy='joined',
+ backref='department')))
d1 = Department(name='One')
for e in 'Jim', 'Jack', 'John', 'Susan':
class EagerTest5(fixtures.MappedTest):
- """Construction of AliasedClauses for the same eager load property but different parent mappers, due to inheritance."""
+ """Construction of AliasedClauses for the same eager load property but
+ different parent mappers, due to inheritance."""
@classmethod
def define_tables(cls, metadata):
Column('x', String(30)))
Table('derived', metadata,
- Column('uid', String(30), ForeignKey('base.uid'), primary_key=True),
+ Column('uid', String(30),
+ ForeignKey('base.uid'),
+ primary_key=True),
Column('y', String(30)))
Table('derivedII', metadata,
- Column('uid', String(30), ForeignKey('base.uid'), primary_key=True),
+ Column('uid', String(30),
+ ForeignKey('base.uid'),
+ primary_key=True),
Column('z', String(30)))
Table('comments', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('uid', String(30), ForeignKey('base.uid')),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('uid', String(30),
+ ForeignKey('base.uid')),
Column('comment', String(30)))
@classmethod
self.comment = comment
def test_basic(self):
- Comment, Derived, derived, comments, DerivedII, Base, base, derivedII = (self.classes.Comment,
- self.classes.Derived,
- self.tables.derived,
- self.tables.comments,
- self.classes.DerivedII,
- self.classes.Base,
- self.tables.base,
- self.tables.derivedII)
+ Comment, Derived, derived, comments, \
+ DerivedII, Base, base, derivedII = (self.classes.Comment,
+ self.classes.Derived,
+ self.tables.derived,
+ self.tables.comments,
+ self.classes.DerivedII,
+ self.classes.Base,
+ self.tables.base,
+ self.tables.derivedII)
commentMapper = mapper(Comment, comments)
baseMapper = mapper(Base, base, properties=dict(
comments=relationship(Comment, lazy='joined',
- cascade='all, delete-orphan')))
+ cascade='all, delete-orphan')))
mapper(Derived, derived, inherits=baseMapper)
@classmethod
def define_tables(cls, metadata):
Table('design_types', metadata,
- Column('design_type_id', Integer, primary_key=True, test_needs_autoincrement=True))
+ Column('design_type_id', Integer, primary_key=True,
+ test_needs_autoincrement=True))
Table('design', metadata,
- Column('design_id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('design_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('design_type_id', Integer,
ForeignKey('design_types.design_type_id')))
Table('parts', metadata,
- Column('part_id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('part_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('design_id', Integer, ForeignKey('design.design_id')),
Column('design_type_id', Integer,
ForeignKey('design_types.design_type_id')))
Table('inherited_part', metadata,
- Column('ip_id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('ip_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('part_id', Integer, ForeignKey('parts.part_id')),
Column('design_id', Integer, ForeignKey('design.design_id')))
pass
def test_one(self):
- Part, inherited_part, design_types, DesignType, parts, design, Design, InheritedPart = (self.classes.Part,
- self.tables.inherited_part,
- self.tables.design_types,
- self.classes.DesignType,
- self.tables.parts,
- self.tables.design,
- self.classes.Design,
- self.classes.InheritedPart)
+ Part, inherited_part, design_types, DesignType, \
+ parts, design, Design, InheritedPart = (self.classes.Part,
+ self.tables.inherited_part,
+ self.tables.design_types,
+ self.classes.DesignType,
+ self.tables.parts,
+ self.tables.design,
+ self.classes.Design,
+ self.classes.InheritedPart)
p_m = mapper(Part, parts)
d_m = mapper(Design, design, properties=dict(
inheritedParts=relationship(InheritedPart,
- cascade="all, delete-orphan",
- backref="design")))
+ cascade="all, delete-orphan",
+ backref="design")))
mapper(DesignType, design_types)
Design, lazy='joined',
backref=backref("parts", cascade="all, delete-orphan")))
-
d = Design()
sess = create_session()
sess.add(d)
@classmethod
def define_tables(cls, metadata):
Table('companies', metadata,
- Column('company_id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('company_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('company_name', String(40)))
Table('addresses', metadata,
- Column('address_id', Integer, primary_key=True,test_needs_autoincrement=True),
- Column('company_id', Integer, ForeignKey("companies.company_id")),
+ Column('address_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('company_id', Integer,
+ ForeignKey("companies.company_id")),
Column('address', String(40)))
Table('phone_numbers', metadata,
- Column('phone_id', Integer, primary_key=True,test_needs_autoincrement=True),
- Column('address_id', Integer, ForeignKey('addresses.address_id')),
+ Column('phone_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('address_id', Integer,
+ ForeignKey('addresses.address_id')),
Column('type', String(20)),
Column('number', String(10)))
Table('invoices', metadata,
- Column('invoice_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('company_id', Integer, ForeignKey("companies.company_id")),
+ Column('invoice_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('company_id', Integer,
+ ForeignKey("companies.company_id")),
Column('date', sa.DateTime))
@classmethod
"""
- addresses, invoices, Company, companies, Invoice, Address = (self.tables.addresses,
- self.tables.invoices,
- self.classes.Company,
- self.tables.companies,
- self.classes.Invoice,
- self.classes.Address)
+ addresses, invoices, Company, companies, Invoice, Address = (
+ self.tables.addresses, self.tables.invoices, self.classes.Company,
+ self.tables.companies, self.classes.Invoice, self.classes.Address)
mapper(Address, addresses)
mapper(Company, companies, properties={
- 'addresses' : relationship(Address, lazy='joined')})
+ 'addresses': relationship(Address, lazy='joined')})
mapper(Invoice, invoices, properties={
'company': relationship(Company, lazy='joined')})
c1 = Company(company_name='company 1', addresses=[a1, a2])
i1 = Invoice(date=datetime.datetime.now(), company=c1)
-
session = create_session()
session.add(i1)
session.flush()
self.assert_sql_count(testing.db, go, 0)
-
class EagerTest8(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('prj', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('created', sa.DateTime ),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('created', sa.DateTime),
Column('title', sa.String(100)))
Table('task', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('status_id', Integer,
- ForeignKey('task_status.id'), nullable=False),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('status_id', Integer, ForeignKey('task_status.id'),
+ nullable=False),
Column('title', sa.String(100)),
- Column('task_type_id', Integer ,
- ForeignKey('task_type.id'), nullable=False),
- Column('prj_id', Integer , ForeignKey('prj.id'), nullable=False))
+ Column('task_type_id', Integer, ForeignKey('task_type.id'),
+ nullable=False),
+ Column('prj_id', Integer, ForeignKey('prj.id'),
+ nullable=False))
Table('task_status', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True))
Table('task_type', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True))
Table('msg', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('posted', sa.DateTime, index=True,),
Column('type_id', Integer, ForeignKey('msg_type.id')),
Column('task_id', Integer, ForeignKey('task.id')))
Table('msg_type', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', sa.String(20)),
Column('display_name', sa.String(20)))
def test_nested_joins(self):
task, Task_Type, Joined, prj, task_type, msg = (self.tables.task,
- self.classes.Task_Type,
- self.classes.Joined,
- self.tables.prj,
- self.tables.task_type,
- self.tables.msg)
+ self.classes.Task_Type,
+ self.classes.Joined,
+ self.tables.prj,
+ self.tables.task_type,
+ self.tables.msg)
# this is testing some subtle column resolution stuff,
# concerning corresponding_column() being extremely accurate
mapper(Task_Type, task_type)
- tsk_cnt_join = sa.outerjoin(prj, task, task.c.prj_id==prj.c.id)
+ tsk_cnt_join = sa.outerjoin(prj, task, task.c.prj_id == prj.c.id)
- j = sa.outerjoin(task, msg, task.c.id==msg.c.task_id)
- jj = sa.select([ task.c.id.label('task_id'),
- sa.func.count(msg.c.id).label('props_cnt')],
- from_obj=[j],
- group_by=[task.c.id]).alias('prop_c_s')
+ j = sa.outerjoin(task, msg, task.c.id == msg.c.task_id)
+ jj = sa.select([task.c.id.label('task_id'),
+ sa.func.count(msg.c.id).label('props_cnt')],
+ from_obj=[j],
+ group_by=[task.c.id]).alias('prop_c_s')
jjj = sa.join(task, jj, task.c.id == jj.c.task_id)
mapper(Joined, jjj, properties=dict(
@classmethod
def define_tables(cls, metadata):
Table('accounts', metadata,
- Column('account_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(40)))
+ Column('account_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(40)))
Table('transactions', metadata,
- Column('transaction_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(40)))
+ Column('transaction_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(40)))
Table('entries', metadata,
- Column('entry_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(40)),
- Column('account_id', Integer,
- ForeignKey('accounts.account_id')),
- Column('transaction_id', Integer,
- ForeignKey('transactions.transaction_id')))
+ Column('entry_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(40)),
+ Column('account_id', Integer, ForeignKey('accounts.account_id')),
+ Column('transaction_id', Integer,
+ ForeignKey('transactions.transaction_id')))
@classmethod
def setup_classes(cls):
@classmethod
def setup_mappers(cls):
- Account, Transaction, transactions, accounts, entries, Entry = (cls.classes.Account,
- cls.classes.Transaction,
- cls.tables.transactions,
- cls.tables.accounts,
- cls.tables.entries,
- cls.classes.Entry)
+ Account, Transaction, transactions, accounts, entries, Entry = (
+ cls.classes.Account, cls.classes.Transaction, cls.tables.
+ transactions, cls.tables.accounts, cls.tables.entries, cls.classes.
+ Entry)
mapper(Account, accounts)
mapper(Transaction, transactions)
- mapper(Entry, entries, properties=dict(
- account=relationship(Account,
- uselist=False,
- backref=backref('entries', lazy='select',
- order_by=entries.c.entry_id)),
- transaction=relationship(Transaction,
- uselist=False,
- backref=backref('entries', lazy='joined',
- order_by=entries.c.entry_id))))
+ mapper(
+ Entry, entries,
+ properties=dict(
+ account=relationship(
+ Account, uselist=False,
+ backref=backref(
+ 'entries', lazy='select',
+ order_by=entries.c.entry_id)),
+ transaction=relationship(
+ Transaction, uselist=False,
+ backref=backref(
+ 'entries', lazy='joined',
+ order_by=entries.c.entry_id))))
def test_joinedload_on_path(self):
Entry, Account, Transaction = (self.classes.Entry,
- self.classes.Account,
- self.classes.Transaction)
+ self.classes.Account,
+ self.classes.Transaction)
session = create_session()
# all objects saved thus far, but will not eagerly load the
# "accounts" off the immediate "entries"; only the "accounts" off
# the entries->transaction->entries
- acc = (session.query(Account).
- options(sa.orm.joinedload_all('entries.transaction.entries.account')).
- order_by(Account.account_id)).first()
+ acc = (session.query(Account).options(
+ sa.orm.joinedload_all(
+ 'entries.transaction.entries.account')).order_by(
+ Account.account_id)).first()
# no sql occurs
eq_(acc.name, 'acc1')
assert e.account is acc
self.assert_sql_count(testing.db, go, 1)
-
-
-
def _scalar_obj_fixture(self):
class A(object):
pass
+
class B(object):
pass
instrumentation.register_class(A)
def _collection_obj_fixture(self):
class A(object):
pass
+
class B(object):
pass
instrumentation.register_class(A)
"Object <B at .*?> not "
"associated with <A at .*?> on attribute 'b'",
A.b.impl.remove,
- attributes.instance_state(a1),
- attributes.instance_dict(a1), b2, None
+ attributes.instance_state(a1),
+ attributes.instance_dict(a1), b2, None
)
def test_scalar_obj_pop_invalid(self):
ValueError,
r"list.remove\(.*?\): .* not in list",
A.b.impl.remove,
- attributes.instance_state(a1),
- attributes.instance_dict(a1), b2, None
+ attributes.instance_state(a1),
+ attributes.instance_dict(a1), b2, None
)
def test_collection_obj_pop_invalid(self):
class AttributesTest(fixtures.ORMTest):
def setup(self):
global MyTest, MyTest2
- class MyTest(object): pass
- class MyTest2(object): pass
+
+ class MyTest(object):
+ pass
+
+ class MyTest2(object):
+ pass
def teardown(self):
global MyTest, MyTest2
instrumentation.register_class(User)
attributes.register_attribute(User, 'user_id', uselist=False,
- useobject=False)
+ useobject=False)
attributes.register_attribute(User, 'user_name', uselist=False,
- useobject=False)
+ useobject=False)
attributes.register_attribute(User, 'email_address',
- uselist=False, useobject=False)
+ uselist=False, useobject=False)
u = User()
u.user_id = 7
u.user_name = 'john'
instrumentation.register_class(MyTest)
instrumentation.register_class(MyTest2)
attributes.register_attribute(MyTest, 'user_id', uselist=False,
- useobject=False)
+ useobject=False)
attributes.register_attribute(MyTest, 'user_name',
- uselist=False, useobject=False)
+ uselist=False, useobject=False)
attributes.register_attribute(MyTest, 'email_address',
- uselist=False, useobject=False)
+ uselist=False, useobject=False)
attributes.register_attribute(MyTest2, 'a', uselist=False,
- useobject=False)
+ useobject=False)
attributes.register_attribute(MyTest2, 'b', uselist=False,
- useobject=False)
+ useobject=False)
# shouldn't be pickling callables at the class level
return None
attributes.register_attribute(MyTest, 'mt2', uselist=True,
- trackparent=True, callable_=somecallable,
- useobject=True)
+ trackparent=True, callable_=somecallable,
+ useobject=True)
o = MyTest()
o.mt2.append(MyTest2())
- o.user_id=7
+ o.user_id = 7
o.mt2[0].a = 'abcde'
pk_o = pickle.dumps(o)
o2 = pickle.loads(pk_o)
pk_o2 = pickle.dumps(o2)
-
# the above is kind of distrurbing, so let's do it again a little
# differently. the string-id in serialization thing is just an
# artifact of pickling that comes up in the first round-trip.
def test_object_dereferenced_error(self):
class Foo(object):
pass
+
class Bar(object):
def __init__(self):
gc_collect()
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
attributes.register_attribute(Foo,
- 'bars',
- uselist=True,
- useobject=True)
+ 'bars',
+ uselist=True,
+ useobject=True)
assert_raises_message(
orm_exc.ObjectDereferencedError,
class Foo(object):
pass
- data = {'a':'this is a', 'b':12}
+ data = {'a': 'this is a', 'b': 12}
+
def loader(state, keys):
for k in keys:
state.dict[k] = data[k]
f = Foo()
attributes.instance_state(f)._expire(attributes.instance_dict(f),
- set())
+ set())
eq_(f.a, 'this is a')
eq_(f.b, 12)
f.a = 'this is some new a'
attributes.instance_state(f)._expire(attributes.instance_dict(f),
- set())
+ set())
eq_(f.a, 'this is a')
eq_(f.b, 12)
attributes.instance_state(f)._expire(attributes.instance_dict(f),
- set())
+ set())
f.a = 'this is another new a'
eq_(f.a, 'this is another new a')
eq_(f.b, 12)
attributes.instance_state(f)._expire(attributes.instance_dict(f),
- set())
+ set())
eq_(f.a, 'this is a')
eq_(f.b, 12)
del f.a
eq_(f.a, None)
eq_(f.b, 12)
attributes.instance_state(f)._commit_all(attributes.instance_dict(f),
- set())
+ set())
eq_(f.a, None)
eq_(f.b, 12)
def test_deferred_pickleable(self):
- data = {'a':'this is a', 'b':12}
+ data = {'a': 'this is a', 'b': 12}
+
def loader(state, keys):
for k in keys:
state.dict[k] = data[k]
instrumentation.register_class(MyTest)
manager = attributes.manager_of_class(MyTest)
- manager.deferred_scalar_loader=loader
- attributes.register_attribute(MyTest, 'a', uselist=False, useobject=False)
- attributes.register_attribute(MyTest, 'b', uselist=False, useobject=False)
+ manager.deferred_scalar_loader = loader
+ attributes.register_attribute(MyTest, 'a', uselist=False,
+ useobject=False)
+ attributes.register_attribute(MyTest, 'b', uselist=False,
+ useobject=False)
m = MyTest()
- attributes.instance_state(m)._expire(attributes.instance_dict(m), set())
+ attributes.instance_state(m)._expire(attributes.instance_dict(m),
+ set())
assert 'a' not in m.__dict__
m2 = pickle.loads(pickle.dumps(m))
assert 'a' not in m2.__dict__
eq_(m2.b, 12)
def test_list(self):
- class User(object):pass
- class Address(object):pass
+ class User(object):
+ pass
+
+ class Address(object):
+ pass
instrumentation.register_class(User)
instrumentation.register_class(Address)
attributes.register_attribute(User, 'user_id', uselist=False,
- useobject=False)
+ useobject=False)
attributes.register_attribute(User, 'user_name', uselist=False,
- useobject=False)
+ useobject=False)
attributes.register_attribute(User, 'addresses', uselist=True,
- useobject=True)
+ useobject=True)
attributes.register_attribute(Address, 'address_id',
- uselist=False, useobject=False)
+ uselist=False, useobject=False)
attributes.register_attribute(Address, 'email_address',
- uselist=False, useobject=False)
+ uselist=False, useobject=False)
u = User()
u.user_id = 7
eq_(u.user_id, 7)
eq_(u.user_name, 'heythere')
- eq_(u.addresses[0].email_address,'lala@123.com')
- eq_(u.addresses[1].email_address,'foo@bar.com')
+ eq_(u.addresses[0].email_address, 'lala@123.com')
+ eq_(u.addresses[1].email_address, 'foo@bar.com')
def test_extension_commit_attr(self):
"""test that an extension which commits attribute history
class Foo(fixtures.BasicEntity):
pass
+
class Bar(fixtures.BasicEntity):
pass
return b2
attributes.register_attribute(Foo, 'bars',
- uselist=True,
- useobject=True,
- callable_=loadcollection,
- extension=[ReceiveEvents('bars')])
+ uselist=True,
+ useobject=True,
+ callable_=loadcollection,
+ extension=[ReceiveEvents('bars')])
attributes.register_attribute(Foo, 'bar',
- uselist=False,
- useobject=True,
- callable_=loadscalar,
- extension=[ReceiveEvents('bar')])
+ uselist=False,
+ useobject=True,
+ callable_=loadscalar,
+ extension=[ReceiveEvents('bar')])
attributes.register_attribute(Foo, 'scalar',
- uselist=False,
- useobject=False, extension=[ReceiveEvents('scalar')])
-
+ uselist=False,
+ useobject=False,
+ extension=[ReceiveEvents('scalar')])
def create_hist():
def hist(key, shouldmatch, fn, *arg):
- attributes.instance_state(f1)._commit_all(attributes.instance_dict(f1))
+ attributes.instance_state(f1)._commit_all(
+ attributes.instance_dict(f1))
fn(*arg)
histories.append((shouldmatch,
- attributes.get_history(f1, key)))
+ attributes.get_history(f1, key)))
f1 = Foo()
hist('bars', True, f1.bars.append, b3)
def test_extension_lazyload_assertion(self):
class Foo(fixtures.BasicEntity):
pass
+
class Bar(fixtures.BasicEntity):
pass
instrumentation.register_class(Bar)
bar1, bar2, bar3 = [Bar(id=1), Bar(id=2), Bar(id=3)]
+
def func1(state, passive):
if passive is attributes.PASSIVE_NO_FETCH:
return attributes.PASSIVE_NO_RESULT
return [bar1, bar2, bar3]
attributes.register_attribute(Foo, 'bars', uselist=True,
- callable_=func1, useobject=True,
- extension=[ReceiveEvents()])
+ callable_=func1, useobject=True,
+ extension=[ReceiveEvents()])
attributes.register_attribute(Bar, 'foos', uselist=True,
- useobject=True, backref='bars')
+ useobject=True, backref='bars')
x = Foo()
assert_raises(AssertionError, Bar(id=4).foos.append, x)
x.bars
b = Bar(id=4)
b.foos.append(x)
- attributes.instance_state(x)._expire_attributes(attributes.instance_dict(x),
- ['bars'])
+ attributes.instance_state(x)._expire_attributes(
+ attributes.instance_dict(x), ['bars'])
assert_raises(AssertionError, b.foos.remove, x)
-
def test_scalar_listener(self):
# listeners on ScalarAttributeImpl aren't used normally. test that
pass
results = []
+
class ReceiveEvents(AttributeExtension):
def append(self, state, child, initiator):
assert False
return child
instrumentation.register_class(Foo)
- attributes.register_attribute(Foo, 'x', uselist=False,
- useobject=False,
- extension=ReceiveEvents())
+ attributes.register_attribute(Foo, 'x', uselist=False, useobject=False,
+ extension=ReceiveEvents())
f = Foo()
f.x = 5
class Post(object):
pass
+
class Blog(object):
pass
instrumentation.register_class(Post)
# set up instrumented attributes with backrefs
attributes.register_attribute(Post, 'blog', uselist=False,
- backref='posts',
- trackparent=True, useobject=True)
+ backref='posts',
+ trackparent=True, useobject=True)
attributes.register_attribute(Blog, 'posts', uselist=True,
- backref='blog',
- trackparent=True, useobject=True)
+ backref='blog',
+ trackparent=True, useobject=True)
- # create objects as if they'd been freshly loaded from the database (without history)
+ # create objects as if they'd been freshly loaded from the database
+ # (without history)
b = Blog()
p1 = Post()
- _set_callable(attributes.instance_state(b), attributes.instance_dict(b),
- 'posts', lambda state, passive:[p1])
- _set_callable(attributes.instance_state(p1), attributes.instance_dict(p1),
- 'blog', lambda state, passive:b)
- p1, attributes.instance_state(b)._commit_all(attributes.instance_dict(b))
+ _set_callable(attributes.instance_state(b),
+ attributes.instance_dict(b),
+ 'posts', lambda state, passive: [p1])
+ _set_callable(attributes.instance_state(p1),
+ attributes.instance_dict(p1),
+ 'blog', lambda state, passive: b)
+ p1, attributes.instance_state(b)._commit_all(
+ attributes.instance_dict(b))
# no orphans (called before the lazy loaders fire off)
assert attributes.has_parent(Blog, p1, 'posts', optimistic=True)
assert attributes.has_parent(Post, b2, 'blog')
def test_illegal_trackparent(self):
- class Post(object):pass
- class Blog(object):pass
+ class Post(object):
+ pass
+
+ class Blog(object):
+ pass
instrumentation.register_class(Post)
instrumentation.register_class(Blog)
Post.blog.impl.sethasparent, "x", "x", True
)
-
def test_inheritance(self):
"""tests that attributes are polymorphic"""
- class Foo(object):pass
- class Bar(Foo):pass
+ class Foo(object):
+ pass
+ class Bar(Foo):
+ pass
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
def func1(state, passive):
return "this is the foo attr"
+
def func2(state, passive):
return "this is the bar attr"
+
def func3(state, passive):
return "this is the shared attr"
attributes.register_attribute(Foo, 'element', uselist=False,
- callable_=func1, useobject=True)
+ callable_=func1, useobject=True)
attributes.register_attribute(Foo, 'element2', uselist=False,
- callable_=func3, useobject=True)
+ callable_=func3, useobject=True)
attributes.register_attribute(Bar, 'element', uselist=False,
- callable_=func2, useobject=True)
+ callable_=func2, useobject=True)
x = Foo()
y = Bar()
def test_no_double_state(self):
states = set()
+
class Foo(object):
def __init__(self):
states.add(attributes.instance_state(self))
+
class Bar(Foo):
def __init__(self):
states.add(attributes.instance_state(self))
Foo.__init__(self)
-
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
eq_(len(states), 1)
eq_(list(states)[0].obj(), b)
-
def test_inheritance2(self):
"""test that the attribute manager can properly traverse the
managed attributes of an object, if the object is of a
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'element', uselist=False,
- useobject=True)
+ useobject=True)
el = Element()
x = Bar()
x.element = el
eq_(attributes.get_state_history(attributes.instance_state(x),
- 'element'), ([el], (), ()))
+ 'element'), ([el], (), ()))
attributes.instance_state(x)._commit_all(attributes.instance_dict(x))
added, unchanged, deleted = \
attributes.get_state_history(attributes.instance_state(x),
- 'element')
+ 'element')
assert added == ()
assert unchanged == [el]
class Foo(fixtures.BasicEntity):
pass
+
class Bar(fixtures.BasicEntity):
pass
return [bar1, bar2, bar3]
attributes.register_attribute(Foo, 'col1', uselist=False,
- callable_=func1, useobject=True)
+ callable_=func1, useobject=True)
attributes.register_attribute(Foo, 'col2', uselist=True,
- callable_=func2, useobject=True)
+ callable_=func2, useobject=True)
attributes.register_attribute(Bar, 'id', uselist=False,
- useobject=True)
+ useobject=True)
x = Foo()
attributes.instance_state(x)._commit_all(attributes.instance_dict(x))
x.col2.append(bar4)
- eq_(attributes.get_state_history(attributes.instance_state(x),
- 'col2'), ([bar4], [bar1, bar2, bar3], []))
+ eq_(attributes.get_state_history(attributes.instance_state(x), 'col2'),
+ ([bar4], [bar1, bar2, bar3], []))
def test_parenttrack(self):
class Foo(object):
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'element', uselist=False,
- trackparent=True, useobject=True)
+ trackparent=True, useobject=True)
attributes.register_attribute(Bar, 'element', uselist=False,
- trackparent=True, useobject=True)
+ trackparent=True, useobject=True)
f1 = Foo()
f2 = Foo()
b1 = Bar()
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'collection', uselist=True,
- typecallable=set, useobject=True)
+ typecallable=set, useobject=True)
assert attributes.manager_of_class(Foo).is_instrumented('collection'
- )
+ )
assert isinstance(Foo().collection, set)
attributes.unregister_attribute(Foo, 'collection')
- assert not attributes.manager_of_class(Foo).is_instrumented('collection'
- )
+ assert not attributes.manager_of_class(Foo) \
+ .is_instrumented('collection')
try:
attributes.register_attribute(Foo, 'collection',
- uselist=True, typecallable=dict, useobject=True)
+ uselist=True, typecallable=dict,
+ useobject=True)
assert False
except sa_exc.ArgumentError as e:
assert str(e) \
del self[item.foo]
attributes.register_attribute(Foo, 'collection', uselist=True,
- typecallable=MyDict, useobject=True)
+ typecallable=MyDict, useobject=True)
assert isinstance(Foo().collection, MyDict)
attributes.unregister_attribute(Foo, 'collection')
try:
attributes.register_attribute(Foo, 'collection',
- uselist=True, typecallable=MyColl, useobject=True)
+ uselist=True, typecallable=MyColl,
+ useobject=True)
assert False
except sa_exc.ArgumentError as e:
assert str(e) \
pass
attributes.register_attribute(Foo, 'collection', uselist=True,
- typecallable=MyColl, useobject=True)
+ typecallable=MyColl, useobject=True)
try:
Foo().collection
assert True
except sa_exc.ArgumentError as e:
assert False
+
class GetNoValueTest(fixtures.ORMTest):
def _fixture(self, expected):
class Foo(object):
instrumentation.register_class(Bar)
if expected is not None:
attributes.register_attribute(Foo,
- "attr", useobject=True,
- uselist=False, callable_=lazy_callable)
+ "attr", useobject=True,
+ uselist=False,
+ callable_=lazy_callable)
else:
attributes.register_attribute(Foo,
- "attr", useobject=True,
- uselist=False)
+ "attr", useobject=True,
+ uselist=False)
f1 = self.f1 = Foo()
return Foo.attr.impl,\
- attributes.instance_state(f1), \
- attributes.instance_dict(f1)
-
+ attributes.instance_state(f1), \
+ attributes.instance_dict(f1)
def test_passive_no_result(self):
attr, state, dict_ = self._fixture(attributes.PASSIVE_NO_RESULT)
def test_passive_ret_never_set_never_set(self):
attr, state, dict_ = self._fixture(attributes.NEVER_SET)
eq_(
- attr.get(state, dict_, passive=attributes.PASSIVE_RETURN_NEVER_SET),
+ attr.get(state, dict_,
+ passive=attributes.PASSIVE_RETURN_NEVER_SET),
attributes.NEVER_SET
)
assert 'attr' not in dict_
def test_passive_ret_never_set_empty(self):
attr, state, dict_ = self._fixture(None)
eq_(
- attr.get(state, dict_, passive=attributes.PASSIVE_RETURN_NEVER_SET),
+ attr.get(state, dict_,
+ passive=attributes.PASSIVE_RETURN_NEVER_SET),
attributes.NEVER_SET
)
assert 'attr' not in dict_
)
assert 'attr' not in dict_
+
class UtilTest(fixtures.ORMTest):
def test_helpers(self):
class Foo(object):
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
- attributes.register_attribute(Foo, "coll", uselist=True, useobject=True)
+ attributes.register_attribute(
+ Foo, "coll", uselist=True, useobject=True)
f1 = Foo()
b1 = Bar()
attributes.del_attribute(f1, "coll")
assert "coll" not in f1.__dict__
+
class BackrefTest(fixtures.ORMTest):
def test_m2m(self):
- class Student(object):pass
- class Course(object):pass
+ class Student(object):
+ pass
+
+ class Course(object):
+ pass
instrumentation.register_class(Student)
instrumentation.register_class(Course)
attributes.register_attribute(Student, 'courses', uselist=True,
- backref="students", useobject=True)
+ backref="students", useobject=True)
attributes.register_attribute(Course, 'students', uselist=True,
- backref="courses", useobject=True)
+ backref="courses", useobject=True)
s = Student()
c = Course()
self.assert_(s2.courses == [c])
self.assert_(s1.courses == [c])
s1.courses.remove(c)
- self.assert_(c.students == [s2,s3])
+ self.assert_(c.students == [s2, s3])
def test_o2m(self):
- class Post(object):pass
- class Blog(object):pass
+ class Post(object):
+ pass
+
+ class Blog(object):
+ pass
instrumentation.register_class(Post)
instrumentation.register_class(Blog)
attributes.register_attribute(Post, 'blog', uselist=False,
- backref='posts',
- trackparent=True, useobject=True)
+ backref='posts',
+ trackparent=True, useobject=True)
attributes.register_attribute(Blog, 'posts', uselist=True,
- backref='blog',
- trackparent=True, useobject=True)
+ backref='blog',
+ trackparent=True, useobject=True)
b = Blog()
(p1, p2, p3) = (Post(), Post(), Post())
b.posts.append(p1)
del p5.blog
def test_o2o(self):
- class Port(object):pass
- class Jack(object):pass
+ class Port(object):
+ pass
+
+ class Jack(object):
+ pass
instrumentation.register_class(Port)
instrumentation.register_class(Jack)
attributes.register_attribute(Jack, 'port', uselist=False,
useobject=True, backref="jack")
-
p = Port()
j = Jack()
p.jack = j
class Parent(object):
pass
+
class Child(object):
pass
+
class SubChild(Child):
pass
instrumentation.register_class(Child)
instrumentation.register_class(SubChild)
attributes.register_attribute(Parent, 'child', uselist=False,
- backref="parent",
- parent_token = p_token,
- useobject=True)
+ backref="parent",
+ parent_token=p_token,
+ useobject=True)
attributes.register_attribute(Child, 'parent', uselist=False,
- backref="child",
- parent_token = c_token,
- useobject=True)
+ backref="child",
+ parent_token=c_token,
+ useobject=True)
attributes.register_attribute(SubChild, 'parent',
- uselist=False,
- backref="child",
- parent_token = c_token,
- useobject=True)
+ uselist=False,
+ backref="child",
+ parent_token=c_token,
+ useobject=True)
p1 = Parent()
c1 = Child()
def test_symmetric_o2m_inheritance(self):
class Parent(object):
pass
+
class SubParent(Parent):
pass
+
class Child(object):
pass
instrumentation.register_class(SubParent)
instrumentation.register_class(Child)
attributes.register_attribute(Parent, 'children', uselist=True,
- backref='parent',
- parent_token = p_token,
- useobject=True)
+ backref='parent',
+ parent_token=p_token,
+ useobject=True)
attributes.register_attribute(SubParent, 'children', uselist=True,
- backref='parent',
- parent_token = p_token,
- useobject=True)
+ backref='parent',
+ parent_token=p_token,
+ useobject=True)
attributes.register_attribute(Child, 'parent', uselist=False,
- backref='children',
- parent_token = c_token,
- useobject=True)
+ backref='children',
+ parent_token=c_token,
+ useobject=True)
p1 = Parent()
p2 = SubParent()
# event propagates to remove as of [ticket:2789]
assert c1 not in p1.children
+
class CyclicBackrefAssertionTest(fixtures.TestBase):
"""test that infinite recursion due to incorrect backref assignments
is blocked.
"""
+
def test_scalar_set_type_assertion(self):
A, B, C = self._scalar_fixture()
c1 = C()
c1.a.append, b1
)
-
def _scalar_fixture(self):
class A(object):
pass
+
class B(object):
pass
+
class C(object):
pass
instrumentation.register_class(A)
attributes.register_attribute(C, 'b', backref='c', useobject=True)
attributes.register_attribute(A, 'c', backref='a', useobject=True,
- uselist=True)
+ uselist=True)
attributes.register_attribute(B, 'c', backref='b', useobject=True,
- uselist=True)
+ uselist=True)
return A, B, C
def _collection_fixture(self):
class A(object):
pass
+
class B(object):
pass
+
class C(object):
pass
instrumentation.register_class(A)
instrumentation.register_class(C)
attributes.register_attribute(C, 'a', backref='c', useobject=True,
- uselist=True)
+ uselist=True)
attributes.register_attribute(C, 'b', backref='c', useobject=True,
- uselist=True)
+ uselist=True)
attributes.register_attribute(A, 'c', backref='a', useobject=True)
attributes.register_attribute(B, 'c', backref='b', useobject=True)
def _broken_collection_fixture(self):
class A(object):
pass
+
class B(object):
pass
instrumentation.register_class(A)
attributes.register_attribute(A, 'b', backref='a1', useobject=True)
attributes.register_attribute(B, 'a1', backref='b', useobject=True,
- uselist=True)
+ uselist=True)
attributes.register_attribute(B, 'a2', backref='b', useobject=True,
- uselist=True)
+ uselist=True)
return A, B
b1.a2.append, a1
)
+
class PendingBackrefTest(fixtures.ORMTest):
def _fixture(self):
class Post(object):
def __init__(self, name):
self.name = name
__hash__ = None
+
def __eq__(self, other):
return other is not None and other.name == self.name
def __init__(self, name):
self.name = name
__hash__ = None
+
def __eq__(self, other):
return other is not None and other.name == self.name
instrumentation.register_class(Post)
instrumentation.register_class(Blog)
attributes.register_attribute(Post, 'blog', uselist=False,
- backref='posts', trackparent=True, useobject=True)
+ backref='posts', trackparent=True,
+ useobject=True)
attributes.register_attribute(Blog, 'posts', uselist=True,
- backref='blog', callable_=lazy_posts, trackparent=True,
- useobject=True)
+ backref='blog', callable_=lazy_posts,
+ trackparent=True,
+ useobject=True)
return Post, Blog, lazy_posts
eq_(lazy_posts.call_count, 1)
eq_(attributes.instance_state(b).
- get_history('posts', attributes.PASSIVE_OFF),
- ([p, p4], [p1, p2, p3], []))
+ get_history('posts', attributes.PASSIVE_OFF),
+ ([p, p4], [p1, p2, p3], []))
eq_(lazy_posts.call_count, 1)
def test_passive_history_collection_never_set(self):
b = Blog("blog 1")
p = Post("post 1")
- state, dict_ = attributes.instance_state(b), attributes.instance_dict(b)
+ state, dict_ = (attributes.instance_state(b),
+ attributes.instance_dict(b))
# this sets up NEVER_SET on b.posts
p.blog = b
p = Post("post 1")
p.blog = b
eq_(lazy_posts.mock_calls,
- [call(b1_state, attributes.PASSIVE_NO_FETCH)])
+ [call(b1_state, attributes.PASSIVE_NO_FETCH)])
p.blog = None
eq_(lazy_posts.mock_calls,
- [call(b1_state, attributes.PASSIVE_NO_FETCH),
- call(b1_state, attributes.PASSIVE_NO_FETCH)])
+ [call(b1_state, attributes.PASSIVE_NO_FETCH),
+ call(b1_state, attributes.PASSIVE_NO_FETCH)])
lazy_posts.return_value = []
eq_(b.posts, [])
eq_(lazy_posts.mock_calls,
- [call(b1_state, attributes.PASSIVE_NO_FETCH),
- call(b1_state, attributes.PASSIVE_NO_FETCH),
- call(b1_state, attributes.PASSIVE_OFF)])
+ [call(b1_state, attributes.PASSIVE_NO_FETCH),
+ call(b1_state, attributes.PASSIVE_NO_FETCH),
+ call(b1_state, attributes.PASSIVE_OFF)])
def test_pending_combines_with_lazy(self):
Post, Blog, lazy_posts = self._fixture()
eq_(lazy_posts.mock_calls,
[call(b_state, attributes.PASSIVE_OFF)])
-
def test_commit_removes_pending(self):
Post, Blog, lazy_posts = self._fixture()
eq_(b.posts, [Post("post 1")])
eq_(lazy_posts.mock_calls,
[call(b_state, attributes.PASSIVE_NO_FETCH),
- call(b_state, attributes.PASSIVE_OFF)])
+ call(b_state, attributes.PASSIVE_OFF)])
+
class HistoryTest(fixtures.TestBase):
instrumentation.register_class(Foo)
attributes.register_attribute(
- Foo, 'someattr',
- uselist=uselist,
- useobject=useobject,
- active_history=active_history,
- **kw)
+ Foo, 'someattr',
+ uselist=uselist,
+ useobject=useobject,
+ active_history=active_history,
+ **kw)
return Foo
def _two_obj_fixture(self, uselist):
class Foo(fixtures.BasicEntity):
pass
+
class Bar(fixtures.BasicEntity):
def __bool__(self):
assert False
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'someattr', uselist=uselist,
- useobject=True)
+ useobject=True)
return Foo, Bar
def _someattr_history(self, f, **kw):
kw['passive'] = attributes.PASSIVE_OFF
return attributes.get_state_history(
- attributes.instance_state(f),
- 'someattr', **kw)
+ attributes.instance_state(f),
+ 'someattr', **kw)
def _commit_someattr(self, f):
attributes.instance_state(f)._commit(attributes.instance_dict(f),
- ['someattr'])
+ ['someattr'])
def _someattr_committed_state(self, f):
Foo = f.__class__
def test_committed_value_init(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
eq_(self._someattr_committed_state(f), None)
def test_committed_value_set(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = 3
eq_(self._someattr_committed_state(f), None)
def test_committed_value_set_active_hist(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
f.someattr = 3
eq_(self._someattr_committed_state(f), None)
def test_committed_value_set_commit(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = 3
self._commit_someattr(f)
def test_scalar_init(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
eq_(self._someattr_history(f), ((), (), ()))
def test_object_init(self):
Foo = self._fixture(uselist=False, useobject=True,
- active_history=False)
+ active_history=False)
f = Foo()
eq_(self._someattr_history(f), ((), (), ()))
def test_object_init_active_history(self):
Foo = self._fixture(uselist=False, useobject=True,
- active_history=True)
+ active_history=True)
f = Foo()
eq_(self._someattr_history(f), ((), (), ()))
def test_scalar_no_init_side_effect(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
self._someattr_history(f)
# no side effects
def test_scalar_set(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = 'hi'
eq_(self._someattr_history(f), (['hi'], (), ()))
# test_use_object_set_None,
# test_use_object_get_first_set_None
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = None
eq_(self._someattr_history(f), ([None], (), ()))
# test_use_object_set_None,
# test_use_object_get_first_set_None
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
assert f.someattr is None
f.someattr = None
def test_scalar_set_commit(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = 'hi'
self._commit_someattr(f)
def test_scalar_set_commit_reset(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = 'hi'
self._commit_someattr(f)
def test_scalar_set_commit_reset_commit(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = 'hi'
self._commit_someattr(f)
def test_scalar_set_commit_reset_commit_del(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = 'there'
self._commit_someattr(f)
def test_scalar_set_dict(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.__dict__['someattr'] = 'new'
eq_(self._someattr_history(f), ((), ['new'], ()))
def test_scalar_set_dict_set(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.__dict__['someattr'] = 'new'
self._someattr_history(f)
def test_scalar_set_dict_set_commit(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.__dict__['someattr'] = 'new'
self._someattr_history(f)
def test_scalar_set_None(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = None
eq_(self._someattr_history(f), ([None], (), ()))
def test_scalar_set_None_from_dict_set(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.__dict__['someattr'] = 'new'
f.someattr = None
def test_scalar_set_twice_no_commit(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = 'one'
eq_(self._someattr_history(f), (['one'], (), ()))
def test_scalar_active_init(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
eq_(self._someattr_history(f), ((), (), ()))
def test_scalar_active_no_init_side_effect(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
self._someattr_history(f)
# no side effects
def test_collection_never_set(self):
Foo = self._fixture(uselist=True, useobject=True,
- active_history=True)
+ active_history=True)
f = Foo()
eq_(self._someattr_history(f, passive=True), (None, None, None))
def test_scalar_obj_never_set(self):
Foo = self._fixture(uselist=False, useobject=True,
- active_history=True)
+ active_history=True)
f = Foo()
eq_(self._someattr_history(f, passive=True), (None, None, None))
def test_scalar_never_set(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
eq_(self._someattr_history(f, passive=True), (None, None, None))
def test_scalar_active_set(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
f.someattr = 'hi'
eq_(self._someattr_history(f), (['hi'], (), ()))
def test_scalar_active_set_commit(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
f.someattr = 'hi'
self._commit_someattr(f)
def test_scalar_active_set_commit_reset(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
f.someattr = 'hi'
self._commit_someattr(f)
def test_scalar_active_set_commit_reset_commit(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
f.someattr = 'hi'
self._commit_someattr(f)
def test_scalar_active_set_commit_reset_commit_del(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
f.someattr = 'there'
self._commit_someattr(f)
def test_scalar_active_set_dict(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
f.__dict__['someattr'] = 'new'
eq_(self._someattr_history(f), ((), ['new'], ()))
def test_scalar_active_set_dict_set(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
f.__dict__['someattr'] = 'new'
self._someattr_history(f)
def test_scalar_active_set_dict_set_commit(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
f.__dict__['someattr'] = 'new'
self._someattr_history(f)
def test_scalar_active_set_None(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
f.someattr = None
eq_(self._someattr_history(f), ([None], (), ()))
def test_scalar_active_set_None_from_dict_set(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
f.__dict__['someattr'] = 'new'
f.someattr = None
def test_scalar_active_set_twice_no_commit(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
f.someattr = 'one'
eq_(self._someattr_history(f), (['one'], (), ()))
f.someattr = 'two'
eq_(self._someattr_history(f), (['two'], (), ()))
-
def test_scalar_passive_flag(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=True)
+ active_history=True)
f = Foo()
f.someattr = 'one'
eq_(self._someattr_history(f), (['one'], (), ()))
eq_(self._someattr_history(f), ((), ['one'], ()))
-
def test_scalar_inplace_mutation_set(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = {'a': 'b'}
eq_(self._someattr_history(f), ([{'a': 'b'}], (), ()))
def test_scalar_inplace_mutation_set_commit(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = {'a': 'b'}
self._commit_someattr(f)
def test_scalar_inplace_mutation_set_commit_set(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = {'a': 'b'}
self._commit_someattr(f)
def test_scalar_inplace_mutation_set_commit_flag_modified(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = {'a': 'b'}
self._commit_someattr(f)
def test_scalar_inplace_mutation_set_commit_set_flag_modified(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = {'a': 'b'}
self._commit_someattr(f)
def test_scalar_inplace_mutation_set_commit_flag_modified_set(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = {'a': 'b'}
self._commit_someattr(f)
def test_scalar_inplace_mutation_replace_self_flag_modified_set(self):
Foo = self._fixture(uselist=False, useobject=False,
- active_history=False)
+ active_history=False)
f = Foo()
f.someattr = {'a': 'b'}
self._commit_someattr(f)
attributes.flag_modified(f, 'someattr')
eq_(self._someattr_history(f), ([{'a': 'b'}], (), ()))
-
def test_use_object_init(self):
Foo, Bar = self._two_obj_fixture(uselist=False)
f = Foo()
f.someattr = there
eq_(self._someattr_history(f), ([there], (), ()))
-
def test_object_collections_set(self):
# TODO: break into individual tests
f = Foo()
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [], ()))
+ 'someattr'), ((), [], ()))
f.someattr = [hi]
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([hi], [], []))
+ 'someattr'), ([hi], [], []))
self._commit_someattr(f)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [hi], ()))
+ 'someattr'), ((), [hi], ()))
f.someattr = [there]
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([there], [], [hi]))
+ 'someattr'), ([there], [], [hi]))
self._commit_someattr(f)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [there], ()))
+ 'someattr'), ((), [there], ()))
f.someattr = [hi]
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([hi], [], [there]))
+ 'someattr'), ([hi], [], [there]))
f.someattr = [old, new]
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([old, new], [], [there]))
+ 'someattr'),
+ ([old, new], [], [there]))
# case 2. object with direct settings (similar to a load
# operation)
collection.append_without_event(new)
attributes.instance_state(f)._commit_all(attributes.instance_dict(f))
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [new], ()))
+ 'someattr'), ((), [new], ()))
f.someattr = [old]
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([old], [], [new]))
+ 'someattr'), ([old], [], [new]))
self._commit_someattr(f)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [old], ()))
+ 'someattr'), ((), [old], ()))
def test_dict_collections(self):
# TODO: break into individual tests
class Foo(fixtures.BasicEntity):
pass
+
class Bar(fixtures.BasicEntity):
pass
from sqlalchemy.orm.collections import attribute_mapped_collection
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
- attributes.register_attribute(Foo, 'someattr', uselist=True,
- useobject=True,
- typecallable=attribute_mapped_collection('name'))
+ attributes.register_attribute(
+ Foo, 'someattr', uselist=True, useobject=True,
+ typecallable=attribute_mapped_collection('name'))
hi = Bar(name='hi')
there = Bar(name='there')
old = Bar(name='old')
new = Bar(name='new')
f = Foo()
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [], ()))
+ 'someattr'), ((), [], ()))
f.someattr['hi'] = hi
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([hi], [], []))
+ 'someattr'), ([hi], [], []))
f.someattr['there'] = there
eq_(tuple([set(x) for x in
- attributes.get_state_history(attributes.instance_state(f),
- 'someattr')]), (set([hi, there]), set(), set()))
+ attributes.get_state_history(attributes.instance_state(f),
+ 'someattr')]),
+ (set([hi, there]), set(), set()))
self._commit_someattr(f)
eq_(tuple([set(x) for x in
- attributes.get_state_history(attributes.instance_state(f),
- 'someattr')]), (set(), set([hi, there]), set()))
+ attributes.get_state_history(attributes.instance_state(f),
+ 'someattr')]),
+ (set(), set([hi, there]), set()))
def test_object_collections_mutate(self):
# TODO: break into individual tests
class Foo(fixtures.BasicEntity):
pass
+
class Bar(fixtures.BasicEntity):
pass
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'someattr', uselist=True,
- useobject=True)
+ useobject=True)
attributes.register_attribute(Foo, 'id', uselist=False,
- useobject=False)
+ useobject=False)
instrumentation.register_class(Bar)
hi = Bar(name='hi')
there = Bar(name='there')
f = Foo(id=1)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [], ()))
+ 'someattr'), ((), [], ()))
f.someattr.append(hi)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([hi], [], []))
+ 'someattr'), ([hi], [], []))
self._commit_someattr(f)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [hi], ()))
+ 'someattr'), ((), [hi], ()))
f.someattr.append(there)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([there], [hi], []))
+ 'someattr'), ([there], [hi], []))
self._commit_someattr(f)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [hi, there], ()))
+ 'someattr'), ((), [hi, there], ()))
f.someattr.remove(there)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([], [hi], [there]))
+ 'someattr'), ([], [hi], [there]))
f.someattr.append(old)
f.someattr.append(new)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([old, new], [hi], [there]))
+ 'someattr'),
+ ([old, new], [hi], [there]))
attributes.instance_state(f)._commit(attributes.instance_dict(f),
- ['someattr'])
+ ['someattr'])
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [hi, old, new], ()))
+ 'someattr'), ((), [hi, old, new], ()))
f.someattr.pop(0)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([], [old, new], [hi]))
+ 'someattr'), ([], [old, new], [hi]))
# case 2. object with direct settings (similar to a load
# operation)
collection.append_without_event(new)
attributes.instance_state(f)._commit_all(attributes.instance_dict(f))
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [new], ()))
+ 'someattr'), ((), [new], ()))
f.someattr.append(old)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([old], [new], []))
+ 'someattr'), ([old], [new], []))
attributes.instance_state(f)._commit(attributes.instance_dict(f),
- ['someattr'])
+ ['someattr'])
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [new, old], ()))
+ 'someattr'), ((), [new, old], ()))
f = Foo()
collection = attributes.init_collection(f, 'someattr')
collection.append_without_event(new)
attributes.instance_state(f)._commit_all(attributes.instance_dict(f))
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [new], ()))
+ 'someattr'), ((), [new], ()))
f.id = 1
f.someattr.remove(new)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([], [], [new]))
+ 'someattr'), ([], [], [new]))
# case 3. mixing appends with sets
f = Foo()
f.someattr.append(hi)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([hi], [], []))
+ 'someattr'), ([hi], [], []))
f.someattr.append(there)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([hi, there], [], []))
+ 'someattr'), ([hi, there], [], []))
f.someattr = [there]
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([there], [], []))
+ 'someattr'), ([there], [], []))
# case 4. ensure duplicates show up, order is maintained
f.someattr.append(there)
f.someattr.append(hi)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([hi, there, hi], [], []))
+ 'someattr'),
+ ([hi, there, hi], [], []))
attributes.instance_state(f)._commit_all(attributes.instance_dict(f))
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ((), [hi, there, hi], ()))
+ 'someattr'),
+ ((), [hi, there, hi], ()))
f.someattr = []
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'someattr'), ([], [], [hi, there, hi]))
+ 'someattr'),
+ ([], [], [hi, there, hi]))
def test_collections_via_backref(self):
# TODO: break into individual tests
class Foo(fixtures.BasicEntity):
pass
+
class Bar(fixtures.BasicEntity):
pass
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'bars', uselist=True,
- backref='foo', trackparent=True, useobject=True)
+ backref='foo', trackparent=True,
+ useobject=True)
attributes.register_attribute(Bar, 'foo', uselist=False,
- backref='bars', trackparent=True, useobject=True)
+ backref='bars', trackparent=True,
+ useobject=True)
f1 = Foo()
b1 = Bar()
eq_(attributes.get_state_history(attributes.instance_state(f1),
- 'bars'), ((), [], ()))
+ 'bars'), ((), [], ()))
eq_(attributes.get_state_history(attributes.instance_state(b1),
- 'foo'), ((), (), ()))
+ 'foo'), ((), (), ()))
# b1.foo = f1
f1.bars.append(b1)
eq_(attributes.get_state_history(attributes.instance_state(f1),
- 'bars'), ([b1], [], []))
+ 'bars'), ([b1], [], []))
eq_(attributes.get_state_history(attributes.instance_state(b1),
- 'foo'), ([f1], (), ()))
+ 'foo'), ([f1], (), ()))
b2 = Bar()
f1.bars.append(b2)
eq_(attributes.get_state_history(attributes.instance_state(f1),
- 'bars'), ([b1, b2], [], []))
+ 'bars'), ([b1, b2], [], []))
eq_(attributes.get_state_history(attributes.instance_state(b1),
- 'foo'), ([f1], (), ()))
+ 'foo'), ([f1], (), ()))
eq_(attributes.get_state_history(attributes.instance_state(b2),
- 'foo'), ([f1], (), ()))
+ 'foo'), ([f1], (), ()))
def test_deprecated_flags(self):
assert_raises_message(
class Foo(fixtures.BasicEntity):
pass
+
class Bar(fixtures.BasicEntity):
pass
lazy_load = []
+
def lazyload(state, passive):
return lazy_load
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'bars', uselist=True,
- backref='foo', trackparent=True, callable_=lazyload,
- useobject=True)
+ backref='foo', trackparent=True,
+ callable_=lazyload,
+ useobject=True)
attributes.register_attribute(Bar, 'foo', uselist=False,
- backref='bars', trackparent=True, useobject=True)
+ backref='bars', trackparent=True,
+ useobject=True)
bar1, bar2, bar3, bar4 = [Bar(id=1), Bar(id=2), Bar(id=3),
Bar(id=4)]
lazy_load = [bar1, bar2, bar3]
bar4 = Bar()
bar4.foo = f
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bars'), ([bar4], [bar1, bar2, bar3], []))
+ 'bars'),
+ ([bar4], [bar1, bar2, bar3], []))
lazy_load = None
f = Foo()
bar4 = Bar()
bar4.foo = f
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bars'), ([bar4], [], []))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bars'),
+ ([bar4], [], []))
lazy_load = [bar1, bar2, bar3]
- attributes.instance_state(f)._expire_attributes(attributes.instance_dict(f),
- ['bars'])
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bars'), ((), [bar1, bar2, bar3], ()))
+ attributes.instance_state(f)._expire_attributes(
+ attributes.instance_dict(f),
+ ['bars'])
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bars'),
+ ((), [bar1, bar2, bar3], ()))
def test_collections_via_lazyload(self):
# TODO: break into individual tests
class Foo(fixtures.BasicEntity):
pass
+
class Bar(fixtures.BasicEntity):
pass
lazy_load = []
+
def lazyload(state, passive):
return lazy_load
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'bars', uselist=True,
- callable_=lazyload, trackparent=True, useobject=True)
+ callable_=lazyload, trackparent=True,
+ useobject=True)
bar1, bar2, bar3, bar4 = [Bar(id=1), Bar(id=2), Bar(id=3),
Bar(id=4)]
lazy_load = [bar1, bar2, bar3]
f = Foo()
f.bars = []
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bars'), ([], [], [bar1, bar2, bar3]))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bars'),
+ ([], [], [bar1, bar2, bar3]))
f = Foo()
f.bars.append(bar4)
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bars'), ([bar4], [bar1, bar2, bar3], []))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bars'),
+ ([bar4], [bar1, bar2, bar3], []))
f = Foo()
f.bars.remove(bar2)
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bars'), ([], [bar1, bar3], [bar2]))
+ 'bars'), ([], [bar1, bar3], [bar2]))
f.bars.append(bar4)
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bars'), ([bar4], [bar1, bar3], [bar2]))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bars'),
+ ([bar4], [bar1, bar3], [bar2]))
f = Foo()
del f.bars[1]
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bars'), ([], [bar1, bar3], [bar2]))
+ 'bars'), ([], [bar1, bar3], [bar2]))
lazy_load = None
f = Foo()
f.bars.append(bar2)
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bars'), ([bar2], [], []))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bars'),
+ ([bar2], [], []))
def test_scalar_via_lazyload(self):
# TODO: break into individual tests
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'bar', uselist=False,
- callable_=lazyload, useobject=False)
+ callable_=lazyload, useobject=False)
lazy_load = 'hi'
# with scalar non-object and active_history=False, the lazy
f = Foo()
eq_(f.bar, 'hi')
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ((), ['hi'], ()))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ ((), ['hi'], ()))
f = Foo()
f.bar = None
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ([None], (), ()))
+ 'bar'), ([None], (), ()))
f = Foo()
f.bar = 'there'
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), (['there'], (), ()))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ (['there'], (), ()))
f.bar = 'hi'
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), (['hi'], (), ()))
+ 'bar'), (['hi'], (), ()))
f = Foo()
eq_(f.bar, 'hi')
del f.bar
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ((), (), ['hi']))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ ((), (), ['hi']))
assert f.bar is None
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ((), (), ['hi']))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ ((), (), ['hi']))
def test_scalar_via_lazyload_with_active(self):
# TODO: break into individual tests
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'bar', uselist=False,
- callable_=lazyload, useobject=False,
- active_history=True)
+ callable_=lazyload, useobject=False,
+ active_history=True)
lazy_load = 'hi'
# active_history=True means the lazy callable is executed on set
f = Foo()
eq_(f.bar, 'hi')
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ((), ['hi'], ()))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ ((), ['hi'], ()))
f = Foo()
f.bar = None
eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ([None], (), ['hi']))
+ 'bar'), ([None], (), ['hi']))
f = Foo()
f.bar = 'there'
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), (['there'], (), ['hi']))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ (['there'], (), ['hi']))
f.bar = 'hi'
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ((), ['hi'], ()))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ ((), ['hi'], ()))
f = Foo()
eq_(f.bar, 'hi')
del f.bar
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ((), (), ['hi']))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ ((), (), ['hi']))
assert f.bar is None
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ((), (), ['hi']))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ ((), (), ['hi']))
def test_scalar_object_via_lazyload(self):
# TODO: break into individual tests
class Foo(fixtures.BasicEntity):
pass
+
class Bar(fixtures.BasicEntity):
pass
lazy_load = None
+
def lazyload(state, passive):
return lazy_load
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'bar', uselist=False,
- callable_=lazyload, trackparent=True, useobject=True)
+ callable_=lazyload, trackparent=True,
+ useobject=True)
bar1, bar2 = [Bar(id=1), Bar(id=2)]
lazy_load = bar1
# and history operations
f = Foo()
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ((), [bar1], ()))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ ((), [bar1], ()))
f = Foo()
f.bar = None
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ([None], (), [bar1]))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ ([None], (), [bar1]))
f = Foo()
f.bar = bar2
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ([bar2], (), [bar1]))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ ([bar2], (), [bar1]))
f.bar = bar1
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ((), [bar1], ()))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ ((), [bar1], ()))
f = Foo()
eq_(f.bar, bar1)
del f.bar
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ((), (), [bar1]))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ ((), (), [bar1]))
assert f.bar is None
- eq_(attributes.get_state_history(attributes.instance_state(f),
- 'bar'), ((), (), [bar1]))
+ eq_(attributes.get_state_history(attributes.instance_state(f), 'bar'),
+ ((), (), [bar1]))
+
class ListenerTest(fixtures.ORMTest):
def test_receive_changes(self):
class Foo(object):
pass
+
class Bar(object):
pass
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'data', uselist=False,
- useobject=False)
+ useobject=False)
attributes.register_attribute(Foo, 'barlist', uselist=True,
- useobject=True)
+ useobject=True)
attributes.register_attribute(Foo, 'barset', typecallable=set,
- uselist=True, useobject=True)
+ uselist=True, useobject=True)
attributes.register_attribute(Bar, 'data', uselist=False,
- useobject=False)
+ useobject=False)
event.listen(Foo.data, 'set', on_set, retval=True)
event.listen(Foo.barlist, 'append', append, retval=True)
event.listen(Foo.barset, 'append', append, retval=True)
def test_collection_link_events(self):
class Foo(object):
pass
+
class Bar(object):
pass
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'barlist', uselist=True,
- useobject=True)
+ useobject=True)
canary = Mock()
event.listen(Foo.barlist, "init_collection", canary.init)
]
)
-
def test_none_on_collection_event(self):
"""test that append/remove of None in collections emits events.
"""
class Foo(object):
pass
+
class Bar(object):
pass
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'barlist', uselist=True,
- useobject=True)
+ useobject=True)
canary = []
+
def append(state, child, initiator):
canary.append((state, child))
+
def remove(state, child, initiator):
canary.append((state, child))
event.listen(Foo.barlist, 'append', append)
def test_none_init_scalar(self):
canary = Mock()
+
class Foo(object):
pass
instrumentation.register_class(Foo)
def test_none_init_object(self):
canary = Mock()
+
class Foo(object):
pass
instrumentation.register_class(Foo)
def test_none_init_collection(self):
canary = Mock()
+
class Foo(object):
pass
instrumentation.register_class(Foo)
# reversal of approach in #3061
eq_(canary.mock_calls, [])
-
def test_propagate(self):
classes = [None, None, None]
canary = []
+
def make_a():
class A(object):
pass
def attr_a():
attributes.register_attribute(classes[0], 'attrib',
- uselist=False, useobject=False)
+ uselist=False, useobject=False)
def attr_b():
attributes.register_attribute(classes[1], 'attrib',
- uselist=False, useobject=False)
+ uselist=False, useobject=False)
def attr_c():
attributes.register_attribute(classes[2], 'attrib',
- uselist=False, useobject=False)
+ uselist=False, useobject=False)
def set(state, value, oldvalue, initiator):
canary.append(value)
def setUp(self):
class A(object):
pass
+
class B(object):
pass
self.A = A
instrumentation.register_class(A)
instrumentation.register_class(B)
attributes.register_attribute(A, 'bs', uselist=True,
- useobject=True)
+ useobject=True)
def test_expired(self):
A, B = self.A, self.B
"""
-a series of tests which assert the behavior of moving objects between collections
-and scalar attributes resulting in the expected state w.r.t. backrefs, add/remove
-events, etc.
+a series of tests which assert the behavior of moving objects between
+collections and scalar attributes resulting in the expected state w.r.t.
+backrefs, add/remove events, etc.
-there's a particular focus on collections that have "uselist=False", since in these
-cases the re-assignment of an attribute means the previous owner needs an
+there's a particular focus on collections that have "uselist=False", since in
+these cases the re-assignment of an attribute means the previous owner needs an
UPDATE in the database.
"""
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
+
class O2MCollectionTest(_fixtures.FixtureTest):
run_inserts = None
@classmethod
def setup_mappers(cls):
Address, addresses, users, User = (cls.classes.Address,
- cls.tables.addresses,
- cls.tables.users,
- cls.classes.User)
+ cls.tables.addresses,
+ cls.tables.users,
+ cls.classes.User)
mapper(Address, addresses)
- mapper(User, users, properties = dict(
- addresses = relationship(Address, backref="user"),
+ mapper(User, users, properties=dict(
+ addresses=relationship(Address, backref="user"),
))
def test_collection_move_hitslazy(self):
a1 = Address(email_address="address1")
a2 = Address(email_address="address2")
a3 = Address(email_address="address3")
- u1= User(name='jack', addresses=[a1, a2, a3])
- u2= User(name='ed')
+ u1 = User(name='jack', addresses=[a1, a2, a3])
+ u2 = User(name='ed')
sess.add_all([u1, a1, a2, a3])
sess.commit()
- #u1.addresses
+ # u1.addresses
def go():
u2.addresses.append(a1)
u2 = User(name='ed')
sess.add_all([u1, u2])
- sess.commit() # everything is expired
+ sess.commit() # everything is expired
# load u1.addresses collection
u1.addresses
u2 = User(name='ed')
sess.add_all([u1, u2])
- sess.commit() # everything is expired
+ sess.commit() # everything is expired
u2.addresses.append(a1)
u2 = User(name='ed')
sess.add_all([u1, u2])
- sess.commit() # everything is expired
+ sess.commit() # everything is expired
# load u1.addresses collection
u1.addresses
User, Address = self.classes.User, self.classes.Address
-
sess = sessionmaker()()
u1 = User(name='jack')
u2 = User(name='ed')
assert a1 not in u1.addresses
-
-
def test_scalar_move_notloaded(self):
User, Address = self.classes.User, self.classes.Address
@classmethod
def setup_mappers(cls):
Address, addresses, users, User = (cls.classes.Address,
- cls.tables.addresses,
- cls.tables.users,
- cls.classes.User)
+ cls.tables.addresses,
+ cls.tables.users,
+ cls.classes.User)
mapper(Address, addresses)
- mapper(User, users, properties = {
- 'address':relationship(Address, backref=backref("user"), uselist=False)
+ mapper(User, users, properties={
+ 'address': relationship(Address, backref=backref("user"),
+ uselist=False)
})
def test_collection_move_preloaded(self):
u2 = User(name='ed')
sess.add_all([u1, u2])
- sess.commit() # everything is expired
+ sess.commit() # everything is expired
# load u1.address
u1.address
u1 = User(name='jack', address=a1)
sess.add_all([u1, a1, a2])
- sess.commit() # everything is expired
+ sess.commit() # everything is expired
# load a1.user
a1.user
u2 = User(name='ed')
sess.add_all([u1, u2])
- sess.commit() # everything is expired
+ sess.commit() # everything is expired
# reassign
u2.address = a1
u1 = User(name='jack', address=a1)
sess.add_all([u1, a1, a2])
- sess.commit() # everything is expired
+ sess.commit() # everything is expired
# reassign
a2.user = u1
u2 = User(name='ed')
sess.add_all([u1, u2])
- sess.commit() # everything is expired
+ sess.commit() # everything is expired
# load u1.address
u1.address
u1 = User(name='jack', address=a1)
sess.add_all([u1, a1, a2])
- sess.commit() # everything is expired
+ sess.commit() # everything is expired
# load
assert a1.user is u1
assert a1.user is None
assert a2.user is u1
+
class O2OScalarMoveTest(_fixtures.FixtureTest):
run_inserts = None
@classmethod
def setup_mappers(cls):
Address, addresses, users, User = (cls.classes.Address,
- cls.tables.addresses,
- cls.tables.users,
- cls.classes.User)
+ cls.tables.addresses,
+ cls.tables.users,
+ cls.classes.User)
mapper(Address, addresses)
- mapper(User, users, properties = {
- 'address':relationship(Address, uselist=False)
+ mapper(User, users, properties={
+ 'address': relationship(Address, uselist=False)
})
def test_collection_move_commitfirst(self):
u2 = User(name='ed')
sess.add_all([u1, u2])
- sess.commit() # everything is expired
+ sess.commit() # everything is expired
# load u1.address
u1.address
assert u1.address is None
assert u2.address is a1
+
class O2OScalarOrphanTest(_fixtures.FixtureTest):
run_inserts = None
@classmethod
def setup_mappers(cls):
Address, addresses, users, User = (cls.classes.Address,
- cls.tables.addresses,
- cls.tables.users,
- cls.classes.User)
+ cls.tables.addresses,
+ cls.tables.users,
+ cls.classes.User)
mapper(Address, addresses)
- mapper(User, users, properties = {
- 'address':relationship(Address, uselist=False,
+ mapper(User, users, properties={
+ 'address': relationship(
+ Address, uselist=False,
backref=backref('user', single_parent=True,
- cascade="all, delete-orphan"))
+ cascade="all, delete-orphan"))
})
def test_m2o_event(self):
sess.commit()
sess.expunge(u1)
- u2= User(name='ed')
+ u2 = User(name='ed')
# the _SingleParent extension sets the backref get to "active" !
# u1 gets loaded and deleted
u2.address = a1
@classmethod
def setup_mappers(cls):
- keywords, items, item_keywords, Keyword, Item = (cls.tables.keywords,
- cls.tables.items,
- cls.tables.item_keywords,
- cls.classes.Keyword,
- cls.classes.Item)
+ keywords, items, item_keywords, \
+ Keyword, Item = (cls.tables.keywords,
+ cls.tables.items,
+ cls.tables.item_keywords,
+ cls.classes.Keyword,
+ cls.classes.Item)
mapper(Item, items, properties={
- 'keywords':relationship(Keyword, secondary=item_keywords,
- backref='items')
+ 'keywords': relationship(Keyword, secondary=item_keywords,
+ backref='items')
})
mapper(Keyword, keywords)
session.expire(i1, ['keywords'])
- k1= Keyword(name='k1')
+ k1 = Keyword(name='k1')
k1.items.append(i1)
k1.items.remove(i1)
eq_(i1.keywords, [])
session = Session(autoflush=False)
- k1= Keyword(name='k1')
+ k1 = Keyword(name='k1')
i1 = Item(description='i1', keywords=[k1])
session.add(i1)
session.commit()
# the pending collection was removed
assert 'keywords' not in attributes.\
- instance_state(i1).\
- _pending_mutations
+ instance_state(i1).\
+ _pending_mutations
def test_duplicate_adds(self):
Item, Keyword = (self.classes.Item, self.classes.Keyword)
session.commit()
eq_(k1.items, [i1])
+
class M2MScalarMoveTest(_fixtures.FixtureTest):
run_inserts = None
@classmethod
def setup_mappers(cls):
- keywords, items, item_keywords, Keyword, Item = (cls.tables.keywords,
- cls.tables.items,
- cls.tables.item_keywords,
- cls.classes.Keyword,
- cls.classes.Item)
+ keywords, items, item_keywords, \
+ Keyword, Item = (cls.tables.keywords,
+ cls.tables.items,
+ cls.tables.item_keywords,
+ cls.classes.Keyword,
+ cls.classes.Item)
mapper(Item, items, properties={
- 'keyword':relationship(Keyword, secondary=item_keywords,
+ 'keyword': relationship(Keyword, secondary=item_keywords,
uselist=False,
backref=backref("item", uselist=False))
})
i2 = Item(description='i2')
sess.add_all([i1, i2, k1])
- sess.commit() # everything is expired
+ sess.commit() # everything is expired
# load i1.keyword
assert i1.keyword is k1
i2 = Item(description='i2')
sess.add_all([i1, i2, k1])
- sess.commit() # everything is expired
+ sess.commit() # everything is expired
i2.keyword = k1
i2 = Item(description='i2')
sess.add_all([i1, i2, k1])
- sess.commit() # everything is expired
+ sess.commit() # everything is expired
# load i1.keyword
assert i1.keyword is k1
assert i1.keyword is None
assert i2.keyword is k1
+
class O2MStaleBackrefTest(_fixtures.FixtureTest):
run_inserts = None
@classmethod
def setup_mappers(cls):
Address, addresses, users, User = (cls.classes.Address,
- cls.tables.addresses,
- cls.tables.users,
- cls.classes.User)
+ cls.tables.addresses,
+ cls.tables.users,
+ cls.classes.User)
mapper(Address, addresses)
- mapper(User, users, properties = dict(
- addresses = relationship(Address, backref="user"),
+ mapper(User, users, properties=dict(
+ addresses=relationship(Address, backref="user"),
))
-
def test_backref_pop_m2o(self):
User, Address = self.classes.User, self.classes.Address
assert a1.user is u2
assert a1 in u2.addresses
+
class M2MStaleBackrefTest(_fixtures.FixtureTest):
run_inserts = None
@classmethod
def setup_mappers(cls):
- keywords, items, item_keywords, Keyword, Item = (cls.tables.keywords,
- cls.tables.items,
- cls.tables.item_keywords,
- cls.classes.Keyword,
- cls.classes.Item)
+ keywords, items, item_keywords, \
+ Keyword, Item = (cls.tables.keywords,
+ cls.tables.items,
+ cls.tables.item_keywords,
+ cls.classes.Keyword,
+ cls.classes.Item)
mapper(Item, items, properties={
- 'keywords':relationship(Keyword, secondary=item_keywords,
- backref='items')
+ 'keywords': relationship(Keyword, secondary=item_keywords,
+ backref='items')
})
mapper(Keyword, keywords)
session.get_bind(self.classes.ConcreteSubClass),
concrete_sub_bind
)
-
-
asserter.assert_(
CompiledSQL(
- "INSERT INTO orders (id, description) VALUES (:id, :description)",
+ "INSERT INTO orders (id, description) "
+ "VALUES (:id, :description)",
[{'id': 1, 'description': 'u1new'},
{'id': 2, 'description': None},
{'id': 3, 'description': 'u3new'}]
from sqlalchemy import Integer, select, ForeignKey, String, func
from sqlalchemy.orm import mapper, relationship, aliased
+
class BundleTest(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = 'default'
@classmethod
def define_tables(cls, metadata):
Table('data', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('d1', String(10)),
- Column('d2', String(10)),
- Column('d3', String(10))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('d1', String(10)),
+ Column('d2', String(10)),
+ Column('d3', String(10)))
Table('other', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data_id', ForeignKey('data.id')),
- Column('o1', String(10))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data_id', ForeignKey('data.id')),
+ Column('o1', String(10)))
@classmethod
def setup_classes(cls):
class Data(cls.Basic):
pass
+
class Other(cls.Basic):
pass
@classmethod
def setup_mappers(cls):
mapper(cls.classes.Data, cls.tables.data, properties={
- 'others': relationship(cls.classes.Other)
- })
+ 'others': relationship(cls.classes.Other)
+ })
mapper(cls.classes.Other, cls.tables.other)
@classmethod
sess = Session()
sess.add_all([
cls.classes.Data(d1='d%dd1' % i, d2='d%dd2' % i, d3='d%dd3' % i,
- others=[cls.classes.Other(o1="d%do%d" % (i, j)) for j in range(5)])
+ others=[cls.classes.Other(o1="d%do%d" % (i, j))
+ for j in range(5)])
for i in range(10)
])
sess.commit()
b1 = Bundle('b1', Data.d1, Data.d2, single_entity=True)
eq_(
- sess.query(b1).
- filter(b1.c.d1.between('d3d1', 'd5d1')).
- all(),
+ sess.query(b1).filter(b1.c.d1.between('d3d1', 'd5d1')).all(),
[('d3d1', 'd3d2'), ('d4d1', 'd4d2'), ('d5d1', 'd5d2')]
)
b2 = Bundle('b1', Data.d3, single_entity=True)
eq_(
- sess.query(b1, b2).
- filter(b1.c.d1.between('d3d1', 'd5d1')).
- all(),
- [
- (('d3d1', 'd3d2'), ('d3d3',)),
- (('d4d1', 'd4d2'), ('d4d3',)),
- (('d5d1', 'd5d2'), ('d5d3',))
+ sess.query(b1, b2).filter(b1.c.d1.between('d3d1', 'd5d1')).all(),
+ [
+ (('d3d1', 'd3d2'), ('d3d3',)),
+ (('d4d1', 'd4d2'), ('d4d3',)),
+ (('d5d1', 'd5d2'), ('d5d3',))
]
)
eq_(
sess.query(b1).
- filter(b1.c.d1.between('d3d1', 'd7d1')).
- filter(b1.c.b2.c.d2.between('d4d2', 'd6d2')).
- all(),
+ filter(b1.c.d1.between('d3d1', 'd7d1')).
+ filter(b1.c.b2.c.d2.between('d4d2', 'd6d2')).
+ all(),
[(('d4d1', ('d4d2', 'd4d3')),), (('d5d1', ('d5d2', 'd5d3')),),
(('d6d1', ('d6d2', 'd6d3')),)]
)
b1 = Bundle('b1', Data.d1, Bundle('b2', Data.d2, Data.d3))
q1 = sess.query(b1).\
- filter(b1.c.d1.between('d3d1', 'd7d1')).\
- filter(b1.c.b2.c.d2.between('d4d2', 'd5d2'))
+ filter(b1.c.d1.between('d3d1', 'd7d1')).\
+ filter(b1.c.b2.c.d2.between('d4d2', 'd5d2'))
q2 = sess.query(b1).\
- filter(b1.c.d1.between('d3d1', 'd7d1')).\
- filter(b1.c.b2.c.d2.between('d5d2', 'd6d2'))
+ filter(b1.c.d1.between('d3d1', 'd7d1')).\
+ filter(b1.c.b2.c.d2.between('d5d2', 'd6d2'))
eq_(
q1.union(q2).all(),
eq_(row.b1.d1, 'd4d1')
eq_(row.b1.b2.d2, 'd4d2')
-
def test_query_count(self):
Data = self.classes.Data
b1 = Bundle('b1', Data.d1, Data.d2)
b1 = Bundle('b1', Data.d1, Data.d2)
q = sess.query(b1).join(Data.others)
self.assert_compile(q,
- "SELECT data.d1 AS data_d1, data.d2 AS data_d2 FROM data "
- "JOIN other ON data.id = other.data_id"
- )
+ "SELECT data.d1 AS data_d1, data.d2 "
+ "AS data_d2 FROM data "
+ "JOIN other ON data.id = other.data_id")
def test_join_selectable(self):
Data = self.classes.Data
b1 = Bundle('b1', Data.d1, Data.d2)
q = sess.query(b1).join(Other)
self.assert_compile(q,
- "SELECT data.d1 AS data_d1, data.d2 AS data_d2 FROM data "
- "JOIN other ON data.id = other.data_id"
- )
-
+ "SELECT data.d1 AS data_d1, data.d2 AS data_d2 "
+ "FROM data "
+ "JOIN other ON data.id = other.data_id")
def test_joins_from_adapted_entities(self):
Data = self.classes.Data
self.assert_compile(
joined,
- "SELECT anon_1.data_id AS anon_1_data_id, anon_1.data_d1 AS anon_1_data_d1, "
+ "SELECT anon_1.data_id AS anon_1_data_id, "
+ "anon_1.data_d1 AS anon_1_data_d1, "
"anon_1.data_d2 AS anon_1_data_d2 FROM "
- "(SELECT data.id AS data_id, data.d1 AS data_d1, data.d2 AS data_d2 FROM "
+ "(SELECT data.id AS data_id, data.d1 AS data_d1, "
+ "data.d2 AS data_d2 FROM "
"data UNION SELECT data.id AS data_id, data.d1 AS data_d1, "
"data.d2 AS data_d2 FROM data) AS anon_1 "
"LEFT OUTER JOIN (SELECT data.id AS id FROM data) AS anon_2 "
eq_(
joined.all(),
[((1, 'd0d1', 'd0d2'),), ((2, 'd1d1', 'd1d2'),),
- ((3, 'd2d1', 'd2d2'),), ((4, 'd3d1', 'd3d2'),),
- ((5, 'd4d1', 'd4d2'),), ((6, 'd5d1', 'd5d2'),),
- ((7, 'd6d1', 'd6d2'),), ((8, 'd7d1', 'd7d2'),),
- ((9, 'd8d1', 'd8d2'),), ((10, 'd9d1', 'd9d2'),)]
+ ((3, 'd2d1', 'd2d2'),), ((4, 'd3d1', 'd3d2'),),
+ ((5, 'd4d1', 'd4d2'),), ((6, 'd5d1', 'd5d2'),),
+ ((7, 'd6d1', 'd6d2'),), ((8, 'd7d1', 'd7d2'),),
+ ((9, 'd8d1', 'd8d2'),), ((10, 'd9d1', 'd9d2'),)]
)
def test_filter_by(self):
"SELECT row_number() OVER (ORDER BY data.id, data.d1, data.d2) "
"AS anon_1 FROM data"
)
-
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30), nullable=False),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30), nullable=False))
Table('addresses', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('user_id', Integer, ForeignKey('users.id')),
- Column('email_address', String(50), nullable=False),
- )
+ Column('email_address', String(50), nullable=False))
@classmethod
def setup_classes(cls):
class User(cls.Basic):
pass
+
class Address(cls.Basic):
pass
mapper(User, users, properties={
'addresses': relationship(Address,
- passive_deletes="all", cascade="all, delete-orphan")
- })
+ passive_deletes="all",
+ cascade="all, delete-orphan")})
mapper(Address, addresses)
assert_raises_message(
sa_exc.ArgumentError,
eq_(
repr(orm_util.CascadeOptions("all, delete-orphan")),
"CascadeOptions('delete,delete-orphan,expunge,"
- "merge,refresh-expire,save-update')"
+ "merge,refresh-expire,save-update')"
)
def test_cascade_immutable(self):
new = copy.deepcopy(old)
eq_(old, new)
-
def test_cascade_assignable(self):
User, Address = self.classes.User, self.classes.Address
users, addresses = self.tables.users, self.tables.addresses
rel.cascade = "save-update, merge, expunge"
eq_(rel.cascade, set(['save-update', 'merge', 'expunge']))
- mapper(User, users, properties={
- 'addresses': rel
- })
+ mapper(User, users, properties={'addresses': rel})
am = mapper(Address, addresses)
configure_mappers()
eq_(rel.cascade,
set(['delete', 'delete-orphan', 'expunge', 'merge',
- 'refresh-expire', 'save-update'])
+ 'refresh-expire', 'save-update'])
)
def test_cascade_unicode(self):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30), nullable=False),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30), nullable=False))
Table('addresses', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('user_id', Integer, ForeignKey('users.id')),
- Column('email_address', String(50), nullable=False),
- )
+ Column('email_address', String(50), nullable=False))
Table('orders', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('user_id', Integer, ForeignKey('users.id'), nullable=False),
- Column('description', String(30)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer, ForeignKey(
+ 'users.id'), nullable=False),
+ Column('description', String(30)))
Table("dingalings", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('address_id', Integer, ForeignKey('addresses.id')),
- Column('data', String(30))
- )
+ Column('data', String(30)))
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
+
class Address(cls.Comparable):
pass
+
class Order(cls.Comparable):
pass
+
class Dingaling(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
- users, Dingaling, Order, User, dingalings, Address, orders, addresses = (cls.tables.users,
- cls.classes.Dingaling,
- cls.classes.Order,
- cls.classes.User,
- cls.tables.dingalings,
- cls.classes.Address,
- cls.tables.orders,
- cls.tables.addresses)
+ users, Dingaling, Order, User, dingalings, Address, \
+ orders, addresses = (cls.tables.users,
+ cls.classes.Dingaling,
+ cls.classes.Order,
+ cls.classes.User,
+ cls.tables.dingalings,
+ cls.classes.Address,
+ cls.tables.orders,
+ cls.tables.addresses)
mapper(Address, addresses)
mapper(Order, orders)
mapper(User, users, properties={
- 'addresses':relationship(Address,
- cascade='all, delete-orphan', backref='user'),
+ 'addresses': relationship(Address,
+ cascade='all, delete-orphan',
+ backref='user'),
- 'orders':relationship(Order,
- cascade='all, delete-orphan', order_by=orders.c.id)
+ 'orders': relationship(Order,
+ cascade='all, delete-orphan',
+ order_by=orders.c.id)
})
mapper(Dingaling, dingalings, properties={
- 'address' : relationship(Address)
+ 'address': relationship(Address)
})
def test_list_assignment_new(self):
Order(description='someotherorder')])
sess.add(u)
- u.orders=[Order(description="order 3"), Order(description="order 4")]
+ u.orders = [Order(description="order 3"), Order(description="order 4")]
sess.commit()
eq_(u, User(name='jack',
Order, User = self.classes.Order, self.classes.User
-
sess = sessionmaker(expire_on_commit=False)()
o1, o2, o3 = Order(description='o1'), Order(description='o2'), \
Order(description='o3')
u.orders.remove(o1)
assert o1 not in sess
-
def test_delete(self):
User, users, orders, Order = (self.classes.User,
- self.tables.users,
- self.tables.orders,
- self.classes.Order)
+ self.tables.users,
+ self.tables.orders,
+ self.classes.Order)
sess = create_session()
u = User(name='jack',
by default."""
User, addresses, users, Address = (self.classes.User,
- self.tables.addresses,
- self.tables.users,
- self.classes.Address)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.Address)
sess = create_session()
u = User(name='jack',
collection, not those that have been removed"""
User, Order, users, orders = (self.classes.User,
- self.classes.Order,
- self.tables.users,
- self.tables.orders)
-
+ self.classes.Order,
+ self.tables.users,
+ self.tables.orders)
sess = create_session()
u = User(name='jack',
scalars/collections unchanged."""
Dingaling, User, Address = (self.classes.Dingaling,
- self.classes.User,
- self.classes.Address)
-
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
u = User(name='jack')
def test_cascade_delete_plusorphans(self):
User, users, orders, Order = (self.classes.User,
- self.tables.users,
- self.tables.orders,
- self.classes.Order)
+ self.tables.users,
+ self.tables.orders,
+ self.classes.Order)
sess = create_session()
u = User(name='jack',
def test_collection_orphans(self):
User, users, orders, Order = (self.classes.User,
- self.tables.users,
- self.tables.orders,
- self.classes.Order)
+ self.tables.users,
+ self.tables.orders,
+ self.classes.Order)
sess = create_session()
u = User(name='jack',
eq_(select([func.count('*')]).select_from(users).scalar(), 1)
eq_(select([func.count('*')]).select_from(orders).scalar(), 0)
+
class O2MCascadeTest(fixtures.MappedTest):
run_inserts = None
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30), nullable=False),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30), nullable=False))
Table('addresses', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('user_id', Integer, ForeignKey('users.id')),
- Column('email_address', String(50), nullable=False),
- )
+ Column('email_address', String(50), nullable=False))
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
+
class Address(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
users, User, Address, addresses = (
- cls.tables.users, cls.classes.User,
- cls.classes.Address, cls.tables.addresses)
+ cls.tables.users, cls.classes.User,
+ cls.classes.Address, cls.tables.addresses)
mapper(Address, addresses)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref="user"),
+ 'addresses': relationship(Address, backref="user"),
})
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)))
Table('orders', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('user_id', Integer, ForeignKey('users.id')),
- Column('description', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer, ForeignKey('users.id')),
+ Column('description', String(30)))
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
+
class Order(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
User, Order, orders, users = (cls.classes.User,
- cls.classes.Order,
- cls.tables.orders,
- cls.tables.users)
+ cls.classes.Order,
+ cls.tables.orders,
+ cls.tables.users)
- mapper(User, users, properties = dict(
- orders = relationship(
+ mapper(User, users, properties=dict(
+ orders=relationship(
mapper(Order, orders), cascade="all")
))
def test_cascade_delete_noorphans(self):
User, Order, orders, users = (self.classes.User,
- self.classes.Order,
- self.tables.orders,
- self.tables.users)
+ self.classes.Order,
+ self.tables.orders,
+ self.tables.users)
sess = create_session()
u = User(name='jack',
eq_(select([func.count('*')]).select_from(users).scalar(), 0)
eq_(select([func.count('*')]).select_from(orders).scalar(), 1)
+
class O2OSingleParentTest(_fixtures.FixtureTest):
run_inserts = None
@classmethod
def setup_mappers(cls):
Address, addresses, users, User = (cls.classes.Address,
- cls.tables.addresses,
- cls.tables.users,
- cls.classes.User)
+ cls.tables.addresses,
+ cls.tables.users,
+ cls.classes.User)
mapper(Address, addresses)
- mapper(User, users, properties={'address'
- : relationship(Address, backref=backref('user',
- single_parent=True), uselist=False)})
+ mapper(User, users,
+ properties={'address': relationship(
+ Address, backref=backref('user', single_parent=True),
+ uselist=False)})
def test_single_parent_raise(self):
User, Address = self.classes.User, self.classes.Address
assert u1.address is not a1
assert a1.user is None
+
class O2OSingleParentNoFlushTest(fixtures.MappedTest):
run_inserts = None
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30), nullable=False),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30), nullable=False))
Table('addresses', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('user_id', None, ForeignKey('users.id'), nullable=False),
- Column('email_address', String(50), nullable=False),
- )
+ Column('email_address', String(50), nullable=False))
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
+
class Address(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
Address, addresses, users, User = (cls.classes.Address,
- cls.tables.addresses,
- cls.tables.users,
- cls.classes.User)
+ cls.tables.addresses,
+ cls.tables.users,
+ cls.classes.User)
mapper(Address, addresses)
- mapper(User, users, properties={'address'
- : relationship(Address, backref=backref('user',
- single_parent=True, cascade="all, delete-orphan"),
- uselist=False)})
+ mapper(User, users,
+ properties={'address': relationship(
+ Address, backref=backref('user', single_parent=True,
+ cascade="all, delete-orphan"),
+ uselist=False)})
def test_replace_attribute_no_flush(self):
# test [ticket:2921]
sess.add(a2)
u1.address = a2
+
class NoSaveCascadeFlushTest(_fixtures.FixtureTest):
"""Test related item not present in session, commit proceeds."""
run_inserts = None
def _one_to_many_fixture(self, o2m_cascade=True,
- m2o_cascade=True,
- o2m=False,
- m2o=False,
- o2m_cascade_backrefs=True,
- m2o_cascade_backrefs=True):
+ m2o_cascade=True,
+ o2m=False,
+ m2o=False,
+ o2m_cascade_backrefs=True,
+ m2o_cascade_backrefs=True):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
if o2m:
if m2o:
- addresses_rel = {'addresses':relationship(
- Address,
- cascade_backrefs=o2m_cascade_backrefs,
- cascade=o2m_cascade and 'save-update' or '',
- backref=backref('user',
- cascade=m2o_cascade and 'save-update' or '',
- cascade_backrefs=m2o_cascade_backrefs
- )
- )}
+ addresses_rel = {'addresses': relationship(
+ Address,
+ cascade_backrefs=o2m_cascade_backrefs,
+ cascade=o2m_cascade and 'save-update' or '',
+ backref=backref(
+ 'user', cascade=m2o_cascade and 'save-update' or '',
+ cascade_backrefs=m2o_cascade_backrefs)
+ )}
else:
- addresses_rel = {'addresses':relationship(
- Address,
- cascade=o2m_cascade and 'save-update' or '',
- cascade_backrefs=o2m_cascade_backrefs,
- )}
+ addresses_rel = {'addresses': relationship(
+ Address,
+ cascade=o2m_cascade and 'save-update' or '',
+ cascade_backrefs=o2m_cascade_backrefs,
+ )}
user_rel = {}
elif m2o:
- user_rel = {'user':relationship(User,
- cascade=m2o_cascade and 'save-update' or '',
- cascade_backrefs=m2o_cascade_backrefs
- )}
+ user_rel = {'user': relationship(
+ User, cascade=m2o_cascade and 'save-update' or '',
+ cascade_backrefs=m2o_cascade_backrefs
+ )}
addresses_rel = {}
else:
addresses_rel = {}
mapper(Address, addresses, properties=user_rel)
def _many_to_many_fixture(self, fwd_cascade=True,
- bkd_cascade=True,
- fwd=False,
- bkd=False,
- fwd_cascade_backrefs=True,
- bkd_cascade_backrefs=True):
-
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
+ bkd_cascade=True,
+ fwd=False,
+ bkd=False,
+ fwd_cascade_backrefs=True,
+ bkd_cascade_backrefs=True):
+
+ keywords, items, item_keywords, Keyword, Item = \
+ (self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
if fwd:
if bkd:
- keywords_rel = {'keywords':relationship(
- Keyword,
- secondary=item_keywords,
- cascade_backrefs=fwd_cascade_backrefs,
- cascade=fwd_cascade and 'save-update' or '',
- backref=backref('items',
- cascade=bkd_cascade and 'save-update' or '',
- cascade_backrefs=bkd_cascade_backrefs
- )
- )}
+ keywords_rel = {'keywords': relationship(
+ Keyword,
+ secondary=item_keywords,
+ cascade_backrefs=fwd_cascade_backrefs,
+ cascade=fwd_cascade and 'save-update' or '',
+ backref=backref(
+ 'items',
+ cascade=bkd_cascade and 'save-update' or '',
+ cascade_backrefs=bkd_cascade_backrefs))}
else:
- keywords_rel = {'keywords':relationship(
+ keywords_rel = {'keywords': relationship(
Keyword,
secondary=item_keywords,
cascade=fwd_cascade and 'save-update' or '',
- cascade_backrefs=fwd_cascade_backrefs,
- )}
+ cascade_backrefs=fwd_cascade_backrefs)}
items_rel = {}
elif bkd:
- items_rel = {'items':relationship(Item,
- secondary=item_keywords,
- cascade=bkd_cascade and 'save-update' or '',
- cascade_backrefs=bkd_cascade_backrefs
- )}
+ items_rel = {'items': relationship(
+ Item,
+ secondary=item_keywords,
+ cascade=bkd_cascade and 'save-update' or '',
+ cascade_backrefs=bkd_cascade_backrefs
+ )}
keywords_rel = {}
else:
keywords_rel = {}
User, Address = self.classes.User, self.classes.Address
self._one_to_many_fixture(o2m=True, m2o=True,
- o2m_cascade=False)
+ o2m_cascade=False)
sess = Session()
u1 = User(name='u1')
a1 = Address(email_address='a1')
User, Address = self.classes.User, self.classes.Address
self._one_to_many_fixture(o2m=True, m2o=True,
- o2m_cascade=False)
+ o2m_cascade=False)
sess = Session()
u1 = User(name='u1')
a1 = Address(email_address='a1')
sess.add(u1)
assert u1 in sess
assert a1 not in sess
+
@testing.emits_warning(r'.*not in session')
def go():
sess.commit()
User, Address = self.classes.User, self.classes.Address
self._one_to_many_fixture(o2m=True, m2o=True,
- o2m_cascade=False)
+ o2m_cascade=False)
sess = Session()
u1 = User(name='u1')
a1 = Address(email_address='a1')
User, Address = self.classes.User, self.classes.Address
self._one_to_many_fixture(o2m=True, m2o=True,
- o2m_cascade=False)
+ o2m_cascade=False)
sess = Session()
u1 = User(name='u1')
a1 = Address(email_address='a1')
sess.expunge(a1)
assert u1 in sess
assert a1 not in sess
+
@testing.emits_warning(r'.*not in session')
def go():
sess.commit()
sess.add(a1)
assert u1 not in sess
assert a1 in sess
+
@testing.emits_warning(r'.*not in session')
def go():
sess.commit()
sess.expunge(u1)
assert u1 not in sess
assert a1 in sess
+
@testing.emits_warning(r'.*not in session')
def go():
sess.commit()
Item, Keyword = self.classes.Item, self.classes.Keyword
self._many_to_many_fixture(fwd=True, bkd=True,
- fwd_cascade=False)
+ fwd_cascade=False)
sess = Session()
i1 = Item(description='i1')
k1 = Keyword(name='k1')
Item, Keyword = self.classes.Item, self.classes.Keyword
self._many_to_many_fixture(fwd=True, bkd=True,
- fwd_cascade=False)
+ fwd_cascade=False)
sess = Session()
i1 = Item(description='i1')
k1 = Keyword(name='k1')
sess.add(i1)
assert i1 in sess
assert k1 not in sess
+
@testing.emits_warning(r'.*not in session')
def go():
sess.commit()
Item, Keyword = self.classes.Item, self.classes.Keyword
self._many_to_many_fixture(fwd=True, bkd=True,
- fwd_cascade=False)
+ fwd_cascade=False)
sess = Session()
i1 = Item(description='i1')
k1 = Keyword(name='k1')
Item, Keyword = self.classes.Item, self.classes.Keyword
self._many_to_many_fixture(fwd=True, bkd=True,
- fwd_cascade=False)
+ fwd_cascade=False)
sess = Session()
i1 = Item(description='i1')
k1 = Keyword(name='k1')
sess.expunge(k1)
assert i1 in sess
assert k1 not in sess
+
@testing.emits_warning(r'.*not in session')
def go():
sess.commit()
go()
eq_(i1.keywords, [])
+
class NoSaveCascadeBackrefTest(_fixtures.FixtureTest):
"""test that backrefs don't force save-update cascades to occur
when the cascade initiated from the forwards side."""
def test_unidirectional_cascade_o2m(self):
User, Order, users, orders = (self.classes.User,
- self.classes.Order,
- self.tables.users,
- self.tables.orders)
+ self.classes.Order,
+ self.tables.users,
+ self.tables.orders)
mapper(Order, orders)
- mapper(User, users, properties = dict(
- orders = relationship(
+ mapper(User, users, properties=dict(
+ orders=relationship(
Order, backref=backref("user", cascade=None))
))
def test_unidirectional_cascade_m2o(self):
User, Order, users, orders = (self.classes.User,
- self.classes.Order,
- self.tables.users,
- self.tables.orders)
+ self.classes.Order,
+ self.tables.users,
+ self.tables.orders)
mapper(Order, orders, properties={
- 'user':relationship(User, backref=backref("orders", cascade=None))
+ 'user': relationship(User, backref=backref("orders", cascade=None))
})
mapper(User, users)
assert u1 in sess
def test_unidirectional_cascade_m2m(self):
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
-
- mapper(Item, items, properties={'keywords'
- : relationship(Keyword, secondary=item_keywords,
- cascade='none', backref='items')})
+ keywords, items, item_keywords, Keyword, Item = \
+ (self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
+
+ mapper(Item, items,
+ properties={'keywords': relationship(Keyword,
+ secondary=item_keywords,
+ cascade='none',
+ backref='items')})
mapper(Keyword, keywords)
sess = create_session()
@classmethod
def define_tables(cls, metadata):
Table('extra', metadata, Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ primary_key=True,
+ test_needs_autoincrement=True),
Column('prefs_id', Integer, ForeignKey('prefs.id')))
Table('prefs', metadata, Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)))
Table(
'users',
Column('name', String(40)),
Column('pref_id', Integer, ForeignKey('prefs.id')),
Column('foo_id', Integer, ForeignKey('foo.id')),
- )
- Table('foo', metadata, Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True), Column('data',
- String(40)))
+ )
+ Table('foo', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
+
class Pref(cls.Comparable):
pass
+
class Extra(cls.Comparable):
pass
+
class Foo(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
extra, foo, users, Extra, Pref, User, prefs, Foo = (cls.tables.extra,
- cls.tables.foo,
- cls.tables.users,
- cls.classes.Extra,
- cls.classes.Pref,
- cls.classes.User,
- cls.tables.prefs,
- cls.classes.Foo)
+ cls.tables.foo,
+ cls.tables.users,
+ cls.classes.Extra,
+ cls.classes.Pref,
+ cls.classes.User,
+ cls.tables.prefs,
+ cls.classes.Foo)
mapper(Extra, extra)
- mapper(Pref, prefs, properties=dict(extra=relationship(Extra,
- cascade='all, delete')))
- mapper(User, users, properties=dict(pref=relationship(Pref,
- lazy='joined', cascade='all, delete-orphan',
- single_parent=True), foo=relationship(Foo))) # straight m2o
+ mapper(Pref, prefs, properties=dict(
+ extra=relationship(Extra, cascade='all, delete')))
+ mapper(User, users, properties=dict(
+ pref=relationship(Pref, lazy='joined',
+ cascade='all, delete-orphan',
+ single_parent=True),
+ foo=relationship(Foo))) # straight m2o
mapper(Foo, foo)
@classmethod
def insert_data(cls):
Pref, User, Extra = (cls.classes.Pref,
- cls.classes.User,
- cls.classes.Extra)
+ cls.classes.User,
+ cls.classes.Extra)
u1 = User(name='ed', pref=Pref(data="pref 1", extra=[Extra()]))
u2 = User(name='jack', pref=Pref(data="pref 2", extra=[Extra()]))
def test_orphan(self):
prefs, User, extra = (self.tables.prefs,
- self.classes.User,
- self.tables.extra)
+ self.classes.User,
+ self.tables.extra)
sess = create_session()
eq_(select([func.count('*')]).select_from(prefs).scalar(), 3)
Foo, User = self.classes.Foo, self.classes.User
-
sess = sessionmaker(expire_on_commit=True)()
-
u1 = User(name='jack', foo=Foo(data='f1'))
sess.add(u1)
sess.commit()
Pref, User = self.classes.Pref, self.classes.User
-
sess = sessionmaker(expire_on_commit=False)()
p1, p2 = Pref(data='p1'), Pref(data='p2')
-
u = User(name='jack', pref=p1)
sess.add(u)
sess.commit()
def test_orphan_on_update(self):
prefs, User, extra = (self.tables.prefs,
- self.classes.User,
- self.tables.extra)
+ self.classes.User,
+ self.tables.extra)
sess = create_session()
jack = sess.query(User).filter_by(name="jack").one()
Pref, User = self.classes.Pref, self.classes.User
-
sess = create_session()
jack = sess.query(User).filter_by(name="jack").one()
eq_(sess.query(Pref).order_by(Pref.id).all(),
[Pref(data="pref 1"), Pref(data="pref 3"), Pref(data="newpref")])
+
class M2OCascadeDeleteOrphanTestTwo(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('data', String(50)),
Column('t2id', Integer, ForeignKey('t2.id')))
Table('t2', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('data', String(50)),
Column('t3id', Integer, ForeignKey('t3.id')))
Table('t3', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('data', String(50)))
@classmethod
def setup_classes(cls):
class T1(cls.Comparable):
pass
+
class T2(cls.Comparable):
pass
+
class T3(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
t2, T2, T3, t1, t3, T1 = (cls.tables.t2,
- cls.classes.T2,
- cls.classes.T3,
- cls.tables.t1,
- cls.tables.t3,
- cls.classes.T1)
-
- mapper(T1, t1, properties=dict(t2=relationship(T2,
- cascade='all, delete-orphan', single_parent=True)))
- mapper(T2, t2, properties=dict(t3=relationship(T3,
- cascade='all, delete-orphan', single_parent=True,
- backref=backref('t2', uselist=False))))
+ cls.classes.T2,
+ cls.classes.T3,
+ cls.tables.t1,
+ cls.tables.t3,
+ cls.classes.T1)
+
+ mapper(T1, t1, properties=dict(
+ t2=relationship(T2, cascade='all, delete-orphan',
+ single_parent=True)))
+ mapper(T2, t2, properties=dict(
+ t3=relationship(T3, cascade='all, delete-orphan',
+ single_parent=True,
+ backref=backref('t2', uselist=False))))
mapper(T3, t3)
def test_cascade_delete(self):
T2, T3, T1 = (self.classes.T2,
- self.classes.T3,
- self.classes.T1)
+ self.classes.T3,
+ self.classes.T1)
sess = create_session()
x = T1(data='t1a', t2=T2(data='t2a', t3=T3(data='t3a')))
def test_deletes_orphans_onelevel(self):
T2, T3, T1 = (self.classes.T2,
- self.classes.T3,
- self.classes.T1)
+ self.classes.T3,
+ self.classes.T1)
sess = create_session()
x2 = T1(data='t1b', t2=T2(data='t2b', t3=T3(data='t3b')))
def test_deletes_orphans_twolevel(self):
T2, T3, T1 = (self.classes.T2,
- self.classes.T3,
- self.classes.T1)
+ self.classes.T3,
+ self.classes.T1)
sess = create_session()
x = T1(data='t1a', t2=T2(data='t2a', t3=T3(data='t3a')))
eq_(sess.query(T2).all(), [])
eq_(sess.query(T3).all(), [])
-
def test_finds_orphans_twolevel(self):
T2, T3, T1 = (self.classes.T2,
- self.classes.T3,
- self.classes.T1)
+ self.classes.T3,
+ self.classes.T1)
sess = create_session()
x = T1(data='t1a', t2=T2(data='t2a', t3=T3(data='t3a')))
def test_single_parent_raise(self):
T2, T1 = self.classes.T2, self.classes.T1
-
sess = create_session()
y = T2(data='T2a')
def test_single_parent_backref(self):
T2, T3 = self.classes.T2, self.classes.T3
-
sess = create_session()
y = T3(data='T3a')
assert z.t3 is y
assert x.t3 is None
+
class M2OCascadeDeleteNoOrphanTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata, Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data',String(50)),
+ test_needs_autoincrement=True),
+ Column('data', String(50)),
Column('t2id', Integer, ForeignKey('t2.id')))
Table('t2', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data',String(50)),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)),
Column('t3id', Integer, ForeignKey('t3.id')))
Table('t3', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
@classmethod
def setup_classes(cls):
class T1(cls.Comparable):
pass
+
class T2(cls.Comparable):
pass
+
class T3(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
t2, T2, T3, t1, t3, T1 = (cls.tables.t2,
- cls.classes.T2,
- cls.classes.T3,
- cls.tables.t1,
- cls.tables.t3,
- cls.classes.T1)
+ cls.classes.T2,
+ cls.classes.T3,
+ cls.tables.t1,
+ cls.tables.t3,
+ cls.classes.T1)
mapper(T1, t1, properties={'t2': relationship(T2, cascade="all")})
mapper(T2, t2, properties={'t3': relationship(T3, cascade="all")})
def test_cascade_delete(self):
T2, T3, T1 = (self.classes.T2,
- self.classes.T3,
- self.classes.T1)
+ self.classes.T3,
+ self.classes.T1)
sess = create_session()
x = T1(data='t1a', t2=T2(data='t2a', t3=T3(data='t3a')))
def test_cascade_delete_postappend_onelevel(self):
T2, T3, T1 = (self.classes.T2,
- self.classes.T3,
- self.classes.T1)
+ self.classes.T3,
+ self.classes.T1)
sess = create_session()
x1 = T1(data='t1', )
def test_cascade_delete_postappend_twolevel(self):
T2, T3, T1 = (self.classes.T2,
- self.classes.T3,
- self.classes.T1)
+ self.classes.T3,
+ self.classes.T1)
sess = create_session()
x1 = T1(data='t1', t2=T2(data='t2'))
def test_preserves_orphans_onelevel(self):
T2, T3, T1 = (self.classes.T2,
- self.classes.T3,
- self.classes.T1)
+ self.classes.T3,
+ self.classes.T1)
sess = create_session()
x2 = T1(data='t1b', t2=T2(data='t2b', t3=T3(data='t3b')))
@testing.future
def test_preserves_orphans_onelevel_postremove(self):
T2, T3, T1 = (self.classes.T2,
- self.classes.T3,
- self.classes.T1)
+ self.classes.T3,
+ self.classes.T1)
sess = create_session()
x2 = T1(data='t1b', t2=T2(data='t2b', t3=T3(data='t3b')))
def test_preserves_orphans_twolevel(self):
T2, T3, T1 = (self.classes.T2,
- self.classes.T3,
- self.classes.T1)
+ self.classes.T3,
+ self.classes.T1)
sess = create_session()
x = T1(data='t1a', t2=T2(data='t2a', t3=T3(data='t3a')))
eq_(sess.query(T3).all(), [T3()])
-
class M2MCascadeTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('a', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(30)),
- test_needs_fk=True
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)),
+ test_needs_fk=True)
Table('b', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(30)),
- test_needs_fk=True
-
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)),
+ test_needs_fk=True)
Table('atob', metadata,
- Column('aid', Integer, ForeignKey('a.id')),
- Column('bid', Integer, ForeignKey('b.id')),
- test_needs_fk=True
-
- )
+ Column('aid', Integer, ForeignKey('a.id')),
+ Column('bid', Integer, ForeignKey('b.id')),
+ test_needs_fk=True)
Table('c', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('data', String(30)),
Column('bid', Integer, ForeignKey('b.id')),
- test_needs_fk=True
-
- )
+ test_needs_fk=True)
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
+
class B(cls.Comparable):
pass
+
class C(cls.Comparable):
pass
def test_delete_orphan(self):
a, A, B, b, atob = (self.tables.a,
- self.classes.A,
- self.classes.B,
- self.tables.b,
- self.tables.atob)
-
+ self.classes.A,
+ self.classes.B,
+ self.tables.b,
+ self.tables.atob)
# if no backref here, delete-orphan failed until [ticket:427]
# was fixed
- mapper(A, a, properties={'bs': relationship(B, secondary=atob,
- cascade='all, delete-orphan', single_parent=True)})
+ mapper(A, a,
+ properties={'bs': relationship(B, secondary=atob,
+ cascade='all, delete-orphan',
+ single_parent=True)})
mapper(B, b)
sess = create_session()
def test_delete_orphan_dynamic(self):
a, A, B, b, atob = (self.tables.a,
- self.classes.A,
- self.classes.B,
- self.tables.b,
- self.tables.atob)
-
- mapper(A, a, properties={'bs': relationship(B, secondary=atob,
- cascade='all, delete-orphan', single_parent=True,
- lazy='dynamic')}) # if no backref here, delete-orphan
- # failed until [ticket:427] was fixed
+ self.classes.A,
+ self.classes.B,
+ self.tables.b,
+ self.tables.atob)
+
+ mapper(A, a,
+ # if no backref here, delete-orphan
+ properties={'bs': relationship(B, secondary=atob,
+ cascade='all, delete-orphan',
+ single_parent=True,
+ lazy='dynamic')})
+ # failed until [ticket:427] was fixed
mapper(B, b)
sess = create_session()
def test_delete_orphan_cascades(self):
a, A, c, b, C, B, atob = (self.tables.a,
- self.classes.A,
- self.tables.c,
- self.tables.b,
- self.classes.C,
- self.classes.B,
- self.tables.atob)
+ self.classes.A,
+ self.tables.c,
+ self.tables.b,
+ self.classes.C,
+ self.classes.B,
+ self.tables.atob)
mapper(A, a, properties={
# if no backref here, delete-orphan failed until [ticket:427] was
# fixed
- 'bs':relationship(B, secondary=atob, cascade="all, delete-orphan",
- single_parent=True)
+ 'bs': relationship(B, secondary=atob, cascade="all, delete-orphan",
+ single_parent=True)
})
- mapper(B, b, properties={'cs':
- relationship(C, cascade="all, delete-orphan")})
+ mapper(B, b,
+ properties={'cs': relationship(C,
+ cascade="all, delete-orphan")})
mapper(C, c)
sess = create_session()
def test_cascade_delete(self):
a, A, B, b, atob = (self.tables.a,
- self.classes.A,
- self.classes.B,
- self.tables.b,
- self.tables.atob)
+ self.classes.A,
+ self.classes.B,
+ self.tables.b,
+ self.tables.atob)
mapper(A, a, properties={
- 'bs':relationship(B, secondary=atob, cascade="all, delete-orphan",
- single_parent=True)
+ 'bs': relationship(B, secondary=atob, cascade="all, delete-orphan",
+ single_parent=True)
})
mapper(B, b)
def test_single_parent_error(self):
a, A, B, b, atob = (self.tables.a,
- self.classes.A,
- self.classes.B,
- self.tables.b,
- self.tables.atob)
+ self.classes.A,
+ self.classes.B,
+ self.tables.b,
+ self.tables.atob)
mapper(A, a, properties={
- 'bs':relationship(B, secondary=atob,
- cascade="all, delete-orphan")
+ 'bs': relationship(B, secondary=atob,
+ cascade="all, delete-orphan")
})
mapper(B, b)
assert_raises_message(
def test_single_parent_raise(self):
a, A, B, b, atob = (self.tables.a,
- self.classes.A,
- self.classes.B,
- self.tables.b,
- self.tables.atob)
+ self.classes.A,
+ self.classes.B,
+ self.tables.b,
+ self.tables.atob)
mapper(A, a, properties={
- 'bs':relationship(B, secondary=atob, cascade="all, delete-orphan",
- single_parent=True)
+ 'bs': relationship(B, secondary=atob, cascade="all, delete-orphan",
+ single_parent=True)
})
mapper(B, b)
sess = create_session()
- b1 =B(data='b1')
+ b1 = B(data='b1')
a1 = A(data='a1', bs=[b1])
assert_raises(sa_exc.InvalidRequestError,
- A, data='a2', bs=[b1]
- )
+ A, data='a2', bs=[b1])
def test_single_parent_backref(self):
- """test that setting m2m via a uselist=False backref bypasses the single_parent raise"""
+ """test that setting m2m via a uselist=False backref bypasses the
+ single_parent raise"""
a, A, B, b, atob = (self.tables.a,
- self.classes.A,
- self.classes.B,
- self.tables.b,
- self.tables.atob)
-
+ self.classes.A,
+ self.classes.B,
+ self.tables.b,
+ self.tables.atob)
mapper(A, a, properties={
- 'bs':relationship(B,
- secondary=atob,
- cascade="all, delete-orphan", single_parent=True,
- backref=backref('a', uselist=False))
+ 'bs': relationship(B,
+ secondary=atob,
+ cascade="all, delete-orphan",
+ single_parent=True,
+ backref=backref('a', uselist=False))
})
mapper(B, b)
sess = create_session()
- b1 =B(data='b1')
+ b1 = B(data='b1')
a1 = A(data='a1', bs=[b1])
assert_raises(
def test_none_m2m_collection_assignment(self):
a, A, B, b, atob = (self.tables.a,
- self.classes.A,
- self.classes.B,
- self.tables.b,
- self.tables.atob)
-
+ self.classes.A,
+ self.classes.B,
+ self.tables.b,
+ self.tables.atob)
mapper(A, a, properties={
'bs': relationship(B,
- secondary=atob, backref="as")
+ secondary=atob, backref="as")
})
mapper(B, b)
def test_none_m2m_collection_append(self):
a, A, B, b, atob = (self.tables.a,
- self.classes.A,
- self.classes.B,
- self.tables.b,
- self.tables.atob)
-
+ self.classes.A,
+ self.classes.B,
+ self.tables.b,
+ self.tables.atob)
mapper(A, a, properties={
- 'bs': relationship(B,
- secondary=atob, backref="as")
+ 'bs': relationship(B, secondary=atob, backref="as")
})
mapper(B, b)
)
eq_(a1.bs, [None])
+
class O2MSelfReferentialDetelOrphanTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('node', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('parent_id', Integer, ForeignKey('node.id'))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('parent_id', Integer, ForeignKey('node.id')))
@classmethod
def setup_classes(cls):
Node = cls.classes.Node
node = cls.tables.node
mapper(Node, node, properties={
- "children":relationship(
- Node,
- cascade="all, delete-orphan",
- backref=backref(
- "parent",
- remote_side=node.c.id
- )
- )
+ "children": relationship(
+ Node,
+ cascade="all, delete-orphan",
+ backref=backref(
+ "parent",
+ remote_side=node.c.id
+ )
+ )
})
def test_self_referential_delete(self):
@classmethod
def setup_mappers(cls):
- addresses, Dingaling, User, dingalings, Address, users = (cls.tables.addresses,
- cls.classes.Dingaling,
- cls.classes.User,
- cls.tables.dingalings,
- cls.classes.Address,
- cls.tables.users)
+ addresses, Dingaling, User, dingalings, Address, users = \
+ (cls.tables.addresses,
+ cls.classes.Dingaling,
+ cls.classes.User,
+ cls.tables.dingalings,
+ cls.classes.Address,
+ cls.tables.users)
mapper(Address, addresses)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user',
- cascade_backrefs=False)
+ 'addresses': relationship(Address, backref='user',
+ cascade_backrefs=False)
})
mapper(Dingaling, dingalings, properties={
- 'address' : relationship(Address, backref='dingalings',
- cascade_backrefs=False)
+ 'address': relationship(Address, backref='dingalings',
+ cascade_backrefs=False)
})
def test_o2m_basic(self):
a1.user = u1
assert a1 not in sess
-
def test_o2m_commit_warns(self):
User, Address = self.classes.User, self.classes.Address
assert a1 not in sess
-
def test_o2m_flag_on_backref(self):
Dingaling, Address = self.classes.Dingaling, self.classes.Address
sess.commit
)
+
class PendingOrphanTestSingleLevel(fixtures.MappedTest):
"""Pending entities that are orphans"""
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('user_id', Integer,primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(40)))
+ Column('user_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(40)))
Table('addresses', metadata,
- Column('address_id', Integer,primary_key=True,
- test_needs_autoincrement=True),
- Column('user_id', Integer, ForeignKey('users.user_id')),
- Column('email_address', String(40)))
+ Column('address_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer, ForeignKey('users.user_id')),
+ Column('email_address', String(40)))
Table('orders', metadata,
- Column('order_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('user_id', Integer, ForeignKey('users.user_id'), nullable=False),
- )
+ Column('order_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer, ForeignKey(
+ 'users.user_id'), nullable=False))
+
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
+
class Address(cls.Comparable):
pass
+
class Order(cls.Comparable):
pass
"""
- users, orders, User, Address, Order, addresses = (self.tables.users,
- self.tables.orders,
- self.classes.User,
- self.classes.Address,
- self.classes.Order,
- self.tables.addresses)
-
+ users, orders, User, Address, Order, addresses = \
+ (self.tables.users,
+ self.tables.orders,
+ self.classes.User,
+ self.classes.Address,
+ self.classes.Order,
+ self.tables.addresses)
mapper(Order, orders)
mapper(Address, addresses)
mapper(User, users, properties=dict(
addresses=relationship(Address, cascade="all,delete-orphan",
- backref="user"),
+ backref="user"),
orders=relationship(Order, cascade='all, delete-orphan')
))
s = Session()
s.commit()
assert o in s and o not in s.new
-
def test_pending_collection_expunge(self):
"""Removing a pending item from a collection expunges it from
the session."""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties=dict(
addresses=relationship(Address, cascade="all,delete-orphan",
- backref="user")
+ backref="user")
))
s = create_session()
def test_nonorphans_ok(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties=dict(
addresses=relationship(Address, cascade="all,delete",
- backref="user")
+ backref="user")
))
s = create_session()
u = User(name='u1', addresses=[Address(email_address='ad1')])
@classmethod
def define_tables(cls, metadata):
Table('order', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True)
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True))
Table('item', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('order_id', Integer, ForeignKey('order.id'), nullable=False)
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('order_id', Integer, ForeignKey(
+ 'order.id'), nullable=False))
Table('attribute', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('item_id', Integer, ForeignKey('item.id'), nullable=False)
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('item_id', Integer, ForeignKey('item.id'),
+ nullable=False))
@classmethod
def setup_classes(cls):
class Order(cls.Comparable):
pass
+
class Item(cls.Comparable):
pass
+
class Attribute(cls.Comparable):
pass
def test_singlelevel_remove(self):
item, Order, order, Item = (self.tables.item,
- self.classes.Order,
- self.tables.order,
- self.classes.Item)
+ self.classes.Order,
+ self.tables.order,
+ self.classes.Item)
mapper(Order, order, properties={
- 'items':relationship(Item, cascade="all, delete-orphan")
+ 'items': relationship(Item, cascade="all, delete-orphan")
})
mapper(Item, item)
s = Session()
assert i1 not in o1.items
def test_multilevel_remove(self):
- Item, Attribute, order, item, attribute, Order = (self.classes.Item,
- self.classes.Attribute,
- self.tables.order,
- self.tables.item,
- self.tables.attribute,
- self.classes.Order)
+ Item, Attribute, order, item, attribute, Order = \
+ (self.classes.Item,
+ self.classes.Attribute,
+ self.tables.order,
+ self.tables.item,
+ self.tables.attribute,
+ self.classes.Order)
mapper(Order, order, properties={
- 'items':relationship(Item, cascade="all, delete-orphan")
+ 'items': relationship(Item, cascade="all, delete-orphan")
})
mapper(Item, item, properties={
- 'attributes':relationship(Attribute, cascade="all, delete-orphan")
+ 'attributes': relationship(Attribute, cascade="all, delete-orphan")
})
mapper(Attribute, attribute)
s = Session()
assert i1 not in s
assert a1 not in o1.items
+
class DoubleParentO2MOrphanTest(fixtures.MappedTest):
"""Test orphan behavior on an entity that requires
two parents via many-to-one (one-to-many collection.).
@classmethod
def define_tables(cls, meta):
Table('sales_reps', meta,
- Column('sales_rep_id', Integer,primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)))
+ Column('sales_rep_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)))
Table('accounts', meta,
- Column('account_id', Integer,primary_key=True,
- test_needs_autoincrement=True),
- Column('balance', Integer))
+ Column('account_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('balance', Integer))
Table('customers', meta,
- Column('customer_id', Integer,primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('sales_rep_id', Integer,
- ForeignKey('sales_reps.sales_rep_id')),
- Column('account_id', Integer,
- ForeignKey('accounts.account_id')))
+ Column('customer_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('sales_rep_id', Integer,
+ ForeignKey('sales_reps.sales_rep_id')),
+ Column('account_id', Integer,
+ ForeignKey('accounts.account_id')))
def _fixture(self, legacy_is_orphan, uselist):
sales_reps, customers, accounts = (self.tables.sales_reps,
- self.tables.customers,
- self.tables.accounts)
-
+ self.tables.customers,
+ self.tables.accounts)
class Customer(fixtures.ComparableEntity):
pass
+
class Account(fixtures.ComparableEntity):
pass
+
class SalesRep(fixtures.ComparableEntity):
pass
mapper(Customer, customers, legacy_is_orphan=legacy_is_orphan)
mapper(Account, accounts, properties=dict(
customers=relationship(Customer,
- cascade="all,delete-orphan",
- backref="account",
- uselist=uselist)))
+ cascade="all,delete-orphan",
+ backref="account",
+ uselist=uselist)))
mapper(SalesRep, sales_reps, properties=dict(
customers=relationship(Customer,
- cascade="all,delete-orphan",
- backref="sales_rep",
- uselist=uselist)))
+ cascade="all,delete-orphan",
+ backref="sales_rep",
+ uselist=uselist)))
s = create_session()
a = Account(balance=0)
s, c, a, sr = self._fixture(True, False)
-
a.customers = None
assert c in s, "Should not expunge customer yet, still has one parent"
s, c, a, sr = self._fixture(False, False)
-
a.customers = None
assert c not in s, "Should expunge customer when either parent is gone"
@classmethod
def define_tables(cls, metadata):
Table('addresses', metadata,
- Column('address_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('street', String(30)),
- )
+ Column('address_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('street', String(30)))
Table('homes', metadata,
- Column('home_id', Integer, primary_key=True, key="id",
- test_needs_autoincrement=True),
- Column('description', String(30)),
- Column('address_id', Integer, ForeignKey('addresses.address_id'),
- nullable=False),
- )
+ Column('home_id', Integer, primary_key=True, key="id",
+ test_needs_autoincrement=True),
+ Column('description', String(30)),
+ Column('address_id', Integer, ForeignKey('addresses.address_id'),
+ nullable=False))
Table('businesses', metadata,
- Column('business_id', Integer, primary_key=True, key="id",
- test_needs_autoincrement=True),
- Column('description', String(30), key="description"),
- Column('address_id', Integer, ForeignKey('addresses.address_id'),
- nullable=False),
- )
+ Column('business_id', Integer, primary_key=True, key="id",
+ test_needs_autoincrement=True),
+ Column('description', String(30), key="description"),
+ Column('address_id', Integer, ForeignKey('addresses.address_id'),
+ nullable=False))
def test_non_orphan(self):
"""test that an entity can have two parent delete-orphan
cascades, and persists normally."""
homes, businesses, addresses = (self.tables.homes,
- self.tables.businesses,
- self.tables.addresses)
-
+ self.tables.businesses,
+ self.tables.addresses)
class Address(fixtures.ComparableEntity):
pass
+
class Home(fixtures.ComparableEntity):
pass
+
class Business(fixtures.ComparableEntity):
pass
mapper(Address, addresses)
- mapper(Home, homes, properties={'address'
- : relationship(Address, cascade='all,delete-orphan',
- single_parent=True)})
- mapper(Business, businesses, properties={'address'
- : relationship(Address, cascade='all,delete-orphan',
- single_parent=True)})
+ mapper(Home, homes, properties={'address': relationship(
+ Address, cascade='all,delete-orphan', single_parent=True)})
+ mapper(Business, businesses, properties={'address': relationship(
+ Address, cascade='all,delete-orphan', single_parent=True)})
session = create_session()
h1 = Home(description='home1', address=Address(street='address1'))
b1 = Business(description='business1',
address=Address(street='address2'))
- session.add_all((h1,b1))
+ session.add_all((h1, b1))
session.flush()
session.expunge_all()
eq_(session.query(Home).get(h1.id), Home(description='home1',
- address=Address(street='address1')))
+ address=Address(
+ street='address1')))
eq_(session.query(Business).get(b1.id),
Business(description='business1',
- address=Address(street='address2')))
+ address=Address(street='address2')))
def test_orphan(self):
"""test that an entity can have two parent delete-orphan
parent."""
homes, businesses, addresses = (self.tables.homes,
- self.tables.businesses,
- self.tables.addresses)
-
+ self.tables.businesses,
+ self.tables.addresses)
class Address(fixtures.ComparableEntity):
pass
pass
mapper(Address, addresses)
- mapper(Home, homes, properties={'address'
- : relationship(Address, cascade='all,delete-orphan',
- single_parent=True)})
- mapper(Business, businesses, properties={'address'
- : relationship(Address, cascade='all,delete-orphan',
- single_parent=True)})
+ mapper(Home, homes, properties={'address': relationship(
+ Address, cascade='all,delete-orphan', single_parent=True)})
+ mapper(Business, businesses, properties={'address': relationship(
+ Address, cascade='all,delete-orphan', single_parent=True)})
session = create_session()
a1 = Address()
session.add(a1)
session.flush()
+
class CollectionAssignmentOrphanTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('table_a', metadata,
- Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
Column('name', String(30)))
Table('table_b', metadata,
- Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
Column('name', String(30)),
Column('a_id', Integer, ForeignKey('table_a.id')))
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
mapper(A, table_a, properties={
- 'bs':relationship(B, cascade="all, delete-orphan")
- })
+ 'bs': relationship(B, cascade="all, delete-orphan")
+ })
mapper(B, table_b)
a1 = A(name='a1', bs=[B(name='b1'), B(name='b2'), B(name='b3')])
a1 = sess.query(A).get(a1.id)
assert not class_mapper(B)._is_orphan(
attributes.instance_state(a1.bs[0]))
- a1.bs[0].foo='b2modified'
- a1.bs[1].foo='b3modified'
+ a1.bs[0].foo = 'b2modified'
+ a1.bs[1].foo = 'b3modified'
sess.flush()
sess.expunge_all()
eq_(sess.query(A).get(a1.id),
A(name='a1', bs=[B(name='b1'), B(name='b2'), B(name='b3')]))
+
class OrphanCriterionTest(fixtures.MappedTest):
@classmethod
def define_tables(self, metadata):
Table("core", metadata,
- Column("id", Integer,
- primary_key=True, test_needs_autoincrement=True),
- Column("related_one_id", Integer, ForeignKey("related_one.id")),
- Column("related_two_id", Integer, ForeignKey("related_two.id"))
- )
+ Column("id", Integer,
+ primary_key=True, test_needs_autoincrement=True),
+ Column("related_one_id", Integer, ForeignKey("related_one.id")),
+ Column("related_two_id", Integer, ForeignKey("related_two.id")))
Table("related_one", metadata,
- Column("id", Integer,
- primary_key=True, test_needs_autoincrement=True),
- )
+ Column("id", Integer,
+ primary_key=True, test_needs_autoincrement=True))
Table("related_two", metadata,
- Column("id", Integer,
- primary_key=True, test_needs_autoincrement=True),
- )
+ Column("id", Integer,
+ primary_key=True, test_needs_autoincrement=True))
def _fixture(self, legacy_is_orphan, persistent,
- r1_present, r2_present, detach_event=True):
+ r1_present, r2_present, detach_event=True):
class Core(object):
pass
mapper(Core, self.tables.core, legacy_is_orphan=legacy_is_orphan)
mapper(RelatedOne, self.tables.related_one, properties={
- 'cores': relationship(Core, cascade="all, delete-orphan",
- backref="r1")
- })
+ 'cores': relationship(Core, cascade="all, delete-orphan",
+ backref="r1")
+ })
mapper(RelatedTwo, self.tables.related_two, properties={
- 'cores': relationship(Core, cascade="all, delete-orphan",
- backref="r2")
- })
+ 'cores': relationship(Core, cascade="all, delete-orphan",
+ backref="r2")
+ })
c1 = Core()
if detach_event:
r1 = RelatedOne(cores=[c1])
self._assert_not_orphan(c1)
+
class O2MConflictTest(fixtures.MappedTest):
"""test that O2M dependency detects a change in parent, does the
right thing, and updates the collection/attribute.
@classmethod
def define_tables(cls, metadata):
Table("parent", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True)
- )
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True))
Table("child", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('parent_id', Integer, ForeignKey('parent.id'),
- nullable=False)
- )
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('parent_id', Integer, ForeignKey('parent.id'),
+ nullable=False))
@classmethod
def setup_classes(cls):
class Parent(cls.Comparable):
pass
+
class Child(cls.Comparable):
pass
sess.add(p2)
sess.flush()
- eq_(sess.query(Child).filter(Child.parent_id==p2.id).all(), [c1])
+ eq_(sess.query(Child).filter(Child.parent_id == p2.id).all(), [c1])
def test_o2o_delete_old(self):
Child, Parent, parent, child = (self.classes.Child,
- self.classes.Parent,
- self.tables.parent,
- self.tables.child)
+ self.classes.Parent,
+ self.tables.parent,
+ self.tables.child)
mapper(Parent, parent, properties={
- 'child':relationship(Child, uselist=False)
+ 'child': relationship(Child, uselist=False)
})
mapper(Child, child)
self._do_move_test(True)
def test_o2m_delete_old(self):
Child, Parent, parent, child = (self.classes.Child,
- self.classes.Parent,
- self.tables.parent,
- self.tables.child)
+ self.classes.Parent,
+ self.tables.parent,
+ self.tables.child)
mapper(Parent, parent, properties={
- 'child':relationship(Child, uselist=True)
+ 'child': relationship(Child, uselist=True)
})
mapper(Child, child)
self._do_move_test(True)
def test_o2o_backref_delete_old(self):
Child, Parent, parent, child = (self.classes.Child,
- self.classes.Parent,
- self.tables.parent,
- self.tables.child)
+ self.classes.Parent,
+ self.tables.parent,
+ self.tables.child)
mapper(Parent, parent, properties={
- 'child':relationship(Child, uselist=False, backref='parent')
+ 'child': relationship(Child, uselist=False, backref='parent')
})
mapper(Child, child)
self._do_move_test(True)
def test_o2o_delcascade_delete_old(self):
Child, Parent, parent, child = (self.classes.Child,
- self.classes.Parent,
- self.tables.parent,
- self.tables.child)
+ self.classes.Parent,
+ self.tables.parent,
+ self.tables.child)
mapper(Parent, parent, properties={
- 'child':relationship(Child, uselist=False, cascade="all, delete")
+ 'child': relationship(Child, uselist=False, cascade="all, delete")
})
mapper(Child, child)
self._do_move_test(True)
def test_o2o_delorphan_delete_old(self):
Child, Parent, parent, child = (self.classes.Child,
- self.classes.Parent,
- self.tables.parent,
- self.tables.child)
+ self.classes.Parent,
+ self.tables.parent,
+ self.tables.child)
mapper(Parent, parent, properties={
- 'child':relationship(Child, uselist=False,
- cascade="all, delete, delete-orphan")
+ 'child': relationship(Child, uselist=False,
+ cascade="all, delete, delete-orphan")
})
mapper(Child, child)
self._do_move_test(True)
def test_o2o_delorphan_backref_delete_old(self):
Child, Parent, parent, child = (self.classes.Child,
- self.classes.Parent,
- self.tables.parent,
- self.tables.child)
+ self.classes.Parent,
+ self.tables.parent,
+ self.tables.child)
mapper(Parent, parent, properties={
- 'child':relationship(Child, uselist=False,
- cascade="all, delete, delete-orphan",
- backref='parent')
+ 'child': relationship(Child, uselist=False,
+ cascade="all, delete, delete-orphan",
+ backref='parent')
})
mapper(Child, child)
self._do_move_test(True)
def test_o2o_backref_delorphan_delete_old(self):
Child, Parent, parent, child = (self.classes.Child,
- self.classes.Parent,
- self.tables.parent,
- self.tables.child)
+ self.classes.Parent,
+ self.tables.parent,
+ self.tables.child)
mapper(Parent, parent)
- mapper(Child, child, properties = {
- 'parent' : relationship(Parent, uselist=False, single_parent=True,
- backref=backref('child', uselist=False),
- cascade="all,delete,delete-orphan")
+ mapper(Child, child, properties={
+ 'parent': relationship(Parent, uselist=False, single_parent=True,
+ backref=backref('child', uselist=False),
+ cascade="all,delete,delete-orphan")
})
self._do_move_test(True)
self._do_move_test(False)
def test_o2m_backref_delorphan_delete_old(self):
Child, Parent, parent, child = (self.classes.Child,
- self.classes.Parent,
- self.tables.parent,
- self.tables.child)
+ self.classes.Parent,
+ self.tables.parent,
+ self.tables.child)
mapper(Parent, parent)
- mapper(Child, child, properties = {
- 'parent' : relationship(Parent, uselist=False, single_parent=True,
- backref=backref('child', uselist=True),
- cascade="all,delete,delete-orphan")
+ mapper(Child, child, properties={
+ 'parent': relationship(Parent, uselist=False, single_parent=True,
+ backref=backref('child', uselist=True),
+ cascade="all,delete,delete-orphan")
})
self._do_move_test(True)
self._do_move_test(False)
class PartialFlushTest(fixtures.MappedTest):
- """test cascade behavior as it relates to object lists passed to flush()."""
+ """test cascade behavior as it relates to object lists passed to flush().
+ """
@classmethod
def define_tables(cls, metadata):
Table("base", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("descr", String(50))
- )
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("descr", String(50)))
Table("noninh_child", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('base_id', Integer, ForeignKey('base.id'))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('base_id', Integer, ForeignKey('base.id')))
Table("parent", metadata,
- Column("id", Integer, ForeignKey("base.id"), primary_key=True)
- )
+ Column("id", Integer, ForeignKey("base.id"), primary_key=True))
Table("inh_child", metadata,
- Column("id", Integer, ForeignKey("base.id"), primary_key=True),
- Column("parent_id", Integer, ForeignKey("parent.id"))
- )
+ Column("id", Integer, ForeignKey("base.id"), primary_key=True),
+ Column("parent_id", Integer, ForeignKey("parent.id")))
def test_o2m_m2o(self):
base, noninh_child = self.tables.base, self.tables.noninh_child
class Base(fixtures.ComparableEntity):
pass
+
class Child(fixtures.ComparableEntity):
pass
mapper(Base, base, properties={
- 'children':relationship(Child, backref='parent')
+ 'children': relationship(Child, backref='parent')
})
mapper(Child, noninh_child)
"""test ticket 1306"""
base, inh_child, parent = (self.tables.base,
- self.tables.inh_child,
- self.tables.parent)
-
+ self.tables.inh_child,
+ self.tables.parent)
class Base(fixtures.ComparableEntity):
pass
+
class Parent(Base):
pass
+
class Child(Base):
pass
- mapper(Base,base)
+ mapper(Base, base)
mapper(Child, inh_child,
- inherits=Base,
- properties={'parent': relationship(
- Parent,
- backref='children',
- primaryjoin=inh_child.c.parent_id == parent.c.id
- )}
- )
+ inherits=Base,
+ properties={'parent': relationship(
+ Parent,
+ backref='children',
+ primaryjoin=inh_child.c.parent_id == parent.c.id
+ )})
-
- mapper(Parent,parent, inherits=Base)
+ mapper(Parent, parent, inherits=Base)
sess = create_session()
p1 = Parent()
assert p1 in sess.new
assert c1 not in sess.new
assert c2 in sess.new
-
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import assert_raises, assert_raises_message
+
class Canary(sa.orm.interfaces.AttributeExtension):
def __init__(self):
self.data = set()
self.added = set()
self.removed = set()
+
def append(self, obj, value, initiator):
assert value not in self.added
self.data.add(value)
self.added.add(value)
return value
+
def remove(self, obj, value, initiator):
assert value not in self.removed
self.data.remove(value)
self.removed.add(value)
+
def set(self, obj, value, oldvalue, initiator):
if isinstance(value, str):
value = CollectionsTest.entity_maker()
self.append(obj, value, None)
return value
+
class CollectionsTest(fixtures.ORMTest):
class Entity(object):
def __init__(self, a=None, b=None, c=None):
self.a = a
self.b = b
self.c = c
+
def __repr__(self):
return str((id(self), self.a, self.b, self.c))
canary = Canary()
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True,
- extension=canary,
- typecallable=typecallable, useobject=True)
+ extension=canary,
+ typecallable=typecallable,
+ useobject=True)
obj = Foo()
adapter = collections.collection_adapter(obj.attr)
direct = obj.attr
if to_set is None:
- to_set = lambda col: set(col)
+ def to_set(col): return set(col)
def assert_eq():
self.assert_(to_set(direct) == canary.data)
self.assert_(set(adapter) == canary.data)
- assert_ne = lambda: self.assert_(to_set(direct) != canary.data)
+
+ def assert_ne():
+ self.assert_(to_set(direct) != canary.data)
e1, e2 = creator(), creator()
canary = Canary()
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True,
- extension=canary,
- typecallable=typecallable, useobject=True)
+ extension=canary,
+ typecallable=typecallable,
+ useobject=True)
obj = Foo()
adapter = collections.collection_adapter(obj.attr)
assert_eq()
if util.reduce(and_, [hasattr(direct, a) for a in
- ('__delitem__', 'insert', '__len__')], True):
+ ('__delitem__', 'insert', '__len__')], True):
values = [creator(), creator(), creator(), creator()]
direct[slice(0, 1)] = values
control[slice(0, 1)] = values
values = [creator(), creator(), creator()]
control[:] = values
direct[:] = values
+
def invalid():
direct[slice(0, 6, 2)] = [creator()]
assert_raises(ValueError, invalid)
control[0:0] = values
assert_eq()
-
if hasattr(direct, '__delitem__') or hasattr(direct, '__delslice__'):
for i in range(1, 4):
e = creator()
canary = Canary()
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True,
- extension=canary,
- typecallable=typecallable, useobject=True)
+ extension=canary,
+ typecallable=typecallable,
+ useobject=True)
obj = Foo()
direct = obj.attr
class ListLike(object):
def __init__(self):
self.data = list()
+
def append(self, item):
self.data.append(item)
+
def remove(self, item):
self.data.remove(item)
+
def insert(self, index, item):
self.data.insert(index, item)
+
def pop(self, index=-1):
return self.data.pop(index)
+
def extend(self):
assert False
+
def __len__(self):
return len(self.data)
+
def __setitem__(self, key, value):
self.data[key] = value
+
def __getitem__(self, key):
return self.data[key]
+
def __delitem__(self, key):
del self.data[key]
+
def __iter__(self):
return iter(self.data)
__hash__ = object.__hash__
+
def __eq__(self, other):
return self.data == other
+
def __repr__(self):
return 'ListLike(%s)' % repr(self.data)
class ListLike(object):
def __init__(self):
self.data = list()
+
def append(self, item):
self.data.append(item)
+
def remove(self, item):
self.data.remove(item)
+
def insert(self, index, item):
self.data.insert(index, item)
+
def pop(self, index=-1):
return self.data.pop(index)
+
def extend(self):
assert False
+
def __iter__(self):
return iter(self.data)
__hash__ = object.__hash__
+
def __eq__(self, other):
return self.data == other
+
def __repr__(self):
return 'ListLike(%s)' % repr(self.data)
def test_list_emulates(self):
class ListIsh(object):
__emulates__ = list
+
def __init__(self):
self.data = list()
+
def append(self, item):
self.data.append(item)
+
def remove(self, item):
self.data.remove(item)
+
def insert(self, index, item):
self.data.insert(index, item)
+
def pop(self, index=-1):
return self.data.pop(index)
+
def extend(self):
assert False
+
def __iter__(self):
return iter(self.data)
__hash__ = object.__hash__
+
def __eq__(self, other):
return self.data == other
+
def __repr__(self):
return 'ListIsh(%s)' % repr(self.data)
canary = Canary()
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True,
- extension=canary,
- typecallable=typecallable, useobject=True)
+ extension=canary,
+ typecallable=typecallable,
+ useobject=True)
obj = Foo()
adapter = collections.collection_adapter(obj.attr)
direct.add(item)
control.add(item)
assert_eq()
+
def zap():
for item in list(direct):
direct.remove(item)
addall(e)
addall(e)
-
if hasattr(direct, 'remove'):
e = creator()
addall(e)
except TypeError:
assert True
-
def _test_set_bulk(self, typecallable, creator=None):
if creator is None:
creator = self.entity_maker
canary = Canary()
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True,
- extension=canary,
- typecallable=typecallable, useobject=True)
+ extension=canary,
+ typecallable=typecallable,
+ useobject=True)
obj = Foo()
direct = obj.attr
class SetLike(object):
def __init__(self):
self.data = set()
+
def add(self, item):
self.data.add(item)
+
def remove(self, item):
self.data.remove(item)
+
def discard(self, item):
self.data.discard(item)
+
def clear(self):
self.data.clear()
+
def pop(self):
return self.data.pop()
+
def update(self, other):
self.data.update(other)
+
def __iter__(self):
return iter(self.data)
__hash__ = object.__hash__
+
def __eq__(self, other):
return self.data == other
def test_set_emulates(self):
class SetIsh(object):
__emulates__ = set
+
def __init__(self):
self.data = set()
+
def add(self, item):
self.data.add(item)
+
def remove(self, item):
self.data.remove(item)
+
def discard(self, item):
self.data.discard(item)
+
def pop(self):
return self.data.pop()
+
def update(self, other):
self.data.update(other)
+
def __iter__(self):
return iter(self.data)
+
def clear(self):
self.data.clear()
__hash__ = object.__hash__
+
def __eq__(self, other):
return self.data == other
canary = Canary()
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True,
- extension=canary,
- typecallable=typecallable, useobject=True)
+ extension=canary,
+ typecallable=typecallable,
+ useobject=True)
obj = Foo()
adapter = collections.collection_adapter(obj.attr)
direct.set(item)
control[item.a] = item
assert_eq()
+
def zap():
for item in list(adapter):
direct.remove(item)
canary = Canary()
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True,
- extension=canary,
- typecallable=typecallable, useobject=True)
+ extension=canary,
+ typecallable=typecallable,
+ useobject=True)
obj = Foo()
direct = obj.attr
self.assert_(obj.attr is not direct)
self.assert_(obj.attr is not like_me)
self.assert_(
- set(collections.collection_adapter(obj.attr)) == set([e2]))
+ set(collections.collection_adapter(obj.attr)) == set([e2]))
self.assert_(e1 in canary.removed)
self.assert_(e2 in canary.added)
-
# key validity on bulk assignment is a basic feature of
# MappedCollection but is not present in basic, @converter-less
# dict collections.
self.assert_(obj.attr is not real_dict)
self.assert_('badkey' not in obj.attr)
eq_(set(collections.collection_adapter(obj.attr)),
- set([e2]))
+ set([e2]))
self.assert_(e3 not in canary.added)
else:
real_dict = dict(keyignored1=e3)
self.assert_(obj.attr is not real_dict)
self.assert_('keyignored1' not in obj.attr)
eq_(set(collections.collection_adapter(obj.attr)),
- set([e3]))
+ set([e3]))
self.assert_(e2 in canary.removed)
self.assert_(e3 in canary.added)
assert_raises_message(
sa_exc.ArgumentError,
'Type InstrumentedDict must elect an appender '
- 'method to be a collection class',
+ 'method to be a collection class',
self._test_adapter, dict, self.dictable_entity,
- to_set=lambda c: set(c.values())
+ to_set=lambda c: set(c.values())
)
assert_raises_message(
sa_exc.ArgumentError,
'Type InstrumentedDict must elect an appender method '
- 'to be a collection class',
+ 'to be a collection class',
self._test_dict, dict
)
@collection.internally_instrumented
def set(self, item, _sa_initiator=None):
self.__setitem__(item.a, item, _sa_initiator=_sa_initiator)
+
@collection.remover
@collection.internally_instrumented
def _remove(self, item, _sa_initiator=None):
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True,
- extension=canary,
- typecallable=MyDict, useobject=True)
+ extension=canary,
+ typecallable=MyDict, useobject=True)
f = Foo()
f.attr = {"k1": 1, "k2": 2}
current = self.data.get(item.a, None)
self.data[item.a] = item
return current
+
@collection.remover
def _remove(self, item):
del self.data[item.a]
+
def __setitem__(self, key, value):
self.data[key] = value
+
def __getitem__(self, key):
return self.data[key]
+
def __delitem__(self, key):
del self.data[key]
+
def values(self):
return list(self.data.values())
+
def __contains__(self, key):
return key in self.data
+
@collection.iterator
def itervalues(self):
return iter(self.data.values())
__hash__ = object.__hash__
+
def __eq__(self, other):
return self.data == other
+
def __repr__(self):
return 'DictLike(%s)' % repr(self.data)
def test_dict_emulates(self):
class DictIsh(object):
__emulates__ = dict
+
def __init__(self):
self.data = dict()
current = self.data.get(item.a, None)
self.data[item.a] = item
return current
+
@collection.remover
def _remove(self, item):
del self.data[item.a]
+
def __setitem__(self, key, value):
self.data[key] = value
+
def __getitem__(self, key):
return self.data[key]
+
def __delitem__(self, key):
del self.data[key]
+
def values(self):
return list(self.data.values())
+
def __contains__(self, key):
return key in self.data
+
@collection.iterator
def itervalues(self):
return iter(self.data.values())
__hash__ = object.__hash__
+
def __eq__(self, other):
return self.data == other
+
def __repr__(self):
return 'DictIsh(%s)' % repr(self.data)
canary = Canary()
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True,
- extension=canary,
- typecallable=typecallable, useobject=True)
+ extension=canary,
+ typecallable=typecallable,
+ useobject=True)
obj = Foo()
adapter = collections.collection_adapter(obj.attr)
class MyCollection(object):
def __init__(self):
self.data = set()
+
@collection.appender
def push(self, item):
self.data.add(item)
+
@collection.remover
def zark(self, item):
self.data.remove(item)
+
@collection.removes_return()
def maybe_zark(self, item):
if item in self.data:
self.data.remove(item)
return item
+
@collection.iterator
def __iter__(self):
return iter(self.data)
__hash__ = object.__hash__
+
def __eq__(self, other):
return self.data == other
def test_object_emulates(self):
class MyCollection2(object):
__emulates__ = None
+
def __init__(self):
self.data = set()
# looks like a list
+
def append(self, item):
assert False
+
@collection.appender
def push(self, item):
self.data.add(item)
+
@collection.remover
def zark(self, item):
self.data.remove(item)
+
@collection.removes_return()
def maybe_zark(self, item):
if item in self.data:
self.data.remove(item)
return item
+
@collection.iterator
def __iter__(self):
return iter(self.data)
__hash__ = object.__hash__
+
def __eq__(self, other):
return self.data == other
class Custom(object):
def __init__(self):
self.data = []
+
@collection.appender
@collection.adds('entity')
def put(self, entity):
canary = Canary()
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True,
- extension=canary,
- typecallable=Custom, useobject=True)
+ extension=canary,
+ typecallable=Custom, useobject=True)
obj = Foo()
adapter = collections.collection_adapter(obj.attr)
direct = obj.attr
control = list()
+
def assert_eq():
self.assert_(set(direct) == canary.data)
self.assert_(set(adapter) == canary.data)
creator = self.entity_maker
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True,
- extension=canary, useobject=True)
+ extension=canary, useobject=True)
obj = Foo()
col1 = obj.attr
obj.attr[0] = e3
self.assert_(e3 in canary.data)
+
class DictHelpersTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('parents', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('label', String(128)))
Table('children', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('parent_id', Integer, ForeignKey('parents.id'),
nullable=False),
Column('a', String(128)),
def _test_scalar_mapped(self, collection_class):
parents, children, Parent, Child = (self.tables.parents,
- self.tables.children,
- self.classes.Parent,
- self.classes.Child)
+ self.tables.children,
+ self.classes.Parent,
+ self.classes.Child)
mapper(Child, children)
mapper(Parent, parents, properties={
'children': relationship(Child, collection_class=collection_class,
- cascade="all, delete-orphan")})
+ cascade="all, delete-orphan")})
p = Parent()
p.children['foo'] = Child('foo', 'value')
p = session.query(Parent).get(pid)
-
eq_(set(p.children.keys()), set(['foo', 'bar']))
cid = p.children['foo'].id
self.assert_(p.children['foo'].id != cid)
self.assert_(
- len(list(collections.collection_adapter(p.children))) == 2)
+ len(list(collections.collection_adapter(p.children))) == 2)
session.flush()
session.expunge_all()
p = session.query(Parent).get(pid)
self.assert_(
- len(list(collections.collection_adapter(p.children))) == 2)
+ len(list(collections.collection_adapter(p.children))) == 2)
collections.collection_adapter(p.children).remove_with_event(
p.children['foo'])
self.assert_(
- len(list(collections.collection_adapter(p.children))) == 1)
+ len(list(collections.collection_adapter(p.children))) == 1)
session.flush()
session.expunge_all()
p = session.query(Parent).get(pid)
self.assert_(
- len(list(collections.collection_adapter(p.children))) == 1)
+ len(list(collections.collection_adapter(p.children))) == 1)
del p.children['bar']
self.assert_(
- len(list(collections.collection_adapter(p.children))) == 0)
+ len(list(collections.collection_adapter(p.children))) == 0)
session.flush()
session.expunge_all()
p = session.query(Parent).get(pid)
self.assert_(
- len(list(collections.collection_adapter(p.children))) == 0)
-
+ len(list(collections.collection_adapter(p.children))) == 0)
def _test_composite_mapped(self, collection_class):
parents, children, Parent, Child = (self.tables.parents,
- self.tables.children,
- self.classes.Parent,
- self.classes.Child)
+ self.tables.children,
+ self.classes.Parent,
+ self.classes.Child)
mapper(Child, children)
mapper(Parent, parents, properties={
'children': relationship(Child, collection_class=collection_class,
- cascade="all, delete-orphan")
- })
+ cascade="all, delete-orphan")
+ })
p = Parent()
p.children[('foo', '1')] = Child('foo', '1', 'value 1')
p = session.query(Parent).get(pid)
self.assert_(
- set(p.children.keys()) == set([('foo', '1'), ('foo', '2')]))
+ set(p.children.keys()) == set([('foo', '1'), ('foo', '2')]))
cid = p.children[('foo', '1')].id
collections.collection_adapter(p.children).append_with_event(
p = session.query(Parent).get(pid)
self.assert_(
- set(p.children.keys()) == set([('foo', '1'), ('foo', '2')]))
+ set(p.children.keys()) == set([('foo', '1'), ('foo', '2')]))
self.assert_(p.children[('foo', '1')].id != cid)
self.assert_(
- len(list(collections.collection_adapter(p.children))) == 2)
+ len(list(collections.collection_adapter(p.children))) == 2)
def test_mapped_collection(self):
collection_class = collections.mapped_collection(lambda c: c.a)
collections.column_mapped_collection,
text('a'))
-
def test_column_mapped_collection(self):
children = self.tables.children
def __init__(self, keyfunc):
collections.MappedCollection.__init__(self, keyfunc)
util.OrderedDict.__init__(self)
- collection_class = lambda: Ordered2(lambda v: (v.a, v.b))
+
+ def collection_class():
+ return Ordered2(lambda v: (v.a, v.b))
self._test_composite_mapped(collection_class)
+
class ColumnMappedWSerialize(fixtures.MappedTest):
"""test the column_mapped_collection serializer against
multi-table and indirect table edge cases, including
@classmethod
def define_tables(cls, metadata):
Table('foo', metadata,
- Column('id', Integer(), primary_key=True),
- Column('b', String(128))
- )
+ Column('id', Integer(), primary_key=True),
+ Column('b', String(128)))
Table('bar', metadata,
- Column('id', Integer(), primary_key=True),
- Column('foo_id', Integer, ForeignKey('foo.id')),
- Column('bat_id', Integer),
- schema="x"
- )
+ Column('id', Integer(), primary_key=True),
+ Column('foo_id', Integer, ForeignKey('foo.id')),
+ Column('bat_id', Integer),
+ schema="x")
+
@classmethod
def setup_classes(cls):
class Foo(cls.Basic):
pass
+
class Bar(Foo):
pass
c3 = loads(dumps(c2))
eq_(c3.keyfunc(obj), expected)
+
class CustomCollectionsTest(fixtures.MappedTest):
"""test the integration of collections with mapped classes."""
def define_tables(cls, metadata):
Table('sometable', metadata,
Column('col1', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('data', String(30)))
Table('someothertable', metadata,
Column('col1', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('scol1', Integer,
ForeignKey('sometable.col1')),
Column('data', String(20)))
def test_basic(self):
someothertable, sometable = self.tables.someothertable, \
- self.tables.sometable
+ self.tables.sometable
class MyList(list):
pass
+
class Foo(object):
pass
+
class Bar(object):
pass
"""test that a 'set' can be used as a collection and can lazyload."""
someothertable, sometable = self.tables.someothertable, \
- self.tables.sometable
+ self.tables.sometable
class Foo(object):
pass
+
class Bar(object):
pass
mapper(Foo, sometable, properties={
"""test that a 'dict' can be used as a collection and can lazyload."""
someothertable, sometable = self.tables.someothertable, \
- self.tables.sometable
-
+ self.tables.sometable
class Foo(object):
pass
+
class Bar(object):
pass
+
class AppenderDict(dict):
@collection.appender
def set(self, item):
self[id(item)] = item
+
@collection.remover
def remove(self, item):
if id(item) in self:
collection and can lazyload."""
someothertable, sometable = self.tables.someothertable, \
- self.tables.sometable
-
+ self.tables.sometable
class Foo(object):
pass
+
class Bar(object):
def __init__(self, data): self.data = data
mapper(Foo, sometable, properties={
- 'bars':relationship(Bar,
- collection_class=collections.column_mapped_collection(
+ 'bars': relationship(
+ Bar, collection_class=collections.column_mapped_collection(
someothertable.c.data))
})
mapper(Bar, someothertable)
class ListLike(object):
def __init__(self):
self.data = list()
+
def append(self, item):
self.data.append(item)
+
def remove(self, item):
self.data.remove(item)
+
def insert(self, index, item):
self.data.insert(index, item)
+
def pop(self, index=-1):
return self.data.pop(index)
+
def extend(self):
assert False
+
def __len__(self):
return len(self.data)
+
def __setitem__(self, key, value):
self.data[key] = value
+
def __getitem__(self, key):
return self.data[key]
+
def __delitem__(self, key):
del self.data[key]
+
def __iter__(self):
return iter(self.data)
__hash__ = object.__hash__
+
def __eq__(self, other):
return self.data == other
+
def __repr__(self):
return 'ListLike(%s)' % repr(self.data)
def _test_list(self, listcls):
someothertable, sometable = self.tables.someothertable, \
- self.tables.sometable
+ self.tables.sometable
class Parent(object):
pass
+
class Child(object):
pass
def test_custom(self):
someothertable, sometable = self.tables.someothertable, \
- self.tables.sometable
+ self.tables.sometable
class Parent(object):
pass
+
class Child(object):
pass
class MyCollection(object):
def __init__(self):
self.data = []
+
@collection.appender
def append(self, value):
self.data.append(value)
+
@collection.remover
def remove(self, value):
self.data.remove(value)
+
@collection.iterator
def __iter__(self):
return iter(self.data)
def base_remove(self, x):
return "base_remove"
-
from sqlalchemy.orm.collections import _instrument_class
_instrument_class(Base)
def test_link_event(self):
canary = []
+
class Collection(list):
@collection.linker
def _on_link(self, obj):
instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True,
- typecallable=Collection, useobject=True)
+ typecallable=Collection, useobject=True)
f1 = Foo()
f1.attr.append(3)
f1.attr = []
assert not adapter._referenced_by_owner
-
-
-
-
-
metadata = MetaData(testing.db)
order = Table('orders', metadata,
- Column('id', Integer, primary_key=True),
- Column('employee_id', Integer, ForeignKey('employees.id'), nullable=False),
- Column('type', Unicode(16)))
+ Column('id', Integer, primary_key=True),
+ Column('employee_id', Integer, ForeignKey(
+ 'employees.id'), nullable=False),
+ Column('type', Unicode(16)))
employee = Table('employees', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', Unicode(16), unique=True, nullable=False))
+ Column('id', Integer, primary_key=True),
+ Column('name', Unicode(16), unique=True,
+ nullable=False))
product = Table('products', metadata,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True))
orderproduct = Table('orderproducts', metadata,
- Column('id', Integer, primary_key=True),
- Column('order_id', Integer, ForeignKey("orders.id"), nullable=False),
- Column('product_id', Integer, ForeignKey("products.id"), nullable=False),
- )
+ Column('id', Integer, primary_key=True),
+ Column('order_id', Integer, ForeignKey(
+ "orders.id"), nullable=False),
+ Column('product_id', Integer, ForeignKey(
+ "products.id"), nullable=False))
class Order(object):
pass
order_join = order.select().alias('pjoin')
order_mapper = mapper(Order, order,
- with_polymorphic=('*', order_join),
- polymorphic_on=order_join.c.type,
- polymorphic_identity='order',
- properties={
- 'orderproducts': relationship(OrderProduct, lazy='select', backref='order')}
- )
+ with_polymorphic=('*', order_join),
+ polymorphic_on=order_join.c.type,
+ polymorphic_identity='order',
+ properties={
+ 'orderproducts': relationship(
+ OrderProduct, lazy='select',
+ backref='order')}
+ )
mapper(Product, product,
- properties={
- 'orderproducts': relationship(OrderProduct, lazy='select', backref='product')}
- )
+ properties={
+ 'orderproducts': relationship(OrderProduct, lazy='select',
+ backref='product')}
+ )
mapper(Employee, employee,
- properties={
- 'orders': relationship(Order, lazy='select', backref='employee')})
+ properties={
+ 'orders': relationship(Order, lazy='select',
+ backref='employee')})
mapper(OrderProduct, orderproduct)
metadata = MetaData(testing.db)
order = Table('orders', metadata,
- Column('id', Integer, primary_key=True),
- Column('type', Unicode(16)))
+ Column('id', Integer, primary_key=True),
+ Column('type', Unicode(16)))
product = Table('products', metadata,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True))
orderproduct = Table('orderproducts', metadata,
- Column('id', Integer, primary_key=True),
- Column('order_id', Integer, ForeignKey("orders.id"), nullable=False),
- Column('product_id', Integer, ForeignKey("products.id"), nullable=False),
- )
+ Column('id', Integer, primary_key=True),
+ Column('order_id', Integer,
+ ForeignKey("orders.id"), nullable=False),
+ Column('product_id', Integer,
+ ForeignKey("products.id"),
+ nullable=False))
class Order(object):
pass
order_join = order.select().alias('pjoin')
order_mapper = mapper(Order, order,
- with_polymorphic=('*', order_join),
- polymorphic_on=order_join.c.type,
- polymorphic_identity='order',
- properties={
- 'orderproducts': relationship(OrderProduct, lazy='select', backref='product')}
- )
+ with_polymorphic=('*', order_join),
+ polymorphic_on=order_join.c.type,
+ polymorphic_identity='order',
+ properties={
+ 'orderproducts': relationship(
+ OrderProduct, lazy='select',
+ backref='product')}
+ )
mapper(Product, product,
- properties={
- 'orderproducts': relationship(OrderProduct, lazy='select', backref='product')}
- )
+ properties={
+ 'orderproducts': relationship(OrderProduct, lazy='select',
+ backref='product')}
+ )
mapper(OrderProduct, orderproduct)
def test_misc_one(self):
metadata = MetaData(testing.db)
node_table = Table("node", metadata,
- Column('node_id', Integer, primary_key=True),
- Column('name_index', Integer, nullable=True),
- )
+ Column('node_id', Integer, primary_key=True),
+ Column('name_index', Integer, nullable=True))
node_name_table = Table("node_name", metadata,
- Column('node_name_id', Integer, primary_key=True),
- Column('node_id', Integer, ForeignKey('node.node_id')),
- Column('host_id', Integer, ForeignKey('host.host_id')),
- Column('name', String(64), nullable=False),
- )
+ Column('node_name_id', Integer,
+ primary_key=True),
+ Column('node_id', Integer,
+ ForeignKey('node.node_id')),
+ Column('host_id', Integer,
+ ForeignKey('host.host_id')),
+ Column('name', String(64), nullable=False))
host_table = Table("host", metadata,
- Column('host_id', Integer, primary_key=True),
- Column('hostname', String(64), nullable=False,
- unique=True),
- )
+ Column('host_id', Integer, primary_key=True),
+ Column('hostname', String(64), nullable=False,
+ unique=True))
metadata.create_all()
try:
node_table.insert().execute(node_id=1, node_index=5)
- class Node(object):pass
- class NodeName(object):pass
- class Host(object):pass
+
+ class Node(object):
+ pass
+
+ class NodeName(object):
+ pass
+
+ class Host(object):
+ pass
node_mapper = mapper(Node, node_table)
host_mapper = mapper(Host, host_table)
node_name_mapper = mapper(NodeName, node_name_table,
- properties = {
- 'node' : relationship(Node, backref=backref('names')),
- 'host' : relationship(Host),
- }
- )
+ properties={
+ 'node': relationship(
+ Node, backref=backref('names')),
+ 'host': relationship(Host),
+ })
sess = create_session()
assert sess.query(Node).get(1).names == []
finally:
a = Table('a', meta, Column('id', Integer, primary_key=True))
b = Table('b', meta, Column('id', Integer, primary_key=True),
- Column('a_id', Integer, ForeignKey('a.id')))
+ Column('a_id', Integer, ForeignKey('a.id')))
class A(object):
pass
+
class B(object):
pass
mapper(A, a, properties={
- 'b':relationship(B, backref='a')
+ 'b': relationship(B, backref='a')
})
mapper(B, b, properties={
- 'a':relationship(A, backref='b')
+ 'a': relationship(A, backref='b')
})
assert_raises_message(
a = Table('a', meta, Column('id', Integer, primary_key=True))
b = Table('b', meta, Column('id', Integer, primary_key=True),
- Column('a_id', Integer, ForeignKey('a.id')))
+ Column('a_id', Integer, ForeignKey('a.id')))
class A(object):
pass
+
class B(object):
pass
+
class C(B):
pass
@classmethod
def define_tables(cls, metadata):
Table('graphs', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(30)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)))
Table('edges', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('graph_id', Integer,
- ForeignKey('graphs.id')),
- Column('x1', Integer),
- Column('y1', Integer),
- Column('x2', Integer),
- Column('y2', Integer),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('graph_id', Integer,
+ ForeignKey('graphs.id')),
+ Column('x1', Integer),
+ Column('y1', Integer),
+ Column('x2', Integer),
+ Column('y2', Integer))
@classmethod
def setup_mappers(cls):
def __init__(self, x, y):
self.x = x
self.y = y
+
def __composite_values__(self):
return [self.x, self.y]
__hash__ = None
+
def __eq__(self, other):
return isinstance(other, Point) and \
- other.x == self.x and \
- other.y == self.y
+ other.x == self.x and \
+ other.y == self.y
+
def __ne__(self, other):
return not isinstance(other, Point) or \
- not self.__eq__(other)
+ not self.__eq__(other)
class Graph(cls.Comparable):
pass
+
class Edge(cls.Comparable):
def __init__(self, *args):
if args:
self.start, self.end = args
mapper(Graph, graphs, properties={
- 'edges':relationship(Edge)
+ 'edges': relationship(Edge)
})
mapper(Edge, edges, properties={
- 'start':sa.orm.composite(Point, edges.c.x1, edges.c.y1),
+ 'start': sa.orm.composite(Point, edges.c.x1, edges.c.y1),
'end': sa.orm.composite(Point, edges.c.x2, edges.c.y2)
})
def _fixture(self):
Graph, Edge, Point = (self.classes.Graph,
- self.classes.Edge,
- self.classes.Point)
+ self.classes.Edge,
+ self.classes.Point)
sess = Session()
g = Graph(id=1, edges=[
def test_round_trip(self):
Graph, Point = self.classes.Graph, self.classes.Point
-
sess = self._fixture()
g1 = sess.query(Graph).first()
def test_detect_change(self):
Graph, Edge, Point = (self.classes.Graph,
- self.classes.Edge,
- self.classes.Point)
+ self.classes.Edge,
+ self.classes.Point)
sess = self._fixture()
def test_not_none(self):
Graph, Edge, Point = (self.classes.Graph,
- self.classes.Edge,
- self.classes.Point)
+ self.classes.Edge,
+ self.classes.Point)
# current contract. the composite is None
# when hasn't been populated etc. on a
def go():
g2 = sess.query(Graph).\
- options(sa.orm.joinedload('edges')).\
- get(g.id)
+ options(sa.orm.joinedload('edges')).\
+ get(g.id)
eq_(
[(e.start, e.end) for e in g2.edges],
def test_comparator(self):
Graph, Edge, Point = (self.classes.Graph,
- self.classes.Edge,
- self.classes.Point)
+ self.classes.Edge,
+ self.classes.Point)
sess = self._fixture()
g = sess.query(Graph).first()
assert sess.query(Edge).\
- filter(Edge.start == Point(3, 4)).one() is \
- g.edges[0]
+ filter(Edge.start == Point(3, 4)).one() is \
+ g.edges[0]
assert sess.query(Edge).\
- filter(Edge.start != Point(3, 4)).first() is \
- g.edges[1]
+ filter(Edge.start != Point(3, 4)).first() is \
+ g.edges[1]
eq_(
- sess.query(Edge).filter(Edge.start == None).all(),
+ sess.query(Edge).filter(Edge.start == None).all(), # noqa
[]
)
def test_comparator_aliased(self):
Graph, Edge, Point = (self.classes.Graph,
- self.classes.Edge,
- self.classes.Point)
+ self.classes.Edge,
+ self.classes.Point)
sess = self._fixture()
g = sess.query(Graph).first()
ea = aliased(Edge)
assert sess.query(ea).\
- filter(ea.start != Point(3, 4)).first() is \
- g.edges[1]
+ filter(ea.start != Point(3, 4)).first() is \
+ g.edges[1]
def test_get_history(self):
Edge = self.classes.Edge
from sqlalchemy.orm.attributes import get_history
e1 = Edge()
- e1.start = Point(1,2)
+ e1.start = Point(1, 2)
eq_(
get_history(e1, 'start'),
([Point(x=1, y=2)], (), [Point(x=None, y=None)])
start, end = Edge.start, Edge.end
- row = sess.query(start.label('s1'), end).filter(start == Point(3, 4)).first()
+ row = sess.query(start.label('s1'), end).filter(
+ start == Point(3, 4)).first()
eq_(row.s1.x, 3)
eq_(row.s1.y, 4)
eq_(row.end.x, 5)
e = Edge()
eq_(e.start, None)
+
class PrimaryKeyTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('graphs', metadata,
- Column('id', Integer, primary_key=True),
- Column('version_id', Integer, primary_key=True,
- nullable=True),
- Column('name', String(30)))
+ Column('id', Integer, primary_key=True),
+ Column('version_id', Integer, primary_key=True,
+ nullable=True),
+ Column('name', String(30)))
@classmethod
def setup_mappers(cls):
def __init__(self, id, version):
self.id = id
self.version = version
+
def __composite_values__(self):
return (self.id, self.version)
__hash__ = None
+
def __eq__(self, other):
return isinstance(other, Version) and \
- other.id == self.id and \
- other.version == self.version
+ other.id == self.id and \
+ other.version == self.version
+
def __ne__(self, other):
return not self.__eq__(other)
self.version = version
mapper(Graph, graphs, properties={
- 'version':sa.orm.composite(Version, graphs.c.id,
- graphs.c.version_id)})
-
+ 'version': sa.orm.composite(Version, graphs.c.id,
+ graphs.c.version_id)})
def _fixture(self):
Graph, Version = self.classes.Graph, self.classes.Version
def test_get_by_col(self):
Graph = self.classes.Graph
-
sess = self._fixture()
g = sess.query(Graph).first()
g2 = sess.query(Graph).filter_by(version=Version(2, None)).one()
eq_(g.version, g2.version)
+
class DefaultsTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('foobars', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('x1', Integer, default=2),
- Column('x2', Integer),
- Column('x3', Integer, server_default="15"),
- Column('x4', Integer)
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x1', Integer, default=2),
+ Column('x2', Integer),
+ Column('x3', Integer, server_default="15"),
+ Column('x4', Integer))
@classmethod
def setup_mappers(cls):
self.x2 = x2
self.x3 = x3
self.x4 = x4
+
def __composite_values__(self):
return self.goofy_x1, self.x2, self.x3, self.x4
__hash__ = None
+
def __eq__(self, other):
return other.goofy_x1 == self.goofy_x1 and \
- other.x2 == self.x2 and \
- other.x3 == self.x3 and \
- other.x4 == self.x4
+ other.x2 == self.x2 and \
+ other.x3 == self.x3 and \
+ other.x4 == self.x4
+
def __ne__(self, other):
return not self.__eq__(other)
+
def __repr__(self):
return "FBComposite(%r, %r, %r, %r)" % (
self.goofy_x1, self.x2, self.x3, self.x4
)
mapper(Foobar, foobars, properties=dict(
foob=sa.orm.composite(FBComposite,
- foobars.c.x1,
- foobars.c.x2,
- foobars.c.x3,
- foobars.c.x4)
+ foobars.c.x1,
+ foobars.c.x2,
+ foobars.c.x3,
+ foobars.c.x4)
))
def test_attributes_with_defaults(self):
Foobar, FBComposite = self.classes.Foobar, self.classes.FBComposite
-
sess = Session()
f1 = Foobar()
f1.foob = FBComposite(None, 5, None, None)
@classmethod
def define_tables(cls, metadata):
Table('descriptions', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('d1', String(20)),
- Column('d2', String(20)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('d1', String(20)),
+ Column('d2', String(20)))
Table('values', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('description_id', Integer,
- ForeignKey('descriptions.id'),
- nullable=False),
- Column('v1', String(20)),
- Column('v2', String(20)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('description_id', Integer,
+ ForeignKey('descriptions.id'),
+ nullable=False),
+ Column('v1', String(20)),
+ Column('v2', String(20)))
@classmethod
def setup_mappers(cls):
mapper(Descriptions, descriptions, properties={
'values': relationship(Values, lazy='dynamic'),
'custom_descriptions': composite(
- CustomValues,
- descriptions.c.d1,
- descriptions.c.d2),
+ CustomValues,
+ descriptions.c.d1,
+ descriptions.c.d2),
})
mapper(Values, desc_values, properties={
'custom_values': composite(CustomValues,
- desc_values.c.v1,
- desc_values.c.v2),
+ desc_values.c.v1,
+ desc_values.c.v2),
})
def test_set_composite_attrs_via_selectable(self):
- Values, CustomValues, values, Descriptions, descriptions = (self.classes.Values,
- self.classes.CustomValues,
- self.tables.values,
- self.classes.Descriptions,
- self.tables.descriptions)
+ Values, CustomValues, values, Descriptions, descriptions = \
+ (self.classes.Values,
+ self.classes.CustomValues,
+ self.tables.values,
+ self.classes.Descriptions,
+ self.tables.descriptions)
session = Session()
d = Descriptions(
- custom_descriptions = CustomValues('Color', 'Number'),
- values =[
- Values(custom_values = CustomValues('Red', '5')),
+ custom_descriptions=CustomValues('Color', 'Number'),
+ values=[
+ Values(custom_values=CustomValues('Red', '5')),
Values(custom_values=CustomValues('Blue', '1'))
]
)
[(1, 1, 'Red', '5'), (2, 1, 'Blue', '1')]
)
+
class ManyToOneTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('a',
- metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('b1', String(20)),
- Column('b2_id', Integer, ForeignKey('b.id'))
- )
+ metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('b1', String(20)),
+ Column('b2_id', Integer, ForeignKey('b.id')))
Table('b', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(20))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(20)))
@classmethod
def setup_mappers(cls):
class A(cls.Comparable):
pass
+
class B(cls.Comparable):
pass
other.b1 == self.b1 and \
other.b2 == self.b2
-
mapper(A, a, properties={
- 'b2':relationship(B),
- 'c':composite(C, 'b1', 'b2')
+ 'b2': relationship(B),
+ 'c': composite(C, 'b1', 'b2')
})
mapper(B, b)
A = self.classes.A
A.c.__clause_element__()
-
def test_persist(self):
A, C, B = (self.classes.A,
- self.classes.C,
- self.classes.B)
+ self.classes.C,
+ self.classes.B)
sess = Session()
sess.add(A(c=C('b1', B(data='b2'))))
def test_query(self):
A, C, B = (self.classes.A,
- self.classes.C,
- self.classes.B)
+ self.classes.C,
+ self.classes.B)
sess = Session()
b1, b2 = B(data='b1'), B(data='b2')
def test_query_aliased(self):
A, C, B = (self.classes.A,
- self.classes.C,
- self.classes.B)
+ self.classes.C,
+ self.classes.B)
sess = Session()
b1, b2 = B(data='b1'), B(data='b2')
a2
)
+
class ConfigurationTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('edge', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('x1', Integer),
- Column('y1', Integer),
- Column('x2', Integer),
- Column('y2', Integer),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x1', Integer),
+ Column('y1', Integer),
+ Column('x2', Integer),
+ Column('y2', Integer))
@classmethod
def setup_mappers(cls):
def __init__(self, x, y):
self.x = x
self.y = y
+
def __composite_values__(self):
return [self.x, self.y]
+
def __eq__(self, other):
return isinstance(other, Point) and \
- other.x == self.x and \
- other.y == self.y
+ other.x == self.x and \
+ other.y == self.y
+
def __ne__(self, other):
return not isinstance(other, Point) or \
not self.__eq__(other)
def test_columns(self):
edge, Edge, Point = (self.tables.edge,
- self.classes.Edge,
- self.classes.Point)
+ self.classes.Edge,
+ self.classes.Point)
mapper(Edge, edge, properties={
- 'start':sa.orm.composite(Point, edge.c.x1, edge.c.y1),
+ 'start': sa.orm.composite(Point, edge.c.x1, edge.c.y1),
'end': sa.orm.composite(Point, edge.c.x2, edge.c.y2)
})
def test_attributes(self):
edge, Edge, Point = (self.tables.edge,
- self.classes.Edge,
- self.classes.Point)
+ self.classes.Edge,
+ self.classes.Point)
m = mapper(Edge, edge)
m.add_property('start', sa.orm.composite(Point, Edge.x1, Edge.y1))
def test_strings(self):
edge, Edge, Point = (self.tables.edge,
- self.classes.Edge,
- self.classes.Point)
+ self.classes.Edge,
+ self.classes.Point)
m = mapper(Edge, edge)
m.add_property('start', sa.orm.composite(Point, 'x1', 'y1'))
def test_deferred(self):
edge, Edge, Point = (self.tables.edge,
- self.classes.Edge,
- self.classes.Point)
+ self.classes.Edge,
+ self.classes.Point)
mapper(Edge, edge, properties={
- 'start':sa.orm.composite(Point, edge.c.x1, edge.c.y1,
- deferred=True, group='s'),
+ 'start': sa.orm.composite(Point, edge.c.x1, edge.c.y1,
+ deferred=True, group='s'),
'end': sa.orm.composite(Point, edge.c.x2, edge.c.y2,
- deferred=True)
+ deferred=True)
})
self._test_roundtrip()
def test_check_prop_type(self):
edge, Edge, Point = (self.tables.edge,
- self.classes.Edge,
- self.classes.Point)
+ self.classes.Edge,
+ self.classes.Point)
mapper(Edge, edge, properties={
'start': sa.orm.composite(Point, (edge.c.x1,), edge.c.y1),
})
configure_mappers
)
+
class ComparatorTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@classmethod
def define_tables(cls, metadata):
Table('edge', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('x1', Integer),
- Column('y1', Integer),
- Column('x2', Integer),
- Column('y2', Integer),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x1', Integer),
+ Column('y1', Integer),
+ Column('x2', Integer),
+ Column('y2', Integer))
@classmethod
def setup_mappers(cls):
def __init__(self, x, y):
self.x = x
self.y = y
+
def __composite_values__(self):
return [self.x, self.y]
+
def __eq__(self, other):
return isinstance(other, Point) and \
- other.x == self.x and \
- other.y == self.y
+ other.x == self.x and \
+ other.y == self.y
+
def __ne__(self, other):
return not isinstance(other, Point) or \
not self.__eq__(other)
def __eq__(self, other):
return isinstance(other, Edge) and \
- other.id == self.id
+ other.id == self.id
def _fixture(self, custom):
edge, Edge, Point = (self.tables.edge,
- self.classes.Edge,
- self.classes.Point)
+ self.classes.Edge,
+ self.classes.Point)
if custom:
class CustomComparator(sa.orm.CompositeProperty.Comparator):
def _test_comparator_behavior(self):
Edge, Point = (self.classes.Edge,
- self.classes.Point)
+ self.classes.Point)
sess = Session()
e1 = Edge(Point(3, 4), Point(5, 6))
sess.commit()
assert sess.query(Edge).\
- filter(Edge.start==Point(3, 4)).one() is \
- e1
+ filter(Edge.start == Point(3, 4)).one() is \
+ e1
assert sess.query(Edge).\
- filter(Edge.start!=Point(3, 4)).first() is \
- e2
+ filter(Edge.start != Point(3, 4)).first() is \
+ e2
eq_(
- sess.query(Edge).filter(Edge.start==None).all(),
+ sess.query(Edge).filter(Edge.start == None).all(), # noqa
[]
)
def test_custom_comparator_factory(self):
self._fixture(True)
Edge, Point = (self.classes.Edge,
- self.classes.Point)
+ self.classes.Point)
edge_1, edge_2 = Edge(Point(0, 0), Point(3, 5)), \
- Edge(Point(0, 1), Point(3, 5))
+ Edge(Point(0, 1), Point(3, 5))
sess = Session()
sess.add_all([edge_1, edge_2])
"SELECT edge.id, edge.x1, edge.y1, edge.x2, edge.y2 FROM edge "
"ORDER BY edge.x1, edge.y1"
)
-
from sqlalchemy import Integer, String, ForeignKey
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, backref, \
- create_session, sessionmaker
+ create_session, sessionmaker
from sqlalchemy.testing import eq_, is_
from sqlalchemy.testing.assertsql import RegexSQL, CompiledSQL, AllOf
from sqlalchemy.testing import fixtures
def test_single(self):
C1, t1 = self.classes.C1, self.tables.t1
- mapper(C1, t1, properties = {
- 'c1s':relationship(C1, cascade="all"),
- 'parent':relationship(C1,
- primaryjoin=t1.c.parent_c1 == t1.c.c1,
- remote_side=t1.c.c1,
- lazy='select',
- uselist=False)})
+ mapper(C1, t1, properties={
+ 'c1s': relationship(C1, cascade="all"),
+ 'parent': relationship(C1,
+ primaryjoin=t1.c.parent_c1 == t1.c.c1,
+ remote_side=t1.c.c1,
+ lazy='select',
+ uselist=False)})
a = C1('head c1')
a.c1s.append(C1('another c1'))
- sess = create_session( )
+ sess = create_session()
sess.add(a)
sess.flush()
sess.delete(a)
C1, t1 = self.classes.C1, self.tables.t1
mapper(C1, t1, properties={
- 'parent':relationship(C1,
- primaryjoin=t1.c.parent_c1 == t1.c.c1,
- remote_side=t1.c.c1)})
+ 'parent': relationship(C1,
+ primaryjoin=t1.c.parent_c1 == t1.c.c1,
+ remote_side=t1.c.c1)})
c1 = C1()
c2.parent = c1
sess.add(c2)
sess.flush()
- assert c2.parent_c1==c1.c1
+ assert c2.parent_c1 == c1.c1
def test_cycle(self):
C2, C1, t2, t1 = (self.classes.C2,
- self.classes.C1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.C1,
+ self.tables.t2,
+ self.tables.t1)
- mapper(C1, t1, properties = {
- 'c1s' : relationship(C1, cascade="all"),
- 'c2s' : relationship(mapper(C2, t2), cascade="all, delete-orphan")})
+ mapper(C1, t1, properties={
+ 'c1s': relationship(C1, cascade="all"),
+ 'c2s': relationship(mapper(C2, t2), cascade="all, delete-orphan")})
a = C1('head c1')
a.c1s.append(C1('child1'))
a.c1s[0].c1s.append(C1('subchild2'))
a.c1s[1].c2s.append(C2('child2 data1'))
a.c1s[1].c2s.append(C2('child2 data2'))
- sess = create_session( )
+ sess = create_session()
sess.add(a)
sess.flush()
C1, t1 = self.classes.C1, self.tables.t1
mapper(C1, t1, properties={
- 'children':relationship(C1)
+ 'children': relationship(C1)
})
sess = create_session()
sess.expire_all()
assert c2.parent_c1 is None
+
class SelfReferentialNoPKTest(fixtures.MappedTest):
- """A self-referential relationship that joins on a column other than the primary key column"""
+ """A self-referential relationship that joins on a column other than the
+ primary key column"""
@classmethod
def define_tables(cls, metadata):
Table('item', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('uuid', String(32), unique=True, nullable=False),
- Column('parent_uuid', String(32), ForeignKey('item.uuid'),
- nullable=True))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('uuid', String(32), unique=True, nullable=False),
+ Column('parent_uuid', String(32), ForeignKey('item.uuid'),
+ nullable=True))
@classmethod
def setup_classes(cls):
@classmethod
def define_tables(cls, metadata):
Table("parent", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("parent_data", String(50)),
- Column("type", String(10)))
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("parent_data", String(50)),
+ Column("type", String(10)))
Table("child1", metadata,
Column("id", Integer, ForeignKey("parent.id"), primary_key=True),
Column("child1_data", String(50)))
Table("child2", metadata,
- Column("id", Integer, ForeignKey("parent.id"), primary_key=True),
- Column("child1_id", Integer, ForeignKey("child1.id"),
- nullable=False),
- Column("child2_data", String(50)))
+ Column("id", Integer, ForeignKey("parent.id"), primary_key=True),
+ Column("child1_id", Integer, ForeignKey("child1.id"),
+ nullable=False),
+ Column("child2_data", String(50)))
@classmethod
def setup_classes(cls):
@classmethod
def setup_mappers(cls):
child1, child2, parent, Parent, Child1, Child2 = (cls.tables.child1,
- cls.tables.child2,
- cls.tables.parent,
- cls.classes.Parent,
- cls.classes.Child1,
- cls.classes.Child2)
+ cls.tables.child2,
+ cls.tables.parent,
+ cls.classes.Parent,
+ cls.classes.Child1,
+ cls.classes.Child2)
mapper(Parent, parent)
mapper(Child1, child1, inherits=Parent)
mapper(Child2, child2, inherits=Parent, properties=dict(
- child1=relationship(Child1,
- primaryjoin=child2.c.child1_id == child1.c.id)))
+ child1=relationship(
+ Child1,
+ primaryjoin=child2.c.child1_id == child1.c.id)))
def test_many_to_one_only(self):
"""test similar to SelfReferentialTest.testmanytooneonly"""
Child1, Child2 = self.classes.Child1, self.classes.Child2
-
session = create_session()
c1 = Child1()
@classmethod
def define_tables(cls, metadata):
Table('a', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('cid', Integer, ForeignKey('c.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('cid', Integer, ForeignKey('c.id')))
Table('b', metadata,
- Column('id', Integer, ForeignKey("a.id"), primary_key=True),
- )
+ Column('id', Integer, ForeignKey("a.id"), primary_key=True))
Table('c', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('aid', Integer,
- ForeignKey('a.id', name="foo")))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('aid', Integer,
+ ForeignKey('a.id', name="foo")))
@classmethod
def setup_classes(cls):
def test_flush(self):
a, A, c, b, C, B = (self.tables.a,
- self.classes.A,
- self.tables.c,
- self.tables.b,
- self.classes.C,
- self.classes.B)
+ self.classes.A,
+ self.tables.c,
+ self.tables.b,
+ self.classes.C,
+ self.classes.B)
mapper(A, a, properties={
- 'cs':relationship(C, primaryjoin=a.c.cid==c.c.id)})
+ 'cs': relationship(C, primaryjoin=a.c.cid == c.c.id)})
mapper(B, b, inherits=A, inherit_condition=b.c.id == a.c.id)
mapper(C, c, properties={
- 'arel':relationship(A, primaryjoin=a.c.id == c.c.aid)})
+ 'arel': relationship(A, primaryjoin=a.c.id == c.c.aid)})
sess = create_session()
bobj = B()
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', String(30)),
- Column('t2id', Integer, ForeignKey('t2.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)),
+ Column('t2id', Integer, ForeignKey('t2.id')))
Table('t2', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', String(30)),
- Column('t1id', Integer,
- ForeignKey('t1.id', name="foo_fk")))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)),
+ Column('t1id', Integer,
+ ForeignKey('t1.id', name="foo_fk")))
Table('t3', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', String(30)),
- Column('t1id', Integer, ForeignKey('t1.id'), nullable=False),
- Column('t2id', Integer, ForeignKey('t2.id'), nullable=False))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)),
+ Column('t1id', Integer, ForeignKey('t1.id'), nullable=False),
+ Column('t2id', Integer, ForeignKey('t2.id'), nullable=False))
@classmethod
def setup_classes(cls):
class T1(cls.Basic):
pass
+
class T2(cls.Basic):
pass
+
class T3(cls.Basic):
pass
@classmethod
def setup_mappers(cls):
t2, T2, T3, t1, t3, T1 = (cls.tables.t2,
- cls.classes.T2,
- cls.classes.T3,
- cls.tables.t1,
- cls.tables.t3,
- cls.classes.T1)
+ cls.classes.T2,
+ cls.classes.T3,
+ cls.tables.t1,
+ cls.tables.t3,
+ cls.classes.T1)
mapper(T1, t1, properties={
- 't2':relationship(T2, primaryjoin=t1.c.t2id == t2.c.id)})
+ 't2': relationship(T2, primaryjoin=t1.c.t2id == t2.c.id)})
mapper(T2, t2, properties={
- 't1':relationship(T1, primaryjoin=t2.c.t1id == t1.c.id)})
+ 't1': relationship(T1, primaryjoin=t2.c.t1id == t1.c.id)})
mapper(T3, t3, properties={
- 't1':relationship(T1),
- 't2':relationship(T2)})
+ 't1': relationship(T1),
+ 't2': relationship(T2)})
def test_reflush(self):
T2, T3, T1 = (self.classes.T2,
- self.classes.T3,
- self.classes.T1)
+ self.classes.T3,
+ self.classes.T1)
o1 = T1()
o1.t2 = T2()
"""A variant on test_reflush()"""
T2, T3, T1 = (self.classes.T2,
- self.classes.T3,
- self.classes.T1)
+ self.classes.T3,
+ self.classes.T1)
o1 = T1()
o1.t2 = T2()
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('c1', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('c1', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('c2', Integer, ForeignKey('t2.c1')))
Table('t2', metadata,
- Column('c1', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('c1', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('c2', Integer,
ForeignKey('t1.c1', name='t1c1_fk')))
def test_cycle(self):
C2, C1, t2, t1 = (self.classes.C2,
- self.classes.C1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.C1,
+ self.tables.t2,
+ self.tables.t1)
mapper(C2, t2, properties={
'c1s': relationship(C1,
- primaryjoin=t2.c.c1 == t1.c.c2,
- uselist=True)})
+ primaryjoin=t2.c.c1 == t1.c.c2,
+ uselist=True)})
mapper(C1, t1, properties={
'c2s': relationship(C2,
- primaryjoin=t1.c.c1 == t2.c.c2,
- uselist=True)})
+ primaryjoin=t1.c.c1 == t2.c.c2,
+ uselist=True)})
a = C1()
b = C2()
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('c1', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('c1', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('c2', Integer, ForeignKey('t2.c1')),
test_needs_autoincrement=True)
Table('t2', metadata,
- Column('c1', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('c1', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('c2', Integer,
ForeignKey('t1.c1', name='t1c1_fq')),
test_needs_autoincrement=True)
Table('t1_data', metadata,
- Column('c1', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('c1', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('t1id', Integer, ForeignKey('t1.c1')),
Column('data', String(20)),
test_needs_autoincrement=True)
@classmethod
def setup_mappers(cls):
t2, t1, C1Data, t1_data, C2, C1 = (cls.tables.t2,
- cls.tables.t1,
- cls.classes.C1Data,
- cls.tables.t1_data,
- cls.classes.C2,
- cls.classes.C1)
+ cls.tables.t1,
+ cls.classes.C1Data,
+ cls.tables.t1_data,
+ cls.classes.C2,
+ cls.classes.C1)
mapper(C2, t2, properties={
'c1s': relationship(C1,
- primaryjoin=t2.c.c1 == t1.c.c2,
- uselist=True)})
+ primaryjoin=t2.c.c1 == t1.c.c2,
+ uselist=True)})
mapper(C1, t1, properties={
'c2s': relationship(C2,
- primaryjoin=t1.c.c1 == t2.c.c2,
- uselist=True),
+ primaryjoin=t1.c.c1 == t2.c.c2,
+ uselist=True),
'data': relationship(mapper(C1Data, t1_data))})
def test_cycle(self):
C2, C1, C1Data = (self.classes.C2,
- self.classes.C1,
- self.classes.C1Data)
+ self.classes.C1,
+ self.classes.C1Data)
a = C1()
b = C2()
sess.delete(c)
sess.flush()
+
class OneToManyManyToOneTest(fixtures.MappedTest):
"""
@classmethod
def define_tables(cls, metadata):
Table('ball', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('person_id', Integer,
ForeignKey('person.id', name='fk_person_id')),
Column('data', String(30)))
Table('person', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('favorite_ball_id', Integer, ForeignKey('ball.id')),
Column('data', String(30)))
"""
person, ball, Ball, Person = (self.tables.person,
- self.tables.ball,
- self.classes.Ball,
- self.classes.Person)
+ self.tables.ball,
+ self.classes.Ball,
+ self.classes.Person)
mapper(Ball, ball)
mapper(Person, person, properties=dict(
balls=relationship(Ball,
- primaryjoin=ball.c.person_id == person.c.id,
- remote_side=ball.c.person_id),
- favorite=relationship(Ball,
- primaryjoin=person.c.favorite_ball_id == ball.c.id,
- remote_side=ball.c.id)))
+ primaryjoin=ball.c.person_id == person.c.id,
+ remote_side=ball.c.person_id),
+ favorite=relationship(
+ Ball,
+ primaryjoin=person.c.favorite_ball_id == ball.c.id,
+ remote_side=ball.c.id)))
b = Ball()
p = Person()
"""A cycle between two rows, with a post_update on the many-to-one"""
person, ball, Ball, Person = (self.tables.person,
- self.tables.ball,
- self.classes.Ball,
- self.classes.Person)
+ self.tables.ball,
+ self.classes.Ball,
+ self.classes.Person)
mapper(Ball, ball)
mapper(Person, person, properties=dict(
balls=relationship(Ball,
- primaryjoin=ball.c.person_id == person.c.id,
- remote_side=ball.c.person_id,
- post_update=False,
- cascade="all, delete-orphan"),
- favorite=relationship(Ball,
- primaryjoin=person.c.favorite_ball_id == ball.c.id,
- remote_side=person.c.favorite_ball_id,
- post_update=True)))
+ primaryjoin=ball.c.person_id == person.c.id,
+ remote_side=ball.c.person_id,
+ post_update=False,
+ cascade="all, delete-orphan"),
+ favorite=relationship(
+ Ball,
+ primaryjoin=person.c.favorite_ball_id == ball.c.id,
+ remote_side=person.c.favorite_ball_id,
+ post_update=True)))
b = Ball(data='some data')
p = Person(data='some data')
self.assert_sql_execution(
testing.db,
sess.flush,
- RegexSQL("^INSERT INTO person", {'data':'some data'}),
- RegexSQL("^INSERT INTO ball", lambda c: {'person_id':p.id, 'data':'some data'}),
- RegexSQL("^INSERT INTO ball", lambda c: {'person_id':p.id, 'data':'some data'}),
- RegexSQL("^INSERT INTO ball", lambda c: {'person_id':p.id, 'data':'some data'}),
- RegexSQL("^INSERT INTO ball", lambda c: {'person_id':p.id, 'data':'some data'}),
+ RegexSQL("^INSERT INTO person", {'data': 'some data'}),
+ RegexSQL("^INSERT INTO ball", lambda c: {
+ 'person_id': p.id, 'data': 'some data'}),
+ RegexSQL("^INSERT INTO ball", lambda c: {
+ 'person_id': p.id, 'data': 'some data'}),
+ RegexSQL("^INSERT INTO ball", lambda c: {
+ 'person_id': p.id, 'data': 'some data'}),
+ RegexSQL("^INSERT INTO ball", lambda c: {
+ 'person_id': p.id, 'data': 'some data'}),
CompiledSQL("UPDATE person SET favorite_ball_id=:favorite_ball_id "
"WHERE person.id = :person_id",
- lambda ctx:{'favorite_ball_id':p.favorite.id, 'person_id':p.id}
- ),
+ lambda ctx: {
+ 'favorite_ball_id': p.favorite.id,
+ 'person_id': p.id}
+ ),
)
sess.delete(p)
testing.db,
sess.flush,
CompiledSQL("UPDATE person SET favorite_ball_id=:favorite_ball_id "
- "WHERE person.id = :person_id",
- lambda ctx: {'person_id': p.id, 'favorite_ball_id': None}),
- CompiledSQL("DELETE FROM ball WHERE ball.id = :id", None), # lambda ctx:[{'id': 1L}, {'id': 4L}, {'id': 3L}, {'id': 2L}])
- CompiledSQL("DELETE FROM person WHERE person.id = :id", lambda ctx:[{'id': p.id}])
+ "WHERE person.id = :person_id",
+ lambda ctx: {'person_id': p.id,
+ 'favorite_ball_id': None}),
+ # lambda ctx:[{'id': 1L}, {'id': 4L}, {'id': 3L}, {'id': 2L}])
+ CompiledSQL("DELETE FROM ball WHERE ball.id = :id", None),
+ CompiledSQL("DELETE FROM person WHERE person.id = :id",
+ lambda ctx: [{'id': p.id}])
)
def test_post_update_backref(self):
"""test bidirectional post_update."""
person, ball, Ball, Person = (self.tables.person,
- self.tables.ball,
- self.classes.Ball,
- self.classes.Person)
-
+ self.tables.ball,
+ self.classes.Ball,
+ self.classes.Person)
mapper(Ball, ball)
mapper(Person, person, properties=dict(
balls=relationship(Ball,
- primaryjoin=ball.c.person_id == person.c.id,
- remote_side=ball.c.person_id, post_update=True,
- backref=backref('person', post_update=True)
- ),
- favorite=relationship(Ball,
- primaryjoin=person.c.favorite_ball_id == ball.c.id,
- remote_side=person.c.favorite_ball_id)
-
- ))
+ primaryjoin=ball.c.person_id == person.c.id,
+ remote_side=ball.c.person_id, post_update=True,
+ backref=backref('person', post_update=True)
+ ),
+ favorite=relationship(
+ Ball,
+ primaryjoin=person.c.favorite_ball_id == ball.c.id,
+ remote_side=person.c.favorite_ball_id)
+ ))
sess = sessionmaker()()
p1 = Person(data='p1')
p3, b1.person
)
-
-
def test_post_update_o2m(self):
"""A cycle between two rows, with a post_update on the one-to-many"""
person, ball, Ball, Person = (self.tables.person,
- self.tables.ball,
- self.classes.Ball,
- self.classes.Person)
-
+ self.tables.ball,
+ self.classes.Ball,
+ self.classes.Person)
mapper(Ball, ball)
mapper(Person, person, properties=dict(
balls=relationship(Ball,
- primaryjoin=ball.c.person_id == person.c.id,
- remote_side=ball.c.person_id,
- cascade="all, delete-orphan",
- post_update=True,
- backref='person'),
- favorite=relationship(Ball,
- primaryjoin=person.c.favorite_ball_id == ball.c.id,
- remote_side=person.c.favorite_ball_id)))
+ primaryjoin=ball.c.person_id == person.c.id,
+ remote_side=ball.c.person_id,
+ cascade="all, delete-orphan",
+ post_update=True,
+ backref='person'),
+ favorite=relationship(
+ Ball,
+ primaryjoin=person.c.favorite_ball_id == ball.c.id,
+ remote_side=person.c.favorite_ball_id)))
b = Ball(data='some data')
p = Person(data='some data')
p.balls.append(b4)
p.favorite = b
sess = create_session()
- sess.add_all((b,p,b2,b3,b4))
+ sess.add_all((b, p, b2, b3, b4))
self.assert_sql_execution(
testing.db,
sess.flush,
CompiledSQL("INSERT INTO ball (person_id, data) "
- "VALUES (:person_id, :data)",
- {'person_id':None, 'data':'some data'}),
+ "VALUES (:person_id, :data)",
+ {'person_id': None, 'data': 'some data'}),
CompiledSQL("INSERT INTO ball (person_id, data) "
- "VALUES (:person_id, :data)",
- {'person_id':None, 'data':'some data'}),
+ "VALUES (:person_id, :data)",
+ {'person_id': None, 'data': 'some data'}),
CompiledSQL("INSERT INTO ball (person_id, data) "
- "VALUES (:person_id, :data)",
- {'person_id':None, 'data':'some data'}),
+ "VALUES (:person_id, :data)",
+ {'person_id': None, 'data': 'some data'}),
CompiledSQL("INSERT INTO ball (person_id, data) "
- "VALUES (:person_id, :data)",
- {'person_id':None, 'data':'some data'}),
+ "VALUES (:person_id, :data)",
+ {'person_id': None, 'data': 'some data'}),
CompiledSQL("INSERT INTO person (favorite_ball_id, data) "
- "VALUES (:favorite_ball_id, :data)",
- lambda ctx:{'favorite_ball_id':b.id, 'data':'some data'}),
-
- CompiledSQL("UPDATE ball SET person_id=:person_id "
- "WHERE ball.id = :ball_id",
- lambda ctx:[
- {'person_id':p.id,'ball_id':b.id},
- {'person_id':p.id,'ball_id':b2.id},
- {'person_id':p.id,'ball_id':b3.id},
- {'person_id':p.id,'ball_id':b4.id}
- ]
- ),
+ "VALUES (:favorite_ball_id, :data)",
+ lambda ctx: {'favorite_ball_id': b.id,
+ 'data': 'some data'}),
+ CompiledSQL("UPDATE ball SET person_id=:person_id "
+ "WHERE ball.id = :ball_id",
+ lambda ctx: [
+ {'person_id': p.id, 'ball_id': b.id},
+ {'person_id': p.id, 'ball_id': b2.id},
+ {'person_id': p.id, 'ball_id': b3.id},
+ {'person_id': p.id, 'ball_id': b4.id}
+ ]),
)
sess.delete(p)
self.assert_sql_execution(testing.db, sess.flush,
- CompiledSQL("UPDATE ball SET person_id=:person_id "
- "WHERE ball.id = :ball_id",
- lambda ctx:[
- {'person_id': None, 'ball_id': b.id},
- {'person_id': None, 'ball_id': b2.id},
- {'person_id': None, 'ball_id': b3.id},
- {'person_id': None, 'ball_id': b4.id}
- ]
- ),
- CompiledSQL("DELETE FROM person WHERE person.id = :id",
- lambda ctx:[{'id':p.id}]),
-
- CompiledSQL("DELETE FROM ball WHERE ball.id = :id",
- lambda ctx:[{'id': b.id},
- {'id': b2.id},
- {'id': b3.id},
- {'id': b4.id}])
- )
+ CompiledSQL(
+ "UPDATE ball SET person_id=:person_id "
+ "WHERE ball.id = :ball_id",
+ lambda ctx: [
+ {'person_id': None,
+ 'ball_id': b.id},
+ {'person_id': None,
+ 'ball_id': b2.id},
+ {'person_id': None,
+ 'ball_id': b3.id},
+ {'person_id': None,
+ 'ball_id': b4.id}
+ ]
+ ),
+ CompiledSQL(
+ "DELETE FROM person "
+ "WHERE person.id = :id",
+ lambda ctx: [{'id': p.id}]),
+
+ CompiledSQL(
+ "DELETE FROM ball WHERE ball.id = :id",
+ lambda ctx: [{'id': b.id},
+ {'id': b2.id},
+ {'id': b3.id},
+ {'id': b4.id}])
+ )
def test_post_update_m2o_detect_none(self):
person, ball, Ball, Person = (
testing.db,
sess.flush,
CompiledSQL(
- "UPDATE ball SET person_id=:person_id WHERE ball.id = :ball_id",
+ "UPDATE ball SET person_id=:person_id "
+ "WHERE ball.id = :ball_id",
lambda ctx: {'person_id': None, 'ball_id': b1.id})
)
node, Node = self.tables.node, self.classes.Node
-
mapper(Node, node, properties={
'children': relationship(
Node,
- primaryjoin=node.c.id==node.c.parent_id,
+ primaryjoin=node.c.id == node.c.parent_id,
cascade="all",
backref=backref("parent", remote_side=node.c.id)
),
'prev_sibling': relationship(
Node,
- primaryjoin=node.c.prev_sibling_id==node.c.id,
+ primaryjoin=node.c.prev_sibling_id == node.c.id,
remote_side=node.c.id,
uselist=False),
'next_sibling': relationship(
Node,
- primaryjoin=node.c.next_sibling_id==node.c.id,
+ primaryjoin=node.c.next_sibling_id == node.c.id,
remote_side=node.c.id,
uselist=False,
post_update=True)})
testing.db,
session.flush,
AllOf(
- CompiledSQL("UPDATE node SET prev_sibling_id=:prev_sibling_id "
- "WHERE node.id = :node_id",
- lambda ctx:{'prev_sibling_id':about.id, 'node_id':stories.id}),
-
- CompiledSQL("UPDATE node SET next_sibling_id=:next_sibling_id "
- "WHERE node.id = :node_id",
- lambda ctx:{'next_sibling_id':stories.id, 'node_id':about.id}),
-
- CompiledSQL("UPDATE node SET next_sibling_id=:next_sibling_id "
- "WHERE node.id = :node_id",
- lambda ctx:{'next_sibling_id':None, 'node_id':cats.id}),
+ CompiledSQL("UPDATE node SET prev_sibling_id=:prev_sibling_id "
+ "WHERE node.id = :node_id",
+ lambda ctx: {'prev_sibling_id': about.id,
+ 'node_id': stories.id}),
+
+ CompiledSQL("UPDATE node SET next_sibling_id=:next_sibling_id "
+ "WHERE node.id = :node_id",
+ lambda ctx: {'next_sibling_id': stories.id,
+ 'node_id': about.id}),
+
+ CompiledSQL("UPDATE node SET next_sibling_id=:next_sibling_id "
+ "WHERE node.id = :node_id",
+ lambda ctx: {'next_sibling_id': None,
+ 'node_id': cats.id}),
),
CompiledSQL("DELETE FROM node WHERE node.id = :id",
- lambda ctx:[{'id':cats.id}])
+ lambda ctx: [{'id': cats.id}])
)
session.delete(root)
testing.db,
session.flush,
CompiledSQL("UPDATE node SET next_sibling_id=:next_sibling_id "
- "WHERE node.id = :node_id",
- lambda ctx: [
+ "WHERE node.id = :node_id",
+ lambda ctx: [
{'node_id': about.id, 'next_sibling_id': None},
{'node_id': stories.id, 'next_sibling_id': None}
]
- ),
+ ),
AllOf(
CompiledSQL("DELETE FROM node WHERE node.id = :id",
- lambda ctx:{'id':about.id}
- ),
+ lambda ctx: {'id': about.id}
+ ),
CompiledSQL("DELETE FROM node WHERE node.id = :id",
- lambda ctx:{'id':stories.id}
- ),
+ lambda ctx: {'id': stories.id}
+ ),
CompiledSQL("DELETE FROM node WHERE node.id = :id",
- lambda ctx:{'id':bruce.id}
- ),
+ lambda ctx: {'id': bruce.id}
+ ),
),
CompiledSQL("DELETE FROM node WHERE node.id = :id",
- lambda ctx:{'id':root.id}
- ),
+ lambda ctx: {'id': root.id}
+ ),
)
about = Node('about')
cats = Node('cats')
cats.prev_sibling = None
session.flush()
+
class SelfReferentialPostUpdateTest2(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("a_table", metadata,
- Column("id", Integer(), primary_key=True, test_needs_autoincrement=True),
+ Column("id", Integer(), primary_key=True,
+ test_needs_autoincrement=True),
Column("fui", String(128)),
Column("b", Integer(), ForeignKey("a_table.id")))
A, a_table = self.classes.A, self.tables.a_table
-
mapper(A, a_table, properties={
'foo': relationship(A,
- remote_side=[a_table.c.id],
- post_update=True)})
+ remote_side=[a_table.c.id],
+ post_update=True)})
session = create_session()
ForeignKey('child.id', name='c1'), nullable=True))
Table('child', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50), nullable=False),
- Column('child_id', Integer,
- ForeignKey('child.id')),
- Column('parent_id', Integer,
- ForeignKey('parent.id'), nullable=True))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50), nullable=False),
+ Column('child_id', Integer,
+ ForeignKey('child.id')),
+ Column('parent_id', Integer,
+ ForeignKey('parent.id'), nullable=True))
@classmethod
def setup_classes(cls):
def test_one(self):
Child, Parent, parent, child = (self.classes.Child,
- self.classes.Parent,
- self.tables.parent,
- self.tables.child)
+ self.classes.Parent,
+ self.tables.parent,
+ self.tables.child)
mapper(Parent, parent, properties={
- 'children':relationship(Child, primaryjoin=parent.c.id==child.c.parent_id),
- 'child':relationship(Child, primaryjoin=parent.c.child_id==child.c.id, post_update=True)
+ 'children': relationship(
+ Child,
+ primaryjoin=parent.c.id == child.c.parent_id),
+ 'child': relationship(
+ Child,
+ primaryjoin=parent.c.child_id == child.c.id, post_update=True)
})
mapper(Child, child, properties={
- 'parent':relationship(Child, remote_side=child.c.id)
+ 'parent': relationship(Child, remote_side=child.c.id)
})
session = create_session()
p1 = Parent('p1')
c1 = Child('c1')
c2 = Child('c2')
- p1.children =[c1, c2]
+ p1.children = [c1, c2]
c2.parent = c1
p1.child = c2
p2.child = None
session.flush()
+
class PostUpdateBatchingTest(fixtures.MappedTest):
- """test that lots of post update cols batch together into a single UPDATE."""
+ """test that lots of post update cols batch together into a single UPDATE.
+ """
@classmethod
def define_tables(cls, metadata):
Column('c1_id', Integer,
ForeignKey('child1.id', name='c1'), nullable=True),
Column('c2_id', Integer,
- ForeignKey('child2.id', name='c2'), nullable=True),
+ ForeignKey('child2.id', name='c2'), nullable=True),
Column('c3_id', Integer,
- ForeignKey('child3.id', name='c3'), nullable=True)
- )
+ ForeignKey('child3.id', name='c3'), nullable=True)
+ )
Table('child1', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50), nullable=False),
- Column('parent_id', Integer,
- ForeignKey('parent.id'), nullable=False))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50), nullable=False),
+ Column('parent_id', Integer,
+ ForeignKey('parent.id'), nullable=False))
Table('child2', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50), nullable=False),
- Column('parent_id', Integer,
- ForeignKey('parent.id'), nullable=False))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50), nullable=False),
+ Column('parent_id', Integer,
+ ForeignKey('parent.id'), nullable=False))
Table('child3', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50), nullable=False),
- Column('parent_id', Integer,
- ForeignKey('parent.id'), nullable=False))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50), nullable=False),
+ Column('parent_id', Integer,
+ ForeignKey('parent.id'), nullable=False))
@classmethod
def setup_classes(cls):
class Parent(cls.Basic):
def __init__(self, name=''):
self.name = name
+
class Child1(cls.Basic):
def __init__(self, name=''):
self.name = name
+
class Child2(cls.Basic):
def __init__(self, name=''):
self.name = name
+
class Child3(cls.Basic):
def __init__(self, name=''):
self.name = name
def test_one(self):
- child1, child2, child3, Parent, parent, Child1, Child2, Child3 = (self.tables.child1,
- self.tables.child2,
- self.tables.child3,
- self.classes.Parent,
- self.tables.parent,
- self.classes.Child1,
- self.classes.Child2,
- self.classes.Child3)
+ child1, child2, child3, Parent, parent, Child1, Child2, Child3 = (
+ self.tables.child1,
+ self.tables.child2,
+ self.tables.child3,
+ self.classes.Parent,
+ self.tables.parent,
+ self.classes.Child1,
+ self.classes.Child2,
+ self.classes.Child3)
mapper(Parent, parent, properties={
- 'c1s':relationship(Child1, primaryjoin=child1.c.parent_id==parent.c.id),
- 'c2s':relationship(Child2, primaryjoin=child2.c.parent_id==parent.c.id),
- 'c3s':relationship(Child3, primaryjoin=child3.c.parent_id==parent.c.id),
-
- 'c1':relationship(Child1, primaryjoin=child1.c.id==parent.c.c1_id, post_update=True),
- 'c2':relationship(Child2, primaryjoin=child2.c.id==parent.c.c2_id, post_update=True),
- 'c3':relationship(Child3, primaryjoin=child3.c.id==parent.c.c3_id, post_update=True),
+ 'c1s': relationship(
+ Child1,
+ primaryjoin=child1.c.parent_id == parent.c.id),
+ 'c2s': relationship(
+ Child2,
+ primaryjoin=child2.c.parent_id == parent.c.id),
+ 'c3s': relationship(
+ Child3, primaryjoin=child3.c.parent_id == parent.c.id),
+
+ 'c1': relationship(
+ Child1,
+ primaryjoin=child1.c.id == parent.c.c1_id, post_update=True),
+ 'c2': relationship(
+ Child2,
+ primaryjoin=child2.c.id == parent.c.c2_id, post_update=True),
+ 'c3': relationship(
+ Child3,
+ primaryjoin=child3.c.id == parent.c.c3_id, post_update=True),
})
mapper(Child1, child1)
mapper(Child2, child2)
import sqlalchemy as sa
from sqlalchemy.testing import eq_, assert_raises_message
+
class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
def _assert_fully_loaded(self, users):
# iterated all the items with no sql.
f = util.flatten_iterator
assert any([i.keywords for i in
- f([o.items for o in f([u.orders for u in users])])])
+ f([o.items for o in f([u.orders for u in users])])])
self.assert_sql_count(testing.db, go, 0)
def _assert_addresses_loaded(self, users):
mapper(Item, items, properties=dict(
keywords=relationship(Keyword, secondary=item_keywords,
- lazy='subquery',
- order_by=item_keywords.c.keyword_id)))
+ lazy='subquery',
+ order_by=item_keywords.c.keyword_id)))
mapper(Order, orders, properties=dict(
items=relationship(Item, secondary=order_items, lazy='subquery',
- order_by=order_items.c.item_id)))
+ order_by=order_items.c.item_id)))
mapper(User, users, properties=dict(
addresses=relationship(Address, lazy='joined',
- order_by=addresses.c.id),
+ order_by=addresses.c.id),
orders=relationship(Order, lazy='joined',
- order_by=orders.c.id)))
+ order_by=orders.c.id)))
return create_session()
mapper(Item, items, properties=dict(
keywords=relationship(Keyword, secondary=item_keywords,
- lazy='select',
- order_by=item_keywords.c.keyword_id)))
+ lazy='select',
+ order_by=item_keywords.c.keyword_id)))
mapper(Order, orders, properties=dict(
items=relationship(Item, secondary=order_items, lazy=True,
- order_by=order_items.c.item_id)))
+ order_by=order_items.c.item_id)))
mapper(User, users, properties=dict(
addresses=relationship(Address, lazy=True,
- order_by=addresses.c.id),
+ order_by=addresses.c.id),
orders=relationship(Order,
- order_by=orders.c.id)))
+ order_by=orders.c.id)))
return create_session()
sess = self._downgrade_fixture()
users = []
- # test _downgrade_fixture mapper defaults, 3 queries (2 subquery loads).
+ # test _downgrade_fixture mapper defaults, 3 queries (2 subquery
+ # loads).
def go():
users[:] = sess.query(self.classes.User)\
.order_by(self.classes.User.id)\
# redundant with last test, but illustrative
users[0].orders[0].items[0]
self.assert_sql_count(testing.db, go, 0)
+
def go():
users[0].orders[0].items[0].keywords
self.assert_sql_count(testing.db, go, 1)
def go():
users[0].orders[0].items[0]
self.assert_sql_count(testing.db, go, 0)
+
def go():
users[0].orders[0].items[0].keywords
self.assert_sql_count(testing.db, go, 1)
@classmethod
def define_tables(cls, metadata):
dt = Table('dt', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('col1', String(20)),
Column('col2', String(20),
server_default=sa.schema.FetchedValue()),
"WHERE dt.id IN (SELECT id FROM inserted);",
on='mssql'),
sa.DDL("CREATE TRIGGER dt_ins BEFORE INSERT "
- "ON dt "
- "FOR EACH ROW "
- "BEGIN "
- ":NEW.col2 := 'ins'; :NEW.col4 := 'ins'; END;",
- on='oracle'),
+ "ON dt "
+ "FOR EACH ROW "
+ "BEGIN "
+ ":NEW.col2 := 'ins'; :NEW.col4 := 'ins'; END;",
+ on='oracle'),
sa.DDL("CREATE TRIGGER dt_ins BEFORE INSERT ON dt "
- "FOR EACH ROW BEGIN "
- "SET NEW.col2='ins'; SET NEW.col4='ins'; END",
- on=lambda ddl, event, target, bind, **kw:
- bind.engine.name not in ('oracle', 'mssql', 'sqlite')
- ),
- ):
+ "FOR EACH ROW BEGIN "
+ "SET NEW.col2='ins'; SET NEW.col4='ins'; END",
+ on=lambda ddl, event, target, bind, **kw:
+ bind.engine.name not in ('oracle', 'mssql', 'sqlite')
+ ),
+ ):
event.listen(dt, 'after_create', ins)
event.listen(dt, 'before_drop', sa.DDL("DROP TRIGGER dt_ins"))
"WHERE dt.id IN (SELECT id FROM deleted);",
on='mssql'),
sa.DDL("CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
- "FOR EACH ROW BEGIN "
- ":NEW.col3 := 'up'; :NEW.col4 := 'up'; END;",
- on='oracle'),
+ "FOR EACH ROW BEGIN "
+ ":NEW.col3 := 'up'; :NEW.col4 := 'up'; END;",
+ on='oracle'),
sa.DDL("CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
- "FOR EACH ROW BEGIN "
- "SET NEW.col3='up'; SET NEW.col4='up'; END",
- on=lambda ddl, event, target, bind, **kw:
- bind.engine.name not in ('oracle', 'mssql', 'sqlite')
- ),
- ):
+ "FOR EACH ROW BEGIN "
+ "SET NEW.col3='up'; SET NEW.col4='up'; END",
+ on=lambda ddl, event, target, bind, **kw:
+ bind.engine.name not in ('oracle', 'mssql', 'sqlite')
+ ),
+ ):
event.listen(dt, 'after_create', up)
event.listen(dt, 'before_drop', sa.DDL("DROP TRIGGER dt_up"))
-
@classmethod
def setup_classes(cls):
class Default(cls.Comparable):
def test_insert(self):
Default = self.classes.Default
-
d1 = Default(id=1)
eq_(d1.col1, None)
eq_(d1.col3, 'up')
eq_(d1.col4, 'up')
+
class ExcludedDefaultsTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
dt = Table('dt', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('col1', String(20), default="hello"),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('col1', String(20), default="hello"))
def test_exclude(self):
dt = self.tables.dt
sess.add(f1)
sess.flush()
eq_(dt.select().execute().fetchall(), [(1, "hello")])
-
Order, orders = self.classes.Order, self.tables.orders
-
mapper(Order, orders, properties={
'description': deferred(orders.c.description)})
self.assert_(o.description is None)
q = create_session().query(Order).order_by(Order.id)
+
def go():
- l = q.all()
- o2 = l[2]
+ result = q.all()
+ o2 = result[2]
x = o2.description
self.sql_eq_(go, [
"FROM orders ORDER BY orders.id", {}),
("SELECT orders.description AS orders_description "
"FROM orders WHERE orders.id = :param_1",
- {'param_1':3})])
+ {'param_1': 3})])
def test_defer_primary_key(self):
"""what happens when we try to defer the primary key?"""
Order, orders = self.classes.Order, self.tables.orders
-
mapper(Order, orders, properties={
'id': deferred(orders.c.id)})
q.first
)
-
def test_unsaved(self):
"""Deferred loading does not kick in when just PK cols are set."""
Order, orders = self.classes.Order, self.tables.orders
-
mapper(Order, orders, properties={
'description': deferred(orders.c.description)})
o = Order()
sess.add(o)
o.id = 7
+
def go():
o.description = "some description"
self.sql_count_(0, go)
orders, Order = self.tables.orders, self.classes.Order
mapper(Order, orders, properties={
- 'isopen':synonym('_isopen', map_column=True),
- 'description':deferred(orders.c.description, group='foo')
+ 'isopen': synonym('_isopen', map_column=True),
+ 'description': deferred(orders.c.description, group='foo')
})
sess = create_session()
sess = create_session()
o = Order()
sess.add(o)
+
def go():
o.description = "some description"
self.sql_count_(0, go)
orders, Order = self.tables.orders, self.classes.Order
-
mapper(Order, orders, properties=dict(
description=deferred(orders.c.description, group='primary'),
opened=deferred(orders.c.isopen, group='primary')))
o = Order()
sess.add(o)
o.id = 7
+
def go():
o.description = "some description"
self.sql_count_(0, go)
sess = create_session()
o = Order()
sess.add(o)
+
def go():
o.description = "some description"
self.sql_count_(0, go)
sess = create_session()
q = sess.query(Order).order_by(Order.id)
+
def go():
- l = q.all()
- o2 = l[2]
+ result = q.all()
+ o2 = result[2]
eq_(o2.opened, 1)
eq_(o2.userident, 7)
eq_(o2.description, 'order 3')
"orders.description AS orders_description, "
"orders.isopen AS orders_isopen "
"FROM orders WHERE orders.id = :param_1",
- {'param_1':3})])
+ {'param_1': 3})])
o2 = q.all()[2]
eq_(o2.description, 'order 3')
assert o2 not in sess.dirty
o2.description = 'order 3'
+
def go():
sess.flush()
self.sql_count_(0, go)
def test_preserve_changes(self):
- """A deferred load operation doesn't revert modifications on attributes"""
+ """A deferred load operation doesn't revert modifications on attributes
+ """
orders, Order = self.tables.orders, self.classes.Order
- mapper(Order, orders, properties = {
+ mapper(Order, orders, properties={
'userident': deferred(orders.c.user_id, group='primary'),
'description': deferred(orders.c.description, group='primary'),
'opened': deferred(orders.c.isopen, group='primary')
assert 'userident' not in o.__dict__
o.description = 'somenewdescription'
eq_(o.description, 'somenewdescription')
+
def go():
eq_(o.opened, 1)
self.assert_sql_count(testing.db, go, 1)
orders, Order = self.tables.orders, self.classes.Order
- mapper(Order, orders, properties = {
+ mapper(Order, orders, properties={
'userident': deferred(orders.c.user_id, group='primary'),
'description': deferred(orders.c.description, group='primary'),
'opened': deferred(orders.c.isopen, group='primary')})
Order, orders = self.classes.Order, self.tables.orders
-
order_select = sa.select([
- orders.c.id,
- orders.c.user_id,
- orders.c.address_id,
- orders.c.description,
- orders.c.isopen]).alias()
+ orders.c.id,
+ orders.c.user_id,
+ orders.c.address_id,
+ orders.c.description,
+ orders.c.isopen]).alias()
mapper(Order, order_select, properties={
- 'description':deferred(order_select.c.description)
+ 'description': deferred(order_select.c.description)
})
sess = Session()
orders, Order = self.tables.orders, self.classes.Order
-
mapper(Order, orders)
sess = create_session()
"FROM orders ORDER BY orders.id", {}),
("SELECT orders.user_id AS orders_user_id "
"FROM orders WHERE orders.id = :param_1",
- {'param_1':1})])
+ {'param_1': 1})])
sess.expunge_all()
q2 = q.options(undefer('user_id'))
('userident', deferred(orders.c.user_id, group='primary')),
('description', deferred(orders.c.description, group='primary')),
('opened', deferred(orders.c.isopen, group='primary'))
- ]
- ))
+ ]
+ ))
sess = create_session()
q = sess.query(Order).order_by(Order.id)
+
def go():
- l = q.options(undefer_group('primary')).all()
- o2 = l[2]
+ result = q.options(undefer_group('primary')).all()
+ o2 = result[2]
eq_(o2.opened, 1)
eq_(o2.userident, 7)
eq_(o2.description, 'order 3')
('userident', deferred(orders.c.user_id, group='primary')),
('description', deferred(orders.c.description, group='primary')),
('opened', deferred(orders.c.isopen, group='secondary'))
- ]
- ))
+ ]
+ ))
sess = create_session()
q = sess.query(Order).order_by(Order.id)
+
def go():
- l = q.options(
+ result = q.options(
undefer_group('primary'), undefer_group('secondary')).all()
- o2 = l[2]
+ o2 = result[2]
eq_(o2.opened, 1)
eq_(o2.userident, 7)
eq_(o2.description, 'order 3')
('userident', deferred(orders.c.user_id, group='primary')),
('description', deferred(orders.c.description, group='primary')),
('opened', deferred(orders.c.isopen, group='secondary'))
- ]
- ))
+ ]))
sess = create_session()
q = sess.query(Order).order_by(Order.id)
+
def go():
- l = q.options(
- Load(Order).undefer_group('primary').undefer_group('secondary')).all()
- o2 = l[2]
+ result = q.options(
+ Load(Order).undefer_group('primary').undefer_group('secondary')
+ ).all()
+ o2 = result[2]
eq_(o2.opened, 1)
eq_(o2.userident, 7)
eq_(o2.description, 'order 3')
('userident', deferred(orders.c.user_id)),
('description', deferred(orders.c.description)),
('opened', deferred(orders.c.isopen))
- ]
- ))
+ ]))
sess = create_session()
q = sess.query(Order).options(Load(Order).undefer('*'))
self.assert_compile(q,
- "SELECT orders.user_id AS orders_user_id, "
- "orders.description AS orders_description, "
- "orders.isopen AS orders_isopen, "
- "orders.id AS orders_id, "
- "orders.address_id AS orders_address_id FROM orders"
- )
+ "SELECT orders.user_id AS orders_user_id, "
+ "orders.description AS orders_description, "
+ "orders.isopen AS orders_isopen, "
+ "orders.id AS orders_id, "
+ "orders.address_id AS orders_address_id "
+ "FROM orders")
def test_locates_col(self):
"""changed in 1.0 - we don't search for deferred cols in the result
orders, Order = self.tables.orders, self.classes.Order
-
mapper(Order, orders, properties={
'description': deferred(orders.c.description)})
o1 = (sess.query(Order).
order_by(Order.id).
add_column(orders.c.description).first())[0]
+
def go():
eq_(o1.description, 'order 1')
# prior to 1.0 we'd search in the result for this column
orders, Order = self.tables.orders, self.classes.Order
-
mapper(Order, orders, properties={
'description': deferred(orders.c.description)})
stmt = sa.select([Order]).order_by(Order.id)
o1 = (sess.query(Order).
from_statement(stmt).all())[0]
+
def go():
eq_(o1.description, 'order 1')
# prior to 1.0 we'd search in the result for this column
self.sql_count_(1, go)
def test_deep_options(self):
- users, items, order_items, Order, Item, User, orders = (self.tables.users,
- self.tables.items,
- self.tables.order_items,
- self.classes.Order,
- self.classes.Item,
- self.classes.User,
- self.tables.orders)
+ users, items, order_items, Order, Item, User, orders = \
+ (self.tables.users,
+ self.tables.items,
+ self.tables.order_items,
+ self.classes.Order,
+ self.classes.Item,
+ self.classes.User,
+ self.tables.orders)
mapper(Item, items, properties=dict(
description=deferred(items.c.description)))
sess = create_session()
q = sess.query(User).order_by(User.id)
- l = q.all()
- item = l[0].orders[1].items[1]
+ result = q.all()
+ item = result[0].orders[1].items[1]
+
def go():
eq_(item.description, 'item 4')
self.sql_count_(1, go)
eq_(item.description, 'item 4')
sess.expunge_all()
- l = q.options(undefer('orders.items.description')).all()
- item = l[0].orders[1].items[1]
+ result = q.options(undefer('orders.items.description')).all()
+ item = result[0].orders[1].items[1]
+
def go():
eq_(item.description, 'item 4')
self.sql_count_(0, go)
order_items = self.tables.order_items
mapper(User, users, properties={
- "orders": relationship(Order, lazy="joined")
- })
+ "orders": relationship(Order, lazy="joined")
+ })
mapper(Order, orders, properties={
- "items": relationship(Item, secondary=order_items, lazy="joined")
- })
+ "items": relationship(Item, secondary=order_items, lazy="joined")
+ })
mapper(Item, items)
sess = create_session()
exp = ("SELECT users.id AS users_id, users.name AS users_name, "
- "items_1.id AS items_1_id, orders_1.id AS orders_1_id, "
- "orders_1.user_id AS orders_1_user_id, orders_1.address_id "
- "AS orders_1_address_id, orders_1.description AS "
- "orders_1_description, orders_1.isopen AS orders_1_isopen "
- "FROM users LEFT OUTER JOIN orders AS orders_1 "
- "ON users.id = orders_1.user_id LEFT OUTER JOIN "
- "(order_items AS order_items_1 JOIN items AS items_1 "
- "ON items_1.id = order_items_1.item_id) "
- "ON orders_1.id = order_items_1.order_id")
-
- q = sess.query(User).options(defer(User.orders, Order.items, Item.description))
- self.assert_compile(q, exp)
+ "items_1.id AS items_1_id, orders_1.id AS orders_1_id, "
+ "orders_1.user_id AS orders_1_user_id, orders_1.address_id "
+ "AS orders_1_address_id, orders_1.description AS "
+ "orders_1_description, orders_1.isopen AS orders_1_isopen "
+ "FROM users LEFT OUTER JOIN orders AS orders_1 "
+ "ON users.id = orders_1.user_id LEFT OUTER JOIN "
+ "(order_items AS order_items_1 JOIN items AS items_1 "
+ "ON items_1.id = order_items_1.item_id) "
+ "ON orders_1.id = order_items_1.order_id")
+ q = sess.query(User).options(
+ defer(User.orders, Order.items, Item.description))
+ self.assert_compile(q, exp)
def test_chained_multi_col_options(self):
users, User = self.tables.users, self.classes.User
orders, Order = self.tables.orders, self.classes.Order
mapper(User, users, properties={
- "orders": relationship(Order)
- })
+ "orders": relationship(Order)
+ })
mapper(Order, orders)
sess = create_session()
q = sess.query(User).options(
- joinedload(User.orders).defer("description").defer("isopen")
- )
+ joinedload(User.orders).defer("description").defer("isopen")
+ )
self.assert_compile(q,
- "SELECT users.id AS users_id, users.name AS users_name, "
- "orders_1.id AS orders_1_id, orders_1.user_id AS orders_1_user_id, "
- "orders_1.address_id AS orders_1_address_id FROM users "
- "LEFT OUTER JOIN orders AS orders_1 ON users.id = orders_1.user_id"
- )
+ "SELECT users.id AS users_id, "
+ "users.name AS users_name, "
+ "orders_1.id AS orders_1_id, "
+ "orders_1.user_id AS orders_1_user_id, "
+ "orders_1.address_id AS orders_1_address_id "
+ "FROM users "
+ "LEFT OUTER JOIN orders AS orders_1 "
+ "ON users.id = orders_1.user_id")
def test_load_only_no_pk(self):
orders, Order = self.tables.orders, self.classes.Order
sess = create_session()
q = sess.query(Order).options(load_only("isopen", "description"))
self.assert_compile(q,
- "SELECT orders.id AS orders_id, "
- "orders.description AS orders_description, "
- "orders.isopen AS orders_isopen FROM orders")
+ "SELECT orders.id AS orders_id, "
+ "orders.description AS orders_description, "
+ "orders.isopen AS orders_isopen FROM orders")
def test_load_only_no_pk_rt(self):
orders, Order = self.tables.orders, self.classes.Order
sess = create_session()
q = sess.query(Order).order_by(Order.id).\
- options(load_only("isopen", "description"))
+ options(load_only("isopen", "description"))
eq_(q.first(), Order(id=1))
def test_load_only_w_deferred(self):
orders, Order = self.tables.orders, self.classes.Order
mapper(Order, orders, properties={
- "description": deferred(orders.c.description)
- })
+ "description": deferred(orders.c.description)
+ })
sess = create_session()
q = sess.query(Order).options(
- load_only("isopen", "description"),
- undefer("user_id")
- )
+ load_only("isopen", "description"),
+ undefer("user_id")
+ )
self.assert_compile(q,
- "SELECT orders.description AS orders_description, "
- "orders.id AS orders_id, "
- "orders.user_id AS orders_user_id, "
- "orders.isopen AS orders_isopen FROM orders")
+ "SELECT orders.description AS orders_description, "
+ "orders.id AS orders_id, "
+ "orders.user_id AS orders_user_id, "
+ "orders.isopen AS orders_isopen FROM orders")
def test_load_only_propagate_unbound(self):
self._test_load_only_propagate(False)
addresses = self.tables.addresses
mapper(User, users, properties={
- "addresses": relationship(Address)
- })
+ "addresses": relationship(Address)
+ })
mapper(Address, addresses)
sess = create_session()
expected = [
("SELECT users.id AS users_id, users.name AS users_name "
- "FROM users WHERE users.id IN (:id_1, :id_2)", {'id_2': 8, 'id_1': 7}),
+ "FROM users WHERE users.id IN (:id_1, :id_2)", {'id_2': 8,
+ 'id_1': 7}),
("SELECT addresses.id AS addresses_id, "
"addresses.email_address AS addresses_email_address "
- "FROM addresses WHERE :param_1 = addresses.user_id", {'param_1': 7}),
+ "FROM addresses WHERE :param_1 = addresses.user_id",
+ {'param_1': 7}),
("SELECT addresses.id AS addresses_id, "
"addresses.email_address AS addresses_email_address "
- "FROM addresses WHERE :param_1 = addresses.user_id", {'param_1': 8}),
+ "FROM addresses WHERE :param_1 = addresses.user_id",
+ {'param_1': 8}),
]
if use_load:
- opt = Load(User).defaultload(User.addresses).load_only("id", "email_address")
+ opt = Load(User).defaultload(
+ User.addresses).load_only("id", "email_address")
else:
opt = defaultload(User.addresses).load_only("id", "email_address")
q = sess.query(User).options(opt).filter(User.id.in_([7, 8]))
+
def go():
for user in q:
user.addresses
self.sql_eq_(go, expected)
-
def test_load_only_parent_specific(self):
User = self.classes.User
Address = self.classes.Address
sess = create_session()
q = sess.query(User, Order, Address).options(
- Load(User).load_only("name"),
- Load(Order).load_only("id"),
- Load(Address).load_only("id", "email_address")
- )
+ Load(User).load_only("name"),
+ Load(Order).load_only("id"),
+ Load(Address).load_only("id", "email_address")
+ )
- self.assert_compile(q,
- "SELECT users.id AS users_id, users.name AS users_name, "
+ self.assert_compile(
+ q,
+ "SELECT users.id AS users_id, "
+ "users.name AS users_name, "
"orders.id AS orders_id, "
- "addresses.id AS addresses_id, addresses.email_address "
- "AS addresses_email_address FROM users, orders, addresses"
- )
+ "addresses.id AS addresses_id, "
+ "addresses.email_address AS addresses_email_address "
+ "FROM users, orders, addresses")
def test_load_only_path_specific(self):
User = self.classes.User
orders = self.tables.orders
mapper(User, users, properties=util.OrderedDict([
- ("addresses", relationship(Address, lazy="joined")),
- ("orders", relationship(Order, lazy="joined"))
- ]))
+ ("addresses", relationship(Address, lazy="joined")),
+ ("orders", relationship(Order, lazy="joined"))
+ ]))
mapper(Address, addresses)
mapper(Order, orders)
sess = create_session()
q = sess.query(User).options(
- load_only("name").defaultload("addresses").load_only("id", "email_address"),
- defaultload("orders").load_only("id")
- )
+ load_only("name").defaultload(
+ "addresses").load_only("id", "email_address"),
+ defaultload("orders").load_only("id")
+ )
# hmmmm joinedload seems to be forcing users.id into here...
self.assert_compile(
"managers.person_id) ON companies.company_id = people.company_id"
)
-
def test_defer_on_wildcard_subclass(self):
# pretty much the same as load_only except doesn't
# exclude the primary key
"ON people.person_id = managers.person_id "
"ORDER BY people.person_id"
)
-
-
-
-
@classmethod
def setup_mappers(cls):
- addresses_table, User, users_table, Address = (cls.tables.addresses_table,
- cls.classes.User,
- cls.tables.users_table,
- cls.classes.Address)
+ addresses_table, User, users_table, Address = \
+ (cls.tables.addresses_table,
+ cls.classes.User,
+ cls.tables.users_table,
+ cls.classes.Address)
mapper(User, users_table, properties=dict(
addresses=relationship(Address, backref='user'),
- ))
+ ))
mapper(Address, addresses_table)
@classmethod
def fixtures(cls):
return dict(
users_table=(
- ('id', 'name'),
- (1, 'jack'),
- (2, 'ed'),
- (3, 'fred'),
- (4, 'chuck')),
+ ('id', 'name'),
+ (1, 'jack'),
+ (2, 'ed'),
+ (3, 'fred'),
+ (4, 'chuck')),
addresses_table=(
- ('id', 'user_id', 'email_address', 'purpose', 'bounces'),
- (1, 1, 'jack@jack.home', 'Personal', 0),
- (2, 1, 'jack@jack.bizz', 'Work', 1),
- (3, 2, 'ed@foo.bar', 'Personal', 0),
- (4, 3, 'fred@the.fred', 'Personal', 10)))
-
+ ('id', 'user_id', 'email_address', 'purpose', 'bounces'),
+ (1, 1, 'jack@jack.home', 'Personal', 0),
+ (2, 1, 'jack@jack.bizz', 'Work', 1),
+ (3, 2, 'ed@foo.bar', 'Personal', 0),
+ (4, 3, 'fred@the.fred', 'Personal', 10)))
######################################################################
from sqlalchemy.orm.query import Query
cache = {}
+
class MyQuery(Query):
def get(self, ident, **kwargs):
if ident in cache:
Address = self.classes.Address
-
session = create_session()
ad1 = session.query(Address).populate_existing().get(1)
assert bool(ad1)
-
def test_apply_max(self):
"""Query.apply_max(col)
assert num == 3, num
num = (session.query(User).join('addresses').
- filter(Address.purpose=='Personal')).count()
+ filter(Address.purpose == 'Personal')).count()
assert num == 3, num
def test_count_whereclause(self):
assert user.name == 'ed'
user = (session.query(User).join('addresses').
- filter(Address.email_address=='fred@the.fred')).first()
+ filter(Address.email_address == 'fred@the.fred')).first()
assert user.name == 'fred'
user = session.query(User).filter(
- User.addresses.any(Address.email_address=='fred@the.fred')).first()
+ User.addresses.any(Address.email_address == 'fred@the.fred')
+ ).first()
assert user.name == 'fred'
def test_instances_entities(self):
"""
- addresses_table, User, users_table, Address = (self.tables.addresses_table,
- self.classes.User,
- self.tables.users_table,
- self.classes.Address)
+ addresses_table, User, users_table, Address = \
+ (self.tables.addresses_table,
+ self.classes.User,
+ self.tables.users_table,
+ self.classes.Address)
session = create_session()
session = create_session()
-
def test_join_to(self):
"""Query.join_to(key)
session = create_session()
-
def test_join_via(self):
"""Query.join_via(keys)
session = create_session()
-
def test_list(self):
"""Query.list()
session = create_session()
- user = session.query(User).filter(User.id==1).first()
- assert user.id==1
+ user = session.query(User).filter(User.id == 1).first()
+ assert user.id == 1
def test_select(self):
"""Query.select(arg=None, **kwargs)
session = create_session()
- users = session.query(User).filter(User.name != None).all()
+ users = session.query(User).filter(User.name != None).all() # noqa
assert len(users) == 4
def test_select_by(self):
users = session.query(User).filter_by(name='fred').all()
assert len(users) == 1
- users = session.query(User).filter(User.name=='fred').all()
+ users = session.query(User).filter(User.name == 'fred').all()
assert len(users) == 1
users = (session.query(User).join('addresses').
session = create_session()
- users = session.query(User).filter(User.name=='ed').all()
+ users = session.query(User).filter(User.name == 'ed').all()
assert len(users) == 1 and users[0].name == 'ed'
users = session.query(User).filter(text("name='ed'")).all()
assert len(users) == 1 and users[0].name == 'ed'
-
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import eq_
+
class TestDescriptor(descriptor_props.DescriptorProperty):
def __init__(self, cls, key, descriptor=None, doc=None,
- comparator_factory = None):
+ comparator_factory=None):
self.parent = cls.__mapper__
self.key = key
self.doc = doc
else:
self._comparator_factory = lambda mapper: None
+
class DescriptorInstrumentationTest(fixtures.ORMTest):
def _fixture(self):
Base = declarative_base()
def test_property_wrapped_classlevel(self):
Foo = self._fixture()
- prop = property(lambda self:None)
+ prop = property(lambda self: None)
Foo.foo = prop
d = TestDescriptor(Foo, 'foo')
def method1(self):
return "method1"
- prop = myprop(lambda self:None)
+ prop = myprop(lambda self: None)
Foo.foo = prop
d = TestDescriptor(Foo, 'foo')
"foo = upper(:upper_1)"
)
-
def test_aliased_comparator(self):
class Comparator(ColumnProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
return func.foobar(self.__clause_element__()) ==\
- func.foobar(other)
+ func.foobar(other)
Foo = self._fixture()
Foo._name = Column('name', String)
eq_(
str(Foo.foo == 'ed'),
- "foobar(foo.name) = foobar(:foobar_1)"
+ "foobar(foo.name) = foobar(:foobar_1)"
)
eq_(
str(aliased(Foo).foo == 'ed'),
"foobar(foo_1.name) = foobar(:foobar_1)"
)
-
class _DynamicFixture(object):
def _user_address_fixture(self, addresses_args={}):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(
User, users, properties={
def _order_item_fixture(self, items_args={}):
items, Order, orders, order_items, Item = (self.tables.items,
- self.classes.Order,
- self.tables.orders,
- self.tables.order_items,
- self.classes.Item)
+ self.classes.Order,
+ self.tables.orders,
+ self.tables.order_items,
+ self.classes.Item)
mapper(
Order, orders, properties={
def test_no_m2o(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(
Address, addresses, properties={
'user': relationship(User, lazy='dynamic')})
def test_dynamic_on_backref(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(Address, addresses, properties={
'user': relationship(User,
- backref=backref('addresses', lazy='dynamic'))
+ backref=backref('addresses', lazy='dynamic'))
})
mapper(User, users)
'https://bugs.launchpad.net/ubuntu/+source/mysql-5.1/+bug/706988')
def test_association_nonaliased(self):
items, Order, orders, order_items, Item = (self.tables.items,
- self.classes.Order,
- self.tables.orders,
- self.tables.order_items,
- self.classes.Item)
+ self.classes.Order,
+ self.tables.orders,
+ self.tables.order_items,
+ self.classes.Item)
mapper(Order, orders, properties={
'items': relationship(Item,
- secondary=order_items,
- lazy="dynamic",
- order_by=order_items.c.item_id)
+ secondary=order_items,
+ lazy="dynamic",
+ order_by=order_items.c.item_id)
})
mapper(Item, items)
testing.db.scalar(
select(
[func.count(cast(1, Integer))]).
- where(addresses.c.user_id != None)),
+ where(addresses.c.user_id != None)), # noqa
0)
u1 = sess.query(User).get(u1.id)
u1.addresses.append(a1)
eq_(
testing.db.execute(
- select([addresses]).where(addresses.c.user_id != None)
+ select([addresses]).where(addresses.c.user_id != None) # noqa
).fetchall(),
[(a1.id, u1.id, 'foo')]
)
testing.db.scalar(
select(
[func.count(cast(1, Integer))]).
- where(addresses.c.user_id != None)),
+ where(addresses.c.user_id != None)), # noqa
0
)
sess.flush()
eq_(
testing.db.execute(
- select([addresses]).where(addresses.c.user_id != None)
+ select([addresses]).where(addresses.c.user_id != None) # noqa
).fetchall(),
[(a1.id, u1.id, 'foo')]
)
sess.flush()
eq_(
testing.db.execute(
- select([addresses]).where(addresses.c.user_id != None)
+ select([addresses]).where(addresses.c.user_id != None) # noqa
).fetchall(),
[(a2.id, u1.id, 'bar')]
)
sess.commit()
eq_(
testing.db.scalar(
- select([func.count('*')]).where(addresses.c.user_id == None)),
+ select([func.count('*')]).where(
+ addresses.c.user_id == None)), # noqa
0)
eq_(
testing.db.scalar(
- select([func.count('*')]).where(addresses.c.user_id != None)),
+ select([func.count('*')]).where(
+ addresses.c.user_id != None)), # noqa
6)
sess.delete(u)
eq_(
testing.db.scalar(
select([func.count('*')]).where(
- addresses.c.user_id == None
+ addresses.c.user_id == None # noqa
)
),
6
eq_(
testing.db.scalar(
select([func.count('*')]).where(
- addresses.c.user_id != None
+ addresses.c.user_id != None # noqa
)
),
0
eq_(
attributes.get_history(obj, attrname,
- attributes.LOAD_AGAINST_COMMITTED),
+ attributes.LOAD_AGAINST_COMMITTED),
compare_passive
)
u1.addresses.append(a1)
self._assert_history(u1,
- ([a1], [], [])
- )
+ ([a1], [], []))
def test_append_persistent(self):
u1, a1, s = self._persistent_fixture()
u1.addresses.append(a1)
self._assert_history(u1,
- ([a1], [], [])
- )
+ ([a1], [], [])
+ )
def test_remove_transient(self):
u1, a1 = self._transient_fixture()
u1.addresses.remove(a1)
self._assert_history(u1,
- ([], [], [])
- )
+ ([], [], []))
def test_backref_pop_transient(self):
u1, a1 = self._transient_fixture(addresses_args={"backref": "user"})
u1.addresses.append(a1)
self._assert_history(u1,
- ([a1], [], []),
- )
+ ([a1], [], []))
a1.user = None
# removed from added
self._assert_history(u1,
- ([], [], []),
- )
+ ([], [], []))
def test_remove_persistent(self):
u1, a1, s = self._persistent_fixture()
u1.addresses.remove(a1)
self._assert_history(u1,
- ([], [], [a1])
- )
+ ([], [], [a1]))
def test_backref_pop_persistent_autoflush_o2m_active_hist(self):
u1, a1, s = self._persistent_fixture(
a1.user = None
self._assert_history(u1,
- ([], [], [a1]),
- )
+ ([], [], [a1]))
def test_backref_pop_persistent_autoflush_m2m(self):
o1, i1, s = self._persistent_m2m_fixture(
i1.orders.remove(o1)
self._assert_history(o1,
- ([], [], [i1]),
- )
+ ([], [], [i1]))
def test_backref_pop_persistent_noflush_m2m(self):
o1, i1, s = self._persistent_m2m_fixture(
i1.orders.remove(o1)
self._assert_history(o1,
- ([], [], [i1]),
- )
+ ([], [], [i1]))
def test_unchanged_persistent(self):
Address = self.classes.Address
u1.addresses.remove(a2)
self._assert_history(u1,
- ([a3], [a1], [a2]),
- compare_passive=([a3], [], [a2])
- )
+ ([a3], [a1], [a2]),
+ compare_passive=([a3], [], [a2]))
def test_replace_transient(self):
Address = self.classes.Address
u1.addresses = [a2, a3, a4, a5]
self._assert_history(u1,
- ([a2, a3, a4, a5], [], [])
- )
+ ([a2, a3, a4, a5], [], []))
def test_replace_persistent_noflush(self):
Address = self.classes.Address
u1.addresses = [a2, a3, a4, a5]
self._assert_history(u1,
- ([a2, a3, a4, a5], [], [])
- )
+ ([a2, a3, a4, a5], [], []))
def test_replace_persistent_autoflush(self):
Address = self.classes.Address
u1.addresses = [a2, a3, a4, a5]
self._assert_history(u1,
- ([a3, a4, a5], [a2], [a1]),
- compare_passive=([a3, a4, a5], [], [a1])
- )
+ ([a3, a4, a5], [a2], [a1]),
+ compare_passive=([a3, a4, a5], [], [a1]))
def test_persistent_but_readded_noflush(self):
u1, a1, s = self._persistent_fixture(autoflush=False)
u1.addresses.append(a1)
self._assert_history(u1,
- ([], [a1], []),
- compare_passive=([a1], [], [])
- )
+ ([], [a1], []),
+ compare_passive=([a1], [], []))
def test_persistent_but_readded_autoflush(self):
u1, a1, s = self._persistent_fixture(autoflush=True)
u1.addresses.append(a1)
self._assert_history(u1,
- ([], [a1], []),
- compare_passive=([a1], [], [])
- )
+ ([], [a1], []),
+ compare_passive=([a1], [], []))
def test_missing_but_removed_noflush(self):
u1, a1, s = self._persistent_fixture(autoflush=False)
Address(id=5)
]),
User(id=10, addresses=[])
- ], q.order_by(User.id).all())
+ ], q.order_by(User.id).all())
def test_orderby_multi(self):
users, Address, addresses, User = (
Address(id=5)
]),
User(id=10, addresses=[])
- ], q.order_by(User.id).all())
+ ], q.order_by(User.id).all())
def test_orderby_related(self):
"""A regular mapper select on a single table can
))
q = create_session().query(User)
- l = q.filter(User.id == Address.user_id).order_by(
+ result = q.filter(User.id == Address.user_id).order_by(
Address.email_address).all()
eq_([
User(id=7, addresses=[
Address(id=1)
]),
- ], l)
+ ], result)
def test_orderby_desc(self):
Address, addresses, users, User = (self.classes.Address,
Address(id=5)
]),
User(id=10, addresses=[])
- ], sess.query(User).order_by(User.id).all())
+ ], sess.query(User).order_by(User.id).all())
def test_no_ad_hoc_orderby(self):
"""part of #2992; make sure string label references can't
"ORDER BY email_address"
)
-
-
def test_deferred_fk_col(self):
users, Dingaling, User, dingalings, Address, addresses = (
self.tables.users,
),
User(id=10)
- ], q.all())
+ ], q.all())
self.assert_sql_count(testing.db, go, 1)
def test_double_same_mappers(self):
Item(id=2),
Item(id=3)])]),
User(id=10)
- ], q.all())
+ ], q.all())
self.assert_sql_count(testing.db, go, 1)
def test_no_false_hits(self):
sess = create_session()
q = sess.query(User)
- l = q.order_by(User.id).limit(2).offset(1).all()
- eq_(self.static.user_all_result[1:3], l)
+ result = q.order_by(User.id).limit(2).offset(1).all()
+ eq_(self.static.user_all_result[1:3], result)
def test_distinct(self):
Address, addresses, users, User = (self.classes.Address,
q = sess.query(User)
def go():
- l = q.filter(s.c.u2_id == User.id).distinct().\
+ result = q.filter(s.c.u2_id == User.id).distinct().\
order_by(User.id).all()
- eq_(self.static.user_address_result, l)
+ eq_(self.static.user_address_result, result)
self.assert_sql_count(testing.db, go, 1)
def test_limit_2(self):
sess = create_session()
q = sess.query(Item)
- l = q.filter((Item.description == 'item 2') |
- (Item.description == 'item 5') |
- (Item.description == 'item 3')).\
+ result = q.filter((Item.description == 'item 2') |
+ (Item.description == 'item 5') |
+ (Item.description == 'item 3')).\
order_by(Item.id).limit(2).all()
- eq_(self.static.item_keyword_result[1:3], l)
+ eq_(self.static.item_keyword_result[1:3], result)
def test_limit_3(self):
"""test that the ORDER BY is propagated from the inner
q = sess.query(User)
if not testing.against('mssql'):
- l = q.join('orders').order_by(
+ result = q.join('orders').order_by(
Order.user_id.desc()).limit(2).offset(1)
eq_([
User(id=9,
orders=[Order(id=1), Order(id=3), Order(id=5)],
addresses=[Address(id=1)]
)
- ], l.all())
+ ], result.all())
- l = q.join('addresses').order_by(
+ result = q.join('addresses').order_by(
Address.email_address.desc()).limit(1).offset(0)
eq_([
User(id=7,
orders=[Order(id=1), Order(id=3), Order(id=5)],
addresses=[Address(id=1)]
)
- ], l.all())
+ ], result.all())
def test_limit_4(self):
User, Order, addresses, users, orders = (self.classes.User,
"addresses_email_address FROM addresses WHERE :param_1 = "
"addresses.user_id",
{'param_1': 8})
- )
+ )
def test_useget_cancels_eager_propagated_present(self):
"""test that a one to many lazyload cancels the unnecessary
propagate_to_loaders = True
sess = create_session()
- u1 = sess.query(User).options(MyBogusOption()).filter(User.id == 8).one()
+ u1 = sess.query(User).options(
+ MyBogusOption()).filter(User.id == 8).one()
def go():
eq_(u1.addresses[0].user, u1)
"addresses_email_address FROM addresses WHERE :param_1 = "
"addresses.user_id",
{'param_1': 8})
- )
-
+ )
def test_manytoone_limit(self):
"""test that the subquery wrapping only occurs with
q = create_session().query(User)
def go():
- l = q.filter(users.c.id == 7).all()
- eq_([User(id=7, address=Address(id=1))], l)
+ result = q.filter(users.c.id == 7).all()
+ eq_([User(id=7, address=Address(id=1))], result)
self.assert_sql_count(testing.db, go, 1)
def test_one_to_many_scalar_subq_wrapping(self):
mapper(Address, addresses),
primaryjoin=and_(
addresses.c.id == orders.c.address_id,
- addresses.c.email_address != None
+ addresses.c.email_address != None # noqa
),
lazy='joined')
q = create_session().query(User)
- l = q.filter(text("users.id in (7, 8, 9)")).order_by(text("users.id"))
+ result = q.filter(text("users.id in (7, 8, 9)")).order_by(
+ text("users.id"))
def go():
- eq_(self.static.user_order_result[0:3], l.all())
+ eq_(self.static.user_order_result[0:3], result.all())
self.assert_sql_count(testing.db, go, 1)
def test_double_with_aggregate(self):
max_order=Order(id=4)
),
User(id=10),
- ], q.order_by(User.id).all())
+ ], q.order_by(User.id).all())
self.assert_sql_count(testing.db, go, 1)
def test_uselist_false_warning(self):
orders=relationship(Order, lazy=False, order_by=orders.c.id),
))
q = create_session().query(User)
+
def go():
eq_(self.static.user_all_result, q.order_by(User.id).all())
self.assert_sql_count(testing.db, go, 1)
eq_([
Order(id=3, user=User(id=7)),
Order(id=4, user=User(id=9))
- ], q.all())
+ ], q.all())
q = q.select_from(s.join(order_items).join(items)).filter(
~Item.id.in_([1, 2, 5]))
eq_([
Order(id=3, user=User(id=7)),
- ], q.all())
+ ], q.all())
def test_aliasing(self):
"""test that eager loading uses aliases to insulate the eager
lazy='joined', order_by=addresses.c.id)
))
q = create_session().query(User)
- l = q.filter(addresses.c.email_address == 'ed@lala.com').filter(
+ result = q.filter(addresses.c.email_address == 'ed@lala.com').filter(
Address.user_id == User.id).order_by(User.id)
- eq_(self.static.user_address_result[1:2], l.all())
+ eq_(self.static.user_address_result[1:2], result.all())
def test_inner_join(self):
Address, addresses, users, User = (self.classes.Address,
"ON users.id = addresses_1.user_id"
)
-
def test_catch_the_right_target(self):
# test eager join chaining to the "nested" join on the left,
# a new feature as of [ticket:2369]
[
A(data='a1', bs=[B(data='b3'), B(data='b1'), B(data='b2')]),
A(bs=[B(data='b4'), B(data='b3'), B(data='b2')])
- ])
+ ])
class SelfReferentialEagerTest(fixtures.MappedTest):
Node(data='n123')
]),
Node(data='n13')
- ]), d)
+ ]), d)
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
Node(data='n123')
]),
Node(data='n13')
- ]), d)
+ ]), d)
self.assert_sql_count(testing.db, go, 1)
def test_lazy_fallback_doesnt_affect_eager(self):
Node(data='n121'),
Node(data='n122'),
Node(data='n123')
- ], list(n12.children))
+ ], list(n12.children))
self.assert_sql_count(testing.db, go, 1)
def test_with_deferred(self):
Node(data='n123')
]),
Node(data='n13')
- ]), d)
+ ]), d)
self.assert_sql_count(testing.db, go, 2)
def go():
Node(data='n123')
]),
Node(data='n13')
- ]), d)
+ ]), d)
self.assert_sql_count(testing.db, go, 3)
B,
remote_side=[b_table.c.id],
primaryjoin=(b_table.c.parent_b2_id == b_table.c.id),
- order_by = b_table.c.id
+ order_by=b_table.c.id
)
})
# PYTHONHASHSEED
in_('d', a1.c.__dict__)
+
class EntityViaMultiplePathTestTwo(fixtures.DeclarativeMappedTest):
"""test for [ticket:3431]"""
def test_compare_to_none(self):
User = self.classes.User
- eval_eq(User.name == None, testcases=[
- (User(name='foo'), False),
- (User(name=None), True),
- ])
+ eval_eq(User.name == None, # noqa
+ testcases=[(User(name='foo'), False), (User(name=None), True)])
def test_true_false(self):
User = self.classes.User
eval_eq(
- User.name == False, testcases=[
+ User.name == False, # noqa
+ testcases=[
(User(name='foo'), False),
(User(name=True), False),
(User(name=False), True),
)
eval_eq(
- User.name == True, testcases=[
+ User.name == True, # noqa
+ testcases=[
(User(name='foo'), False),
(User(name=True), True),
(User(name=False), False),
(User(id=None, name='foo'), None),
(User(id=None, name=None), None),
])
-
event.listen(m, "before_insert", canary.listen1, )
event.listen(m, "before_insert", canary.listen2, insert=True)
- event.listen(m, "before_insert", canary.listen3, propagate=True, insert=True)
+ event.listen(m, "before_insert", canary.listen3,
+ propagate=True, insert=True)
event.listen(m, "load", canary.listen4)
event.listen(m, "load", canary.listen5, insert=True)
event.listen(m, "load", canary.listen6, propagate=True, insert=True)
u.name = 'ed'
sess.commit()
eq_(canary, ['before_commit', 'before_flush',
- 'after_transaction_create', 'after_flush',
+ 'after_transaction_create', 'after_flush',
'after_flush_postexec',
'after_transaction_end',
'after_commit',
[
call.persistent_to_deleted(sess, u1_inst_state.obj()),
call.flag_checked(u1_inst_state.obj())
- ]
- )
-
-
+ ]
+ )
def test_detached_to_persistent(self):
sess, User, start_events = self._fixture()
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.schema import Column
from sqlalchemy.orm import mapper, relationship, create_session, \
- attributes, deferred, exc as orm_exc, defer, undefer,\
- strategies, state, lazyload, backref, Session
+ attributes, deferred, exc as orm_exc, defer, undefer,\
+ strategies, state, lazyload, backref, Session
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy.sql import select
+
class ExpireTest(_fixtures.FixtureTest):
def test_expire(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user'),
- })
+ 'addresses': relationship(Address, backref='user'),
+ })
mapper(Address, addresses)
sess = create_session()
u.name = 'foo'
sess.flush()
# change the value in the DB
- users.update(users.c.id==7, values=dict(name='jack')).execute()
+ users.update(users.c.id == 7, values=dict(name='jack')).execute()
sess.expire(u)
# object isn't refreshed yet, using dict to bypass trigger
assert u.__dict__.get('name') != 'jack'
s.expunge_all()
assert_raises_message(sa_exc.InvalidRequestError,
- r"is not persistent within this Session", s.expire, u)
+ r"is not persistent within this Session",
+ s.expire, u)
def test_get_refreshes(self):
users, User = self.tables.users, self.classes.User
def go():
u = s.query(User).get(10) # get() refreshes
self.assert_sql_count(testing.db, go, 1)
+
def go():
eq_(u.name, 'chuck') # attributes unexpired
self.assert_sql_count(testing.db, go, 0)
+
def go():
u = s.query(User).get(10) # expire flag reset, so not expired
self.assert_sql_count(testing.db, go, 0)
u = s.query(User).get(10)
s.expire_all()
- s.execute(users.delete().where(User.id==10))
+ s.execute(users.delete().where(User.id == 10))
# object is gone, get() returns None, removes u from session
assert u in s
assert s.query(User).get(10) is None
- assert u not in s # and expunges
+ assert u not in s # and expunges
def test_refresh_on_deleted_raises(self):
users, User = self.tables.users, self.classes.User
s.expire_all()
s.expire_all()
- s.execute(users.delete().where(User.id==10))
+ s.execute(users.delete().where(User.id == 10))
# raises ObjectDeletedError
assert_raises_message(
s = create_session(autocommit=False)
u = s.query(User).get(10)
s.expire_all()
- s.execute(users.delete().where(User.id==10))
+ s.execute(users.delete().where(User.id == 10))
# do a get()/remove u from session
assert s.query(User).get(10) is None
Order, orders = self.classes.Order, self.tables.orders
-
mapper(Order, orders, properties={
- 'description':deferred(orders.c.description)})
+ 'description': deferred(orders.c.description)})
s = create_session()
o1 = s.query(Order).first()
users, User = self.tables.users, self.classes.User
mapper(User, users, properties={
- 'name':deferred(users.c.name)
+ 'name': deferred(users.c.name)
})
s = create_session(autocommit=False)
u = s.query(User).get(10)
assert 'name' not in u.__dict__
- s.execute(users.delete().where(User.id==10))
+ s.execute(users.delete().where(User.id == 10))
assert_raises_message(
sa.orm.exc.ObjectDeletedError,
"Instance '<User at .*?>' has been "
def test_lazyload_autoflushes(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address,
- order_by=addresses.c.email_address)
+ 'addresses': relationship(Address,
+ order_by=addresses.c.email_address)
})
mapper(Address, addresses)
s = create_session(autoflush=True, autocommit=False)
immediate refresh of a collection"""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, order_by=addresses.c.email_address)
+ 'addresses': relationship(Address,
+ order_by=addresses.c.email_address)
})
mapper(Address, addresses)
s = create_session(autoflush=True, autocommit=False)
u = s.query(User).get(8)
assert_raises_message(sa_exc.InvalidRequestError,
- "properties specified for refresh",
- s.refresh, u, ['addresses'])
+ "properties specified for refresh",
+ s.refresh, u, ['addresses'])
# in contrast to a regular query with no columns
assert_raises_message(sa_exc.InvalidRequestError,
- "no columns with which to SELECT", s.query().all)
+ "no columns with which to SELECT", s.query().all)
def test_refresh_cancels_expire(self):
users, User = self.tables.users, self.classes.User
u = sess.query(User).get(7)
sess.expire(u, attribute_names=['name'])
+
def go():
u.name = 'somenewname'
self.assert_sql_count(testing.db, go, 0)
sess.add(u)
assert_raises(sa_exc.InvalidRequestError, getattr, u, 'name')
-
def test_expire_preserves_changes(self):
- """test that the expire load operation doesn't revert post-expire changes"""
+ """test that the expire load operation doesn't revert post-expire
+ changes"""
Order, orders = self.classes.Order, self.tables.orders
-
mapper(Order, orders)
sess = create_session()
o = sess.query(Order).get(3)
sess.expire(o)
o.description = "order 3 modified"
+
def go():
assert o.isopen == 1
self.assert_sql_count(testing.db, go, 1)
assert o.description is None
- o.isopen=15
+ o.isopen = 15
sess.expire(o, ['isopen', 'description'])
o.description = 'some new description'
sess.query(Order).all()
sess.expire(o, ['isopen', 'description'])
sess.query(Order).all()
del o.isopen
+
def go():
assert o.isopen is None
self.assert_sql_count(testing.db, go, 0)
- o.isopen=14
+ o.isopen = 14
sess.expire(o)
o.description = 'another new description'
sess.query(Order).all()
assert o.description == 'another new description'
def test_expire_committed(self):
- """test that the committed state of the attribute receives the most recent DB data"""
+ """test that the committed state of the attribute receives the most
+ recent DB data"""
orders, Order = self.tables.orders, self.classes.Order
orders.update().execute(description='order 3 modified')
assert o.isopen == 1
- assert attributes.instance_state(o).dict['description'] == 'order 3 modified'
+ assert attributes.instance_state(o) \
+ .dict['description'] == 'order 3 modified'
+
def go():
sess.flush()
self.assert_sql_count(testing.db, go, 0)
def test_expire_cascade(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, cascade="all, refresh-expire")
+ 'addresses': relationship(Address, cascade="all, refresh-expire")
})
mapper(Address, addresses)
s = create_session()
def test_refresh_cascade(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, cascade="all, refresh-expire")
+ 'addresses': relationship(Address, cascade="all, refresh-expire")
})
mapper(Address, addresses)
s = create_session()
def _test_cascade_to_pending(self, cascade, expire_or_refresh):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, cascade=cascade)
+ 'addresses': relationship(Address, cascade=cascade)
})
mapper(Address, addresses)
s = create_session()
def test_expired_lazy(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user'),
- })
+ 'addresses': relationship(Address, backref='user'),
+ })
mapper(Address, addresses)
sess = create_session()
def test_expired_eager(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy='joined'),
- })
+ 'addresses': relationship(Address, backref='user', lazy='joined'),
+ })
mapper(Address, addresses)
sess = create_session()
def test_relationship_changes_preserved(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy='joined'),
- })
+ 'addresses': relationship(Address, backref='user', lazy='joined'),
+ })
mapper(Address, addresses)
sess = create_session()
u = sess.query(User).get(8)
def test_joinedload_props_dontload(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- # relationships currently have to load separately from scalar instances.
+ # relationships currently have to load separately from scalar instances
# the use case is: expire "addresses". then access it. lazy load
# fires off to load "addresses", but needs foreign key or primary key
# attributes in order to lazy load; hits those attributes, such as
# below it hits "u.id". "u.id" triggers full unexpire operation,
- # joinedloads addresses since lazy='joined'. this is all within lazy load
- # which fires unconditionally; so an unnecessary joinedload (or
+ # joinedloads addresses since lazy='joined'. this is all within lazy
+ # load which fires unconditionally; so an unnecessary joinedload (or
# lazyload) was issued. would prefer not to complicate lazyloading to
# "figure out" that the operation should be aborted right now.
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy='joined'),
- })
+ 'addresses': relationship(Address, backref='user', lazy='joined'),
+ })
mapper(Address, addresses)
sess = create_session()
u = sess.query(User).get(8)
sess.expire(u)
assert 'name' not in u.__dict__
- users.update(users.c.id==7).execute(name='jack2')
+ users.update(users.c.id == 7).execute(name='jack2')
assert u.name == 'jack2'
assert u.uname == 'jack2'
assert 'name' in u.__dict__
assert 'description' not in o.__dict__
assert attributes.instance_state(o).dict['isopen'] == 1
- orders.update(orders.c.id==3).execute(description='order 3 modified')
+ orders.update(orders.c.id == 3).execute(description='order 3 modified')
def go():
assert o.description == 'order 3 modified'
self.assert_sql_count(testing.db, go, 1)
- assert attributes.instance_state(o).dict['description'] == 'order 3 modified'
+ assert attributes.instance_state(o) \
+ .dict['description'] == 'order 3 modified'
o.isopen = 5
sess.expire(o, attribute_names=['description'])
assert o.description == 'order 3 modified'
self.assert_sql_count(testing.db, go, 1)
assert o.__dict__['isopen'] == 5
- assert attributes.instance_state(o).dict['description'] == 'order 3 modified'
+ assert attributes.instance_state(o) \
+ .dict['description'] == 'order 3 modified'
assert attributes.instance_state(o).committed_state['isopen'] == 1
sess.flush()
assert 'id' not in o.__dict__
assert 'isopen' not in o.__dict__
assert 'description' not in o.__dict__
+
def go():
assert o.description == 'order 3 modified'
assert o.id == 3
def test_partial_expire_lazy(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user'),
- })
+ 'addresses': relationship(Address, backref='user'),
+ })
mapper(Address, addresses)
sess = create_session()
# hit the lazy loader. just does the lazy load,
# doesn't do the overall refresh
def go():
- assert u.addresses[0].email_address=='ed@wood.com'
+ assert u.addresses[0].email_address == 'ed@wood.com'
self.assert_sql_count(testing.db, go, 1)
assert 'name' not in u.__dict__
# check that mods to expired lazy-load attributes
# only do the lazy load
sess.expire(u, ['name', 'addresses'])
+
def go():
u.addresses = [Address(id=10, email_address='foo@bar.com')]
self.assert_sql_count(testing.db, go, 1)
# so the addresses collection got committed and is
# longer expired
def go():
- assert u.addresses[0].email_address=='foo@bar.com'
+ assert u.addresses[0].email_address == 'foo@bar.com'
assert len(u.addresses) == 1
self.assert_sql_count(testing.db, go, 0)
def test_partial_expire_eager(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy='joined'),
- })
+ 'addresses': relationship(Address, backref='user', lazy='joined'),
+ })
mapper(Address, addresses)
sess = create_session()
assert 'addresses' not in u.__dict__
def go():
- assert u.addresses[0].email_address=='ed@wood.com'
+ assert u.addresses[0].email_address == 'ed@wood.com'
self.assert_sql_count(testing.db, go, 1)
# check that mods to expired eager-load attributes
# do the refresh
sess.expire(u, ['name', 'addresses'])
+
def go():
u.addresses = [Address(id=10, email_address='foo@bar.com')]
self.assert_sql_count(testing.db, go, 1)
# this should ideally trigger the whole load
# but currently it works like the lazy case
def go():
- assert u.addresses[0].email_address=='foo@bar.com'
+ assert u.addresses[0].email_address == 'foo@bar.com'
assert len(u.addresses) == 1
self.assert_sql_count(testing.db, go, 0)
self.assert_sql_count(testing.db, go, 1)
# ideally, this was already loaded, but we arent
# doing it that way right now
- #self.assert_sql_count(testing.db, go, 0)
+ # self.assert_sql_count(testing.db, go, 0)
def test_relationships_load_on_query(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user'),
- })
+ 'addresses': relationship(Address, backref='user'),
+ })
mapper(Address, addresses)
sess = create_session()
assert 'description' not in o.__dict__
# test that the deferred attribute triggers the full
# reload
+
def go():
assert o.description == 'order 3'
assert o.isopen == 1
assert 'description' not in o.__dict__
# test that the deferred attribute triggers the full
# reload
+
def go():
assert o.description == 'order 3'
assert o.isopen == 1
def test_joinedload_query_refreshes(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy='joined'),
- })
+ 'addresses': relationship(Address, backref='user', lazy='joined'),
+ })
mapper(Address, addresses)
sess = create_session()
@testing.requires.predictable_gc
def test_expire_all(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy='joined',
- order_by=addresses.c.id),
- })
+ 'addresses': relationship(Address, backref='user', lazy='joined',
+ order_by=addresses.c.id),
+ })
mapper(Address, addresses)
sess = create_session()
eq_(len(list(sess)), 9)
sess.expire_all()
gc_collect()
- eq_(len(list(sess)), 4) # since addresses were gc'ed
+ eq_(len(list(sess)), 4) # since addresses were gc'ed
userlist = sess.query(User).order_by(User.id).all()
u = userlist[1]
eq_(len(list(sess)), 9)
def test_state_change_col_to_deferred(self):
- """Behavioral test to verify the current activity of loader callables."""
+ """Behavioral test to verify the current activity of loader callables
+ """
users, User = self.tables.users, self.classes.User
assert 'name' not in attributes.instance_state(u1).callables
def test_state_deferred_to_col(self):
- """Behavioral test to verify the current activity of loader callables."""
+ """Behavioral test to verify the current activity of loader callables
+ """
users, User = self.tables.users, self.classes.User
assert 'name' not in attributes.instance_state(u1).callables
def test_state_noload_to_lazy(self):
- """Behavioral test to verify the current activity of loader callables."""
+ """Behavioral test to verify the current activity of loader callables
+ """
users, Address, addresses, User = (
self.tables.users,
)
# expire, it stays
sess.expire(u1)
- assert 'addresses' not in attributes.instance_state(u1).expired_attributes
+ assert 'addresses' not in attributes.instance_state(u1) \
+ .expired_attributes
assert isinstance(
attributes.instance_state(u1).callables['addresses'],
strategies.LoadLazyAttribute
# load over it. callable goes away.
sess.query(User).first()
- assert 'addresses' not in attributes.instance_state(u1).expired_attributes
+ assert 'addresses' not in attributes.instance_state(u1) \
+ .expired_attributes
assert 'addresses' not in attributes.instance_state(u1).callables
sess.expunge_all()
u1 = sess.query(User).options(lazyload(User.addresses)).first()
sess.expire(u1, ['addresses'])
- assert 'addresses' not in attributes.instance_state(u1).expired_attributes
+ assert 'addresses' not in attributes.instance_state(u1) \
+ .expired_attributes
assert isinstance(
attributes.instance_state(u1).callables['addresses'],
strategies.LoadLazyAttribute
# load the attr, goes away
u1.addresses
- assert 'addresses' not in attributes.instance_state(u1).expired_attributes
+ assert 'addresses' not in attributes.instance_state(u1) \
+ .expired_attributes
assert 'addresses' not in attributes.instance_state(u1).callables
+
class PolymorphicExpireTest(fixtures.MappedTest):
run_inserts = 'once'
run_deletes = None
@classmethod
def define_tables(cls, metadata):
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('type', String(30)),
- )
+ Column('person_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('type', String(30)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
- primary_key=True),
- Column('status', String(30)),
- )
+ Column('person_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('status', String(30)))
@classmethod
def setup_classes(cls):
class Person(cls.Basic):
pass
+
class Engineer(Person):
pass
people, engineers = cls.tables.people, cls.tables.engineers
people.insert().execute(
- {'person_id':1, 'name':'person1', 'type':'person'},
- {'person_id':2, 'name':'engineer1', 'type':'engineer'},
- {'person_id':3, 'name':'engineer2', 'type':'engineer'},
+ {'person_id': 1, 'name': 'person1', 'type': 'person'},
+ {'person_id': 2, 'name': 'engineer1', 'type': 'engineer'},
+ {'person_id': 3, 'name': 'engineer2', 'type': 'engineer'},
)
engineers.insert().execute(
- {'person_id':2, 'status':'new engineer'},
- {'person_id':3, 'status':'old engineer'},
+ {'person_id': 2, 'status': 'new engineer'},
+ {'person_id': 3, 'status': 'old engineer'},
)
@classmethod
def setup_mappers(cls):
Person, people, engineers, Engineer = (cls.classes.Person,
- cls.tables.people,
- cls.tables.engineers,
- cls.classes.Engineer)
+ cls.tables.people,
+ cls.tables.engineers,
+ cls.classes.Engineer)
- mapper(Person, people, polymorphic_on=people.c.type, polymorphic_identity='person')
- mapper(Engineer, engineers, inherits=Person, polymorphic_identity='engineer')
+ mapper(Person, people, polymorphic_on=people.c.type,
+ polymorphic_identity='person')
+ mapper(Engineer, engineers, inherits=Person,
+ polymorphic_identity='engineer')
def test_poly_deferred(self):
Person, people, Engineer = (self.classes.Person,
- self.tables.people,
- self.classes.Engineer)
-
+ self.tables.people,
+ self.classes.Engineer)
sess = create_session()
[p1, e1, e2] = sess.query(Person).order_by(people.c.person_id).all()
assert e1.status == 'new engineer'
assert e2.status == 'old engineer'
self.assert_sql_count(testing.db, go, 2)
- eq_(Engineer.name.get_history(e1), (['new engineer name'],(), ['engineer1']))
+ eq_(Engineer.name.get_history(e1),
+ (['new engineer name'], (), ['engineer1']))
def test_no_instance_key(self):
Engineer = self.classes.Engineer
-
sess = create_session()
e1 = sess.query(Engineer).get(2)
def test_expired_pending(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user'),
- })
+ 'addresses': relationship(Address, backref='user'),
+ })
mapper(Address, addresses)
sess = create_session()
sess.expire(u1, ['addresses'])
# insert a new row
- sess.execute(addresses.insert(), dict(email_address='a3', user_id=u1.id))
+ sess.execute(addresses.insert(), dict(
+ email_address='a3', user_id=u1.id))
# only two addresses pulled from the DB, no "pending"
assert len(u1.addresses) == 2
@classmethod
def define_tables(cls, metadata):
Table("data", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', String(30)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)))
Table("data_fetched", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', String(30), FetchedValue()),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30), FetchedValue()))
Table("data_defer", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', String(30)),
- Column('data2', String(30)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)),
+ Column('data2', String(30)))
@classmethod
def setup_classes(cls):
class Data(cls.Comparable):
pass
+
class DataFetched(cls.Comparable):
pass
+
class DataDefer(cls.Comparable):
pass
mapper(cls.classes.Data, cls.tables.data)
mapper(cls.classes.DataFetched, cls.tables.data_fetched)
mapper(cls.classes.DataDefer, cls.tables.data_defer, properties={
- "data": deferred(cls.tables.data_defer.c.data)
- })
+ "data": deferred(cls.tables.data_defer.c.data)
+ })
def test_attr_not_inserted(self):
Data = self.classes.Data
# so its not in dict, but also when we hit it, it isn't
# expired because there's no column default on it or anything like that
assert 'data' not in d1.__dict__
+
def go():
eq_(d1.data, None)
sess.flush()
assert 'data' not in d1.__dict__
+
def go():
eq_(d1.data, None)
sess = create_session()
d1 = sess.query(Data).from_statement(
- select([Data.id])).options(undefer(Data.data)).first()
+ select([Data.id])).options(undefer(Data.data)).first()
d1.data = 'd2'
# the deferred loader has to clear out any state
testing.db, go, 1
)
+
class RefreshTest(_fixtures.FixtureTest):
def test_refresh(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses), backref='user')
+ 'addresses': relationship(mapper(Address, addresses),
+ backref='user')
})
s = create_session()
u = s.query(User).get(7)
s = create_session()
u = s.query(User).get(7)
s.expunge_all()
- assert_raises_message(sa_exc.InvalidRequestError, r"is not persistent within this Session", lambda: s.refresh(u))
+ assert_raises_message(sa_exc.InvalidRequestError,
+ r"is not persistent within this Session",
+ lambda: s.refresh(u))
def test_refresh_expired(self):
User, users = self.classes.User, self.tables.users
assert u.name == 'jack'
def test_refresh_with_lazy(self):
- """test that when a lazy loader is set as a trigger on an object's attribute
- (at the attribute level, not the class level), a refresh() operation doesn't
- fire the lazy loader or create any problems"""
+ """test that when a lazy loader is set as a trigger on an object's
+ attribute (at the attribute level, not the class level), a refresh()
+ operation doesn't fire the lazy loader or create any problems"""
User, Address, addresses, users = (self.classes.User,
- self.classes.Address,
- self.tables.addresses,
- self.tables.users)
-
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users)
s = create_session()
- mapper(User, users, properties={'addresses':relationship(mapper(Address, addresses))})
+ mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses))})
q = s.query(User).options(sa.orm.lazyload('addresses'))
- u = q.filter(users.c.id==8).first()
+ u = q.filter(users.c.id == 8).first()
+
def go():
s.refresh(u)
self.assert_sql_count(testing.db, go, 1)
def test_refresh_with_eager(self):
- """test that a refresh/expire operation loads rows properly and sends correct "isnew" state to eager loaders"""
+ """test that a refresh/expire operation loads rows properly and sends
+ correct "isnew" state to eager loaders"""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses), lazy='joined')
+ 'addresses': relationship(mapper(Address, addresses),
+ lazy='joined')
})
s = create_session()
"""test a hang condition that was occurring on expire/refresh"""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
s = create_session()
mapper(Address, addresses)
- mapper(User, users, properties = dict(addresses=relationship(Address,cascade="all, delete-orphan",lazy='joined')) )
+ mapper(User, users, properties=dict(addresses=relationship(
+ Address, cascade="all, delete-orphan", lazy='joined')))
u = User()
- u.name='Justin'
+ u.name = 'Justin'
a = Address(id=10, email_address='lala')
u.addresses.append(a)
s.add(u)
s.flush()
s.expunge_all()
- u = s.query(User).filter(User.name=='Justin').one()
+ u = s.query(User).filter(User.name == 'Justin').one()
s.expire(u)
assert u.name == 'Justin'
s.refresh(u)
-
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users, "
"(SELECT users.id AS id, users.name AS name FROM users) AS anon_1",
- )
+ )
self.assert_compile(
sess.query(users, exists([1], from_obj=addresses)).
with_labels().statement,
"SELECT users.id AS users_id, users.name AS users_name, EXISTS "
"(SELECT 1 FROM addresses) AS anon_1 FROM users",
- )
+ )
# a little tedious here, adding labels to work around Query's
# auto-labelling.
x = func.lala(users.c.id).label('foo')
self.assert_compile(sess.query(x).filter(x == 5).statement,
- "SELECT lala(users.id) AS foo FROM users WHERE "
- "lala(users.id) = :param_1")
+ "SELECT lala(users.id) AS foo FROM users WHERE "
+ "lala(users.id) = :param_1")
self.assert_compile(sess.query(func.sum(x).label('bar')).statement,
- "SELECT sum(lala(users.id)) AS bar FROM users")
+ "SELECT sum(lala(users.id)) AS bar FROM users")
class FromSelfTest(QueryTest, AssertsCompiledSQL):
def test_from_alias_one(self):
User, addresses, users = (self.classes.User,
- self.tables.addresses,
- self.tables.users)
+ self.tables.addresses,
+ self.tables.users)
query = users.select(users.c.id == 7).\
union(users.select(users.c.id > 7)).alias('ulist').\
q = sess.query(User)
def go():
- l = list(
+ result = list(
q.options(
contains_alias('ulist'), contains_eager('addresses')).
instances(query.execute()))
- assert self.static.user_address_result == l
+ assert self.static.user_address_result == result
self.assert_sql_count(testing.db, go, 1)
def test_from_alias_two(self):
User, addresses, users = (self.classes.User,
- self.tables.addresses,
- self.tables.users)
+ self.tables.addresses,
+ self.tables.users)
query = users.select(users.c.id == 7).\
union(users.select(users.c.id > 7)).alias('ulist').\
q = sess.query(User)
def go():
- l = q.options(
+ result = q.options(
contains_alias('ulist'), contains_eager('addresses')).\
from_statement(query).all()
- assert self.static.user_address_result == l
+ assert self.static.user_address_result == result
self.assert_sql_count(testing.db, go, 1)
def test_from_alias_three(self):
User, addresses, users = (self.classes.User,
- self.tables.addresses,
- self.tables.users)
+ self.tables.addresses,
+ self.tables.users)
query = users.select(users.c.id == 7).\
union(users.select(users.c.id > 7)).alias('ulist').\
# better way. use select_entity_from()
def go():
- l = sess.query(User).select_entity_from(query).\
+ result = sess.query(User).select_entity_from(query).\
options(contains_eager('addresses')).all()
- assert self.static.user_address_result == l
+ assert self.static.user_address_result == result
self.assert_sql_count(testing.db, go, 1)
def test_from_alias_four(self):
User, addresses, users = (self.classes.User,
- self.tables.addresses,
- self.tables.users)
+ self.tables.addresses,
+ self.tables.users)
sess = create_session()
select(use_labels=True, order_by=[text('ulist.id'), adalias.c.id])
def go():
- l = sess.query(User).select_entity_from(query).\
+ result = sess.query(User).select_entity_from(query).\
options(contains_eager('addresses', alias=adalias)).all()
- assert self.static.user_address_result == l
+ assert self.static.user_address_result == result
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager(self):
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
sess = create_session()
q = sess.query(User)
def go():
- l = list(
+ result = list(
q.options(contains_eager('addresses')).
instances(selectquery.execute()))
- assert self.static.user_address_result[0:3] == l
+ assert self.static.user_address_result[0:3] == result
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
- l = list(
+ result = list(
q.options(contains_eager(User.addresses)).
instances(selectquery.execute()))
- assert self.static.user_address_result[0:3] == l
+ assert self.static.user_address_result[0:3] == result
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
- l = q.options(
+ result = q.options(
contains_eager('addresses')).from_statement(selectquery).all()
- assert self.static.user_address_result[0:3] == l
+ assert self.static.user_address_result[0:3] == result
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_string_alias(self):
addresses, users, User = (self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.users,
+ self.classes.User)
sess = create_session()
q = sess.query(User)
# string alias name
def go():
- l = list(
+ result = list(
q.options(
contains_eager('addresses', alias="adalias")).
instances(selectquery.execute()))
- assert self.static.user_address_result == l
+ assert self.static.user_address_result == result
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_aliased_instances(self):
addresses, users, User = (self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.users,
+ self.classes.User)
sess = create_session()
q = sess.query(User)
# expression.Alias object
def go():
- l = list(
+ result = list(
q.options(
contains_eager('addresses', alias=adalias)).
instances(selectquery.execute()))
- assert self.static.user_address_result == l
+ assert self.static.user_address_result == result
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_aliased(self):
adalias = aliased(Address)
def go():
- l = q.options(
+ result = q.options(
contains_eager('addresses', alias=adalias)
- ).outerjoin(adalias, User.addresses).\
+ ).outerjoin(adalias, User.addresses).\
order_by(User.id, adalias.id)
- assert self.static.user_address_result == l.all()
+ assert self.static.user_address_result == result.all()
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_multi_string_alias(self):
orders, items, users, order_items, User = (self.tables.orders,
- self.tables.items,
- self.tables.users,
- self.tables.order_items,
- self.classes.User)
+ self.tables.items,
+ self.tables.users,
+ self.tables.order_items,
+ self.classes.User)
sess = create_session()
q = sess.query(User)
# test using string alias with more than one level deep
def go():
- l = list(
+ result = list(
q.options(
contains_eager('orders', alias='o1'),
contains_eager('orders.items', alias='i1')
).instances(query.execute()))
- assert self.static.user_order_result == l
+ assert self.static.user_order_result == result
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_multi_alias(self):
orders, items, users, order_items, User = (self.tables.orders,
- self.tables.items,
- self.tables.users,
- self.tables.order_items,
- self.classes.User)
+ self.tables.items,
+ self.tables.users,
+ self.tables.order_items,
+ self.classes.User)
sess = create_session()
q = sess.query(User)
# contains_eager('items', alias=ialias)
def go():
- l = list(
+ result = list(
q.options(
contains_eager('orders', alias=oalias),
contains_eager('orders.items', alias=ialias)).
instances(query.execute()))
- assert self.static.user_order_result == l
+ assert self.static.user_order_result == result
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_multi_aliased(self):
ialias = aliased(Item)
def go():
- l = q.options(
+ result = q.options(
contains_eager(User.orders, alias=oalias),
contains_eager(User.orders, Order.items, alias=ialias)).\
outerjoin(oalias, User.orders).\
outerjoin(ialias, oalias.items).\
order_by(User.id, oalias.id, ialias.id)
- assert self.static.user_order_result == l.all()
+ assert self.static.user_order_result == result.all()
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_chaining(self):
"""test that contains_eager() 'chains' by default."""
Dingaling, User, Address = (self.classes.Dingaling,
- self.classes.User,
- self.classes.Address)
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
q = sess.query(User).join(User.addresses).join(Address.dingaling).\
an alias at the end."""
Dingaling, User, Address = (self.classes.Dingaling,
- self.classes.User,
- self.classes.Address)
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
da = aliased(Dingaling, name="foob")
# applies context.adapter to result rows. This was
# [ticket:1180].
- l = q.outerjoin(User.orders).options(
+ result = q.outerjoin(User.orders).options(
joinedload(User.addresses), contains_eager(User.orders)). \
order_by(User.id, Order.id).offset(1).limit(2).all()
eq_(
- l, [
+ result, [
User(
id=7,
addresses=[
# are applied by the eager loader
oalias = aliased(Order)
- l = q.outerjoin(oalias, User.orders).options(
+ result = q.outerjoin(oalias, User.orders).options(
joinedload(User.addresses),
contains_eager(User.orders, alias=oalias)). \
order_by(User.id, oalias.id).\
offset(1).limit(2).all()
eq_(
- l,
+ result,
[
User(
id=7,
def test_multi_mappers(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
test_session = create_session()
test_session.query(Address).all()
expected = [(user7, address1),
- (user8, address2),
- (user8, address3),
- (user8, address4),
- (user9, address5),
- (user10, None)]
+ (user8, address2),
+ (user8, address3),
+ (user8, address4),
+ (user9, address5),
+ (user10, None)]
sess = create_session()
def test_aliased_multi_mappers(self):
User, addresses, users, Address = (self.classes.User,
- self.tables.addresses,
- self.tables.users,
- self.classes.Address)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.Address)
sess = create_session()
sess.query(Address).all()
expected = [(user7, address1),
- (user8, address2),
- (user8, address3),
- (user8, address4),
- (user9, address5),
- (user10, None)]
+ (user8, address2),
+ (user8, address3),
+ (user8, address4),
+ (user9, address5),
+ (user10, None)]
q = sess.query(User)
adalias = addresses.alias('adalias')
q = q.add_entity(Address, alias=adalias). \
select_entity_from(users.outerjoin(adalias))
- l = q.order_by(User.id, adalias.c.id).all()
- assert l == expected
+ result = q.order_by(User.id, adalias.c.id).all()
+ assert result == expected
sess.expunge_all()
q = sess.query(User).add_entity(Address, alias=adalias)
- l = q.select_entity_from(users.outerjoin(adalias)). \
+ result = q.select_entity_from(users.outerjoin(adalias)). \
filter(adalias.c.email_address == 'ed@bettyboop.com').all()
- assert l == [(user8, address3)]
+ assert result == [(user8, address3)]
def test_with_entities(self):
User, Address = self.classes.User, self.classes.Address
"""test aliased/nonalised joins with the usage of add_column()"""
User, Address, addresses, users = (self.classes.User,
- self.classes.Address,
- self.tables.addresses,
- self.tables.users)
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users)
sess = create_session()
(user7, user8, user9, user10) = sess.query(User).all()
expected = [(user7, 1),
- (user8, 3),
- (user9, 1),
- (user10, 0)
- ]
+ (user8, 3),
+ (user9, 1),
+ (user10, 0)
+ ]
q = sess.query(User)
q = q.group_by(users).order_by(User.id).outerjoin('addresses').\
select_from(users.outerjoin(addresses)). \
group_by(*[c for c in users.c]).order_by(User.id)
q = sess.query(User)
- l = q.add_column("count").from_statement(s).all()
- assert l == expected
+ result = q.add_column("count").from_statement(s).all()
+ assert result == expected
def test_raw_columns(self):
addresses, users, User = (self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.users,
+ self.classes.User)
sess = create_session()
(user7, user8, user9, user10) = sess.query(User).all()
adalias = addresses.alias()
q = create_session().query(User).add_column(func.count(adalias.c.id))\
- .add_column(("Name:" + users.c.name)).outerjoin(adalias, 'addresses')\
+ .add_column(("Name:" + users.c.name))\
+ .outerjoin(adalias, 'addresses')\
.group_by(users).order_by(users.c.id)
assert q.all() == expected
from_obj=[users.outerjoin(addresses)],
group_by=[c for c in users.c], order_by=[users.c.id])
q = create_session().query(User)
- l = q.add_column("count").add_column("concat").from_statement(s).all()
- assert l == expected
+ result = q.add_column("count").add_column("concat") \
+ .from_statement(s).all()
+ assert result == expected
sess.expunge_all()
# test with select_entity_from()
- q = create_session().query(User).add_column(func.count(addresses.c.id))\
- .add_column(("Name:" + users.c.name)).select_entity_from(users.outerjoin(addresses))\
+ q = create_session().query(User) \
+ .add_column(func.count(addresses.c.id)) \
+ .add_column(("Name:" + users.c.name)) \
+ .select_entity_from(users.outerjoin(addresses)) \
.group_by(users).order_by(users.c.id)
assert q.all() == expected
sess.expunge_all()
- q = create_session().query(User).add_column(func.count(addresses.c.id))\
+ q = create_session().query(User) \
+ .add_column(func.count(addresses.c.id)) \
.add_column(("Name:" + users.c.name)).outerjoin('addresses')\
.group_by(users).order_by(users.c.id)
assert q.all() == expected
sess.expunge_all()
- q = create_session().query(User).add_column(func.count(adalias.c.id))\
- .add_column(("Name:" + users.c.name)).outerjoin(adalias, 'addresses')\
+ q = create_session().query(User).add_column(func.count(adalias.c.id)) \
+ .add_column(("Name:" + users.c.name)) \
+ .outerjoin(adalias, 'addresses') \
.group_by(users).order_by(users.c.id)
assert q.all() == expected
def test_join_relname_from_selected_from(self):
User, Address = self.classes.User, self.classes.Address
users, addresses = self.tables.users, self.tables.addresses
- mapper(User, users, properties=
- {'addresses': relationship(mapper(Address, addresses),
- backref='user')})
+ mapper(User, users, properties={'addresses': relationship(
+ mapper(Address, addresses), backref='user')})
sess = create_session()
def test_filter_by_selected_from(self):
User, Address = self.classes.User, self.classes.Address
users, addresses = self.tables.users, self.tables.addresses
- mapper(User, users, properties=
- {'addresses': relationship(mapper(Address, addresses))})
+ mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses))})
sess = create_session()
def test_join_ent_selected_from(self):
User, Address = self.classes.User, self.classes.Address
users, addresses = self.tables.users, self.tables.addresses
- mapper(User, users, properties=
- {'addresses': relationship(mapper(Address, addresses))})
+ mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses))})
sess = create_session()
"FROM addresses JOIN users ON users.id = addresses.user_id"
)
-
def test_join(self):
users, Address, addresses, User = (
self.tables.users, self.classes.Address, self.tables.addresses,
def test_replace_with_eager(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(
User, users, properties={
User(id=7, addresses=[Address(id=1)]),
User(
id=8, addresses=[Address(id=2), Address(id=3),
- Address(id=4)])])
+ Address(id=4)])])
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
[
User(
id=8, addresses=[Address(id=2), Address(id=3),
- Address(id=4)])])
+ Address(id=4)])])
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
select_entity_from(sel).order_by(User.id)[1],
User(
id=8, addresses=[Address(id=2), Address(id=3),
- Address(id=4)]))
+ Address(id=4)]))
self.assert_sql_count(testing.db, go, 1)
selectables."""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(
User, users, properties={
(5, 9, 18, 1)])
def test_external_columns_joinedload(self):
- users, orders, User, Address, Order, addresses = (self.tables.users,
- self.tables.orders,
- self.classes.User,
- self.classes.Address,
- self.classes.Order,
- self.tables.addresses)
+ users, orders, User, Address, Order, addresses = \
+ (self.tables.users,
+ self.tables.orders,
+ self.classes.User,
+ self.classes.Address,
+ self.classes.Order,
+ self.tables.addresses)
# in this test, we have a subquery on User that accesses "addresses",
# underneath an joinedload for "addresses". So the "addresses" alias
@classmethod
def define_tables(cls, metadata):
Table('foo', metadata,
- Column('id', Integer, sa.Sequence('foo_id_seq'), primary_key=True),
+ Column('id', Integer, sa.Sequence('foo_id_seq'),
+ primary_key=True),
Column('bar', Integer),
Column('range', Integer))
sess = create_session()
query = sess.query(Foo)
assert query.count() == 100
- assert sess.query(func.min(foo.c.bar)).filter(foo.c.bar<30).one() == (0,)
-
- assert sess.query(func.max(foo.c.bar)).filter(foo.c.bar<30).one() == (29,)
- assert next(query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)))[0] == 29
- assert next(query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)))[0] == 29
-
- @testing.fails_if(lambda:testing.against('mysql+mysqldb') and
- testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma'),
- "unknown incompatibility")
+ assert sess.query(func.min(foo.c.bar)).filter(foo.c.bar < 30) \
+ .one() == (0,)
+
+ assert sess.query(func.max(foo.c.bar)).filter(foo.c.bar < 30) \
+ .one() == (29,)
+ assert next(query.filter(foo.c.bar < 30).values(
+ sa.func.max(foo.c.bar)))[0] == 29
+ assert next(query.filter(foo.c.bar < 30).values(
+ sa.func.max(foo.c.bar)))[0] == 29
+
+ @testing.fails_if(
+ lambda: testing.against('mysql+mysqldb') and
+ testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma'),
+ "unknown incompatibility")
def test_aggregate_1(self):
foo = self.tables.foo
-
query = create_session().query(func.sum(foo.c.bar))
- assert query.filter(foo.c.bar<30).one() == (435,)
+ assert query.filter(foo.c.bar < 30).one() == (435,)
@testing.fails_on('firebird', 'FIXME: unknown')
- @testing.fails_on('mssql', 'AVG produces an average as the original column type on mssql.')
+ @testing.fails_on(
+ 'mssql',
+ 'AVG produces an average as the original column type on mssql.')
def test_aggregate_2(self):
foo = self.tables.foo
avg = query.filter(foo.c.bar < 30).one()[0]
eq_(float(round(avg, 1)), 14.5)
- @testing.fails_on('mssql', 'AVG produces an average as the original column type on mssql.')
+ @testing.fails_on(
+ 'mssql',
+ 'AVG produces an average as the original column type on mssql.')
def test_aggregate_3(self):
foo, Foo = self.tables.foo, self.classes.Foo
query = create_session().query(Foo)
- avg_f = next(query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)))[0]
+ avg_f = next(query.filter(foo.c.bar < 30).values(
+ sa.func.avg(foo.c.bar)))[0]
assert float(round(avg_f, 1)) == 14.5
- avg_o = next(query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)))[0]
+ avg_o = next(query.filter(foo.c.bar < 30).values(
+ sa.func.avg(foo.c.bar)))[0]
assert float(round(avg_o, 1)) == 14.5
def test_filter(self):
class Obj1(cls.Basic):
pass
+
class Obj2(cls.Basic):
pass
@classmethod
def setup_mappers(cls):
addresses, Order, User, Address, orders, users = (cls.tables.addresses,
- cls.classes.Order,
- cls.classes.User,
- cls.classes.Address,
- cls.tables.orders,
- cls.tables.users)
+ cls.classes.Order,
+ cls.classes.User,
+ cls.classes.Address,
+ cls.tables.orders,
+ cls.tables.users)
mapper(User, users, properties={
- 'orders':relationship(mapper(Order, orders, properties={
- 'addresses':relationship(mapper(Address, addresses))}))})
-
+ 'orders': relationship(mapper(Order, orders, properties={
+ 'addresses': relationship(mapper(Address, addresses))}))})
def test_join(self):
"""Query.join"""
User, Address = self.classes.User, self.classes.Address
-
session = create_session()
q = (session.query(User).join('orders', 'addresses').
filter(Address.id == 1))
self.classes.User,
self.classes.Address)
-
session = create_session()
q = (session.query(User).outerjoin('orders', 'addresses').
- filter(sa.or_(Order.id == None, Address.id == 1)))
+ filter(sa.or_(Order.id == None, Address.id == 1))) # noqa
eq_(set([User(id=7), User(id=8), User(id=10)]),
set(q.all()))
self.classes.User,
self.classes.Address)
-
session = create_session()
q = (session.query(User).outerjoin('orders', 'addresses').
- filter(sa.or_(Order.id == None, Address.id == 1)))
+ filter(sa.or_(Order.id == None, Address.id == 1))) # noqa
eq_(q.count(), 4)
def test_from(self):
- users, Order, User, Address, orders, addresses = (self.tables.users,
- self.classes.Order,
- self.classes.User,
- self.classes.Address,
- self.tables.orders,
- self.tables.addresses)
+ users, Order, User, Address, orders, addresses = \
+ (self.tables.users,
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address,
+ self.tables.orders,
+ self.tables.addresses)
session = create_session()
sel = users.outerjoin(orders).outerjoin(
addresses, orders.c.address_id == addresses.c.id)
q = (session.query(User).select_from(sel).
- filter(sa.or_(Order.id == None, Address.id == 1)))
+ filter(sa.or_(Order.id == None, Address.id == 1))) # noqa
eq_(set([User(id=7), User(id=8), User(id=10)]),
set(q.all()))
@classmethod
def define_tables(cls, metadata):
Table('Table1', metadata,
- Column('ID', Integer, primary_key=True))
+ Column('ID', Integer, primary_key=True))
Table('Table2', metadata,
Column('T1ID', Integer, ForeignKey("Table1.ID"),
primary_key=True),
class Obj1(cls.Basic):
pass
+
class Obj2(cls.Basic):
pass
q = create_session(bind=testing.db).query(Obj1)
assert q.count() == 4
- res = q.filter(sa.and_(Table1.c.ID==Table2.c.T1ID,Table2.c.T1ID==1))
+ res = q.filter(
+ sa.and_(Table1.c.ID == Table2.c.T1ID, Table2.c.T1ID == 1))
assert res.count() == 3
- res = q.filter(sa.and_(Table1.c.ID==Table2.c.T1ID,Table2.c.T1ID==1)).distinct()
+ res = q.filter(sa.and_(Table1.c.ID == Table2.c.T1ID,
+ Table2.c.T1ID == 1)).distinct()
eq_(res.count(), 1)
-
-
fk_args = dict(onupdate='cascade')
Table('users', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True))
Table('addresses', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('user_id', Integer, ForeignKey('users.id', **fk_args)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer, ForeignKey('users.id', **fk_args)))
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
+
class Address(cls.Comparable):
pass
def setup_mappers(cls):
mapper(cls.classes.Address, cls.tables.addresses)
mapper(cls.classes.User, cls.tables.users, properties={
- 'addresses':relationship(cls.classes.Address,
- cascade='all, delete-orphan'),
+ 'addresses': relationship(cls.classes.Address,
+ cascade='all, delete-orphan'),
})
def _assert_hasparent(self, a1):
- assert attributes.has_parent(
- self.classes.User, a1, "addresses")
+ assert attributes.has_parent(self.classes.User, a1, "addresses")
def _assert_not_hasparent(self, a1):
- assert not attributes.has_parent(
- self.classes.User, a1, "addresses")
+ assert not attributes.has_parent(self.classes.User, a1, "addresses")
def _fixture(self):
User, Address = self.classes.User, self.classes.Address
# so the remove will unset the hasparent flag.
# this is what has occurred historically in any case.
self._assert_not_hasparent(a1)
- #self._assert_hasparent(a1)
+ # self._assert_hasparent(a1)
@testing.requires.predictable_gc
def test_stale_state_negative(self):
u1.addresses.remove(a1)
self._assert_not_hasparent(a1)
-
-
def test_basic_option(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties={
- 'addresses':relationship(Address)
+ 'addresses': relationship(Address)
})
sess = create_session()
- l = sess.query(User).options(immediateload(User.addresses)).filter(users.c.id==7).all()
+ result = sess.query(User).options(immediateload(
+ User.addresses)).filter(users.c.id == 7).all()
eq_(len(sess.identity_map), 2)
sess.close()
eq_(
- [User(id=7, addresses=[Address(id=1, email_address='jack@bean.com')])],
- l
+ [User(id=7,
+ addresses=[Address(id=1, email_address='jack@bean.com')])],
+ result
)
-
def test_basic(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties={
- 'addresses':relationship(Address, lazy='immediate')
+ 'addresses': relationship(Address, lazy='immediate')
})
sess = create_session()
- l = sess.query(User).filter(users.c.id==7).all()
+ result = sess.query(User).filter(users.c.id == 7).all()
eq_(len(sess.identity_map), 2)
sess.close()
eq_(
- [User(id=7, addresses=[Address(id=1, email_address='jack@bean.com')])],
- l
+ [User(id=7,
+ addresses=[Address(id=1, email_address='jack@bean.com')])],
+ result
)
-
-
from sqlalchemy.orm.attributes import instance_state, NO_VALUE
from sqlalchemy import testing
+
class TestORMInspection(_fixtures.FixtureTest):
@classmethod
def setup_mappers(cls):
cls._setup_stock_mapping()
inspect(cls.classes.User).add_property(
- "name_syn",synonym("name")
+ "name_syn", synonym("name")
)
def test_class_mapper(self):
assert inspect(User) is class_mapper(User)
-
def test_column_collection_iterate(self):
User = self.classes.User
user_table = self.tables.users
user_table = self.tables.users
insp = inspect(User)
eq_(insp.primary_key,
- (user_table.c.id,)
- )
+ (user_table.c.id,))
def test_local_table(self):
User = self.classes.User
def test_mapper_selectable_fixed(self):
from sqlalchemy.orm import mapper
+
class Foo(object):
pass
+
class Bar(Foo):
pass
user_table = self.tables.users
addresses_table = self.tables.addresses
mapper(Foo, user_table, with_polymorphic=(Bar,))
mapper(Bar, addresses_table, inherits=Foo, properties={
- 'address_id': addresses_table.c.id
- })
+ 'address_id': addresses_table.c.id
+ })
i1 = inspect(Foo)
i2 = inspect(Foo)
assert i1.selectable is i2.selectable
assert not hasattr(prop, 'columns')
assert hasattr(prop, 'expression')
-
def test_extension_types(self):
from sqlalchemy.ext.associationproxy import \
- association_proxy, ASSOCIATION_PROXY
+ association_proxy, ASSOCIATION_PROXY
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method, \
- HYBRID_PROPERTY, HYBRID_METHOD
+ HYBRID_PROPERTY, HYBRID_METHOD
from sqlalchemy import Table, MetaData, Integer, Column
from sqlalchemy.orm import mapper
from sqlalchemy.orm.interfaces import NOT_EXTENSION
raise NotImplementedError()
t = Table('sometable', MetaData(),
- Column('id', Integer, primary_key=True))
+ Column('id', Integer, primary_key=True))
mapper(SomeClass, t)
mapper(SomeSubClass, inherits=SomeClass)
insp = inspect(SomeSubClass)
eq_(
dict((k, v.extension_type)
- for k, v in list(insp.all_orm_descriptors.items())
- ),
+ for k, v in list(insp.all_orm_descriptors.items())),
{
'id': NOT_EXTENSION,
'name': NOT_EXTENSION,
eq_(
(insp.transient, insp.pending,
- insp.persistent, insp.detached),
+ insp.persistent, insp.detached),
(True, False, False, False)
)
s = Session(testing.db)
eq_(
(insp.transient, insp.pending,
- insp.persistent, insp.detached),
+ insp.persistent, insp.detached),
(False, True, False, False)
)
s.flush()
eq_(
(insp.transient, insp.pending,
- insp.persistent, insp.detached),
+ insp.persistent, insp.detached),
(False, False, True, False)
)
s.expunge(u1)
eq_(
(insp.transient, insp.pending,
- insp.persistent, insp.detached),
+ insp.persistent, insp.detached),
(False, False, False, True)
)
u1 = User(name='ed')
insp = inspect(u1)
is_(insp.object, u1)
-
instrumentation.register_class(cls)
ne_(cls.__init__, original_init)
manager = instrumentation.manager_of_class(cls)
+
def init(state, args, kwargs):
canary.append((cls, 'init', state.class_))
event.listen(manager, 'init', init, raw=True)
del inits[:]
obj = C()
eq_(inits, [(C, 'init', C), (C, '__init__'), (B, '__init__'),
- (A, '__init__')])
+ (A, '__init__')])
def test_Ai_bi_Ci(self):
inits = []
del inits[:]
obj = C()
eq_(inits, [(C, 'init', C), (C, '__init__'), (B, '__init__'),
- (A, '__init__')])
+ (A, '__init__')])
def test_Ai_b_Ci(self):
inits = []
mapper, A, self.fixture()
)
+
class OnLoadTest(fixtures.ORMTest):
"""Check that Events.load is not hit in regular attributes operations."""
import pickle
global A
+
class A(object):
pass
sa = instrumentation.ClassManager.STATE_ATTR
ma = instrumentation.ClassManager.MANAGER_ATTR
- fails = lambda method, attr: assert_raises(
+ def fails(method, attr): return assert_raises(
KeyError, getattr(manager, method), attr, property())
fails('install_member', sa)
pass
assert_raises(KeyError, mapper, T, t)
+
class Py3KFunctionInstTest(fixtures.ORMTest):
__requires__ = ("python3", )
-
def _instrument(self, cls):
manager = instrumentation.register_class(cls)
canary = []
+
def check(target, args, kwargs):
canary.append((args, kwargs))
event.listen(manager, "init", check)
cls, "a", "b", c="c"
)
+
if util.py3k:
_locals = {}
exec("""
for k in _locals:
setattr(Py3KFunctionInstTest, k, _locals[k])
+
class MiscTest(fixtures.ORMTest):
"""Seems basic, but not directly covered elsewhere!"""
t = Table('t', MetaData(),
Column('id', Integer, primary_key=True),
Column('x', Integer))
+
class A(object):
pass
mapper(A, t)
t2 = Table('t2', m,
Column('id', Integer, primary_key=True),
Column('t1_id', Integer, ForeignKey('t1.id')))
+
class A(object):
pass
+
class B(object):
pass
mapper(A, t1, properties=dict(bs=relationship(B)))
for base in object, Base:
class A(base):
pass
+
class B(base):
pass
mapper(A, t1, properties=dict(bs=relationship(B, backref='a')))
class Base(object):
def __init__(self):
pass
+
class Base_AKW(object):
def __init__(self, *args, **kwargs):
pass
for base in object, Base, Base_AKW:
class A(base):
pass
+
class B(base):
pass
mapper(A, t1)
session = create_session()
session.add(a)
assert b in session, 'base: %s' % base
-
-
from sqlalchemy.orm.util import join, outerjoin, with_parent
+
class QueryTest(_fixtures.FixtureTest):
run_setup_mappers = 'once'
run_inserts = 'once'
run_deletes = None
-
@classmethod
def setup_mappers(cls):
Node, composite_pk_table, users, Keyword, items, Dingaling, \
cls.classes.Order, cls.tables.orders, cls.tables.addresses
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', order_by=addresses.c.id),
- 'orders':relationship(Order, backref='user', order_by=orders.c.id), # o2m, m2o
+ 'addresses': relationship(Address, backref='user',
+ order_by=addresses.c.id),
+ # o2m, m2o
+ 'orders': relationship(Order, backref='user', order_by=orders.c.id)
})
mapper(Address, addresses, properties={
- 'dingaling':relationship(Dingaling, uselist=False, backref="address") #o2o
+ # o2o
+ 'dingaling': relationship(Dingaling, uselist=False,
+ backref="address")
})
mapper(Dingaling, dingalings)
mapper(Order, orders, properties={
- 'items':relationship(Item, secondary=order_items, order_by=items.c.id), #m2m
- 'address':relationship(Address), # m2o
+ # m2m
+ 'items': relationship(Item, secondary=order_items,
+ order_by=items.c.id),
+ 'address': relationship(Address), # m2o
})
mapper(Item, items, properties={
- 'keywords':relationship(Keyword, secondary=item_keywords) #m2m
+ 'keywords': relationship(Keyword, secondary=item_keywords) # m2m
})
mapper(Keyword, keywords)
mapper(Node, nodes, properties={
- 'children':relationship(Node,
- backref=backref('parent', remote_side=[nodes.c.id])
- )
+ 'children': relationship(Node,
+ backref=backref(
+ 'parent', remote_side=[nodes.c.id]))
})
mapper(CompositePk, composite_pk_table)
configure_mappers()
+
class InheritedJoinTest(fixtures.MappedTest, AssertsCompiledSQL):
run_setup_mappers = 'once'
@classmethod
def define_tables(cls, metadata):
Table('companies', metadata,
- Column('company_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(50)))
+ Column('company_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)))
Table('people', metadata,
- Column('person_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('company_id', Integer, ForeignKey('companies.company_id')),
- Column('name', String(50)),
- Column('type', String(30)))
+ Column('person_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('company_id', Integer,
+ ForeignKey('companies.company_id')),
+ Column('name', String(50)),
+ Column('type', String(30)))
Table('engineers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True),
- Column('status', String(30)),
- Column('engineer_name', String(50)),
- Column('primary_language', String(50)),
- )
+ Column('person_id', Integer, ForeignKey(
+ 'people.person_id'), primary_key=True),
+ Column('status', String(30)),
+ Column('engineer_name', String(50)),
+ Column('primary_language', String(50)))
Table('machines', metadata,
- Column('machine_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('engineer_id', Integer, ForeignKey('engineers.person_id')))
+ Column('machine_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('engineer_id', Integer,
+ ForeignKey('engineers.person_id')))
Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True),
- Column('status', String(30)),
- Column('manager_name', String(50))
- )
+ Column('person_id', Integer, ForeignKey(
+ 'people.person_id'), primary_key=True),
+ Column('status', String(30)),
+ Column('manager_name', String(50)))
Table('boss', metadata,
- Column('boss_id', Integer, ForeignKey('managers.person_id'), primary_key=True),
- Column('golf_swing', String(30)),
- )
+ Column('boss_id', Integer, ForeignKey(
+ 'managers.person_id'), primary_key=True),
+ Column('golf_swing', String(30)),
+ )
Table('paperwork', metadata,
- Column('paperwork_id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('description', String(50)),
- Column('person_id', Integer, ForeignKey('people.person_id')))
+ Column('paperwork_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('description', String(50)),
+ Column('person_id', Integer, ForeignKey('people.person_id')))
@classmethod
def setup_classes(cls):
- paperwork, people, companies, boss, managers, machines, engineers = (cls.tables.paperwork,
- cls.tables.people,
- cls.tables.companies,
- cls.tables.boss,
- cls.tables.managers,
- cls.tables.machines,
- cls.tables.engineers)
+ paperwork, people, companies, boss, managers, machines, engineers = (
+ cls.tables.paperwork,
+ cls.tables.people,
+ cls.tables.companies,
+ cls.tables.boss,
+ cls.tables.managers,
+ cls.tables.machines,
+ cls.tables.engineers)
class Company(cls.Comparable):
pass
+
class Person(cls.Comparable):
pass
+
class Engineer(Person):
pass
+
class Manager(Person):
pass
+
class Boss(Manager):
pass
+
class Machine(cls.Comparable):
pass
+
class Paperwork(cls.Comparable):
pass
mapper(Company, companies, properties={
- 'employees':relationship(Person, order_by=people.c.person_id)
+ 'employees': relationship(Person, order_by=people.c.person_id)
})
mapper(Machine, machines)
mapper(Person, people,
- polymorphic_on=people.c.type,
- polymorphic_identity='person',
- properties={
- 'paperwork':relationship(Paperwork, order_by=paperwork.c.paperwork_id)
- })
- mapper(Engineer, engineers, inherits=Person, polymorphic_identity='engineer', properties={
- 'machines':relationship(Machine, order_by=machines.c.machine_id)
- })
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person',
+ properties={
+ 'paperwork': relationship(Paperwork,
+ order_by=paperwork.c.paperwork_id)
+ })
+ mapper(Engineer, engineers, inherits=Person,
+ polymorphic_identity='engineer',
+ properties={'machines': relationship(
+ Machine, order_by=machines.c.machine_id)})
mapper(Manager, managers,
- inherits=Person, polymorphic_identity='manager')
+ inherits=Person, polymorphic_identity='manager')
mapper(Boss, boss, inherits=Manager, polymorphic_identity='boss')
mapper(Paperwork, paperwork)
self.assert_compile(
sess.query(Company).join(Company.employees),
- "SELECT companies.company_id AS companies_company_id, companies.name AS companies_name "
- "FROM companies JOIN people ON companies.company_id = people.company_id"
- , use_default_dialect = True
- )
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name "
+ "FROM companies JOIN people "
+ "ON companies.company_id = people.company_id",
+ use_default_dialect=True)
def test_force_via_select_from(self):
Company, Engineer = self.classes.Company, self.classes.Engineer
sess = create_session()
self.assert_compile(
- sess.query(Company).\
- filter(Company.company_id==Engineer.company_id).\
- filter(Engineer.primary_language=='java'),
- "SELECT companies.company_id AS companies_company_id, companies.name AS companies_name "
+ sess.query(Company)
+ .filter(Company.company_id == Engineer.company_id)
+ .filter(Engineer.primary_language == 'java'),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name "
"FROM companies, people, engineers "
- "WHERE companies.company_id = people.company_id AND engineers.primary_language "
- "= :primary_language_1",
- use_default_dialect=True
- )
+ "WHERE companies.company_id = people.company_id "
+ "AND engineers.primary_language "
+ "= :primary_language_1", use_default_dialect=True)
self.assert_compile(
- sess.query(Company).select_from(Company, Engineer).\
- filter(Company.company_id==Engineer.company_id).\
- filter(Engineer.primary_language=='java'),
- "SELECT companies.company_id AS companies_company_id, companies.name AS companies_name "
- "FROM companies, people JOIN engineers ON people.person_id = engineers.person_id "
- "WHERE companies.company_id = people.company_id AND engineers.primary_language ="
- " :primary_language_1",
- use_default_dialect=True
-
- )
+ sess.query(Company).select_from(Company, Engineer)
+ .filter(Company.company_id == Engineer.company_id)
+ .filter(Engineer.primary_language == 'java'),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name "
+ "FROM companies, people JOIN engineers "
+ "ON people.person_id = engineers.person_id "
+ "WHERE companies.company_id = people.company_id "
+ "AND engineers.primary_language ="
+ " :primary_language_1", use_default_dialect=True)
def test_single_prop_of_type(self):
Company, Engineer = self.classes.Company, self.classes.Engineer
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name "
"FROM companies JOIN "
- "(people JOIN engineers ON people.person_id = engineers.person_id) "
- "ON companies.company_id = people.company_id"
- , use_default_dialect = True
- )
+ "(people JOIN engineers "
+ "ON people.person_id = engineers.person_id) "
+ "ON companies.company_id = people.company_id",
+ use_default_dialect=True)
def test_prop_with_polymorphic_1(self):
Person, Manager, Paperwork = (self.classes.Person,
- self.classes.Manager,
- self.classes.Paperwork)
+ self.classes.Manager,
+ self.classes.Paperwork)
sess = create_session()
self.assert_compile(
sess.query(Person).with_polymorphic(Manager).
- order_by(Person.person_id).
- join('paperwork').filter(Paperwork.description.like('%review%')),
- "SELECT people.person_id AS people_person_id, people.company_id AS"
- " people_company_id, "
- "people.name AS people_name, people.type AS people_type, managers.person_id "
- "AS managers_person_id, "
- "managers.status AS managers_status, managers.manager_name AS "
- "managers_manager_name FROM people "
- "LEFT OUTER JOIN managers ON people.person_id = managers.person_id JOIN "
- "paperwork ON people.person_id = "
- "paperwork.person_id WHERE paperwork.description LIKE :description_1 "
- "ORDER BY people.person_id"
- , use_default_dialect=True
- )
+ order_by(Person.person_id).join('paperwork')
+ .filter(Paperwork.description.like('%review%')),
+ "SELECT people.person_id AS people_person_id, people.company_id AS"
+ " people_company_id, "
+ "people.name AS people_name, people.type AS people_type, "
+ "managers.person_id AS managers_person_id, "
+ "managers.status AS managers_status, managers.manager_name AS "
+ "managers_manager_name FROM people "
+ "LEFT OUTER JOIN managers "
+ "ON people.person_id = managers.person_id "
+ "JOIN paperwork "
+ "ON people.person_id = paperwork.person_id "
+ "WHERE paperwork.description LIKE :description_1 "
+ "ORDER BY people.person_id", use_default_dialect=True)
def test_prop_with_polymorphic_2(self):
Person, Manager, Paperwork = (self.classes.Person,
- self.classes.Manager,
- self.classes.Paperwork)
+ self.classes.Manager,
+ self.classes.Paperwork)
sess = create_session()
self.assert_compile(
sess.query(Person).with_polymorphic(Manager).
- order_by(Person.person_id).
- join('paperwork', aliased=True).
- filter(Paperwork.description.like('%review%')),
- "SELECT people.person_id AS people_person_id, people.company_id AS people_company_id, "
- "people.name AS people_name, people.type AS people_type, managers.person_id "
- "AS managers_person_id, "
- "managers.status AS managers_status, managers.manager_name AS managers_manager_name "
- "FROM people LEFT OUTER JOIN managers ON people.person_id = managers.person_id JOIN "
- "paperwork AS paperwork_1 ON people.person_id = paperwork_1.person_id "
- "WHERE paperwork_1.description LIKE :description_1 ORDER BY people.person_id"
- , use_default_dialect=True
- )
+ order_by(Person.person_id).join('paperwork', aliased=True)
+ .filter(Paperwork.description.like('%review%')),
+ "SELECT people.person_id AS people_person_id, "
+ "people.company_id AS people_company_id, "
+ "people.name AS people_name, people.type AS people_type, "
+ "managers.person_id AS managers_person_id, "
+ "managers.status AS managers_status, "
+ "managers.manager_name AS managers_manager_name "
+ "FROM people LEFT OUTER JOIN managers "
+ "ON people.person_id = managers.person_id "
+ "JOIN paperwork AS paperwork_1 "
+ "ON people.person_id = paperwork_1.person_id "
+ "WHERE paperwork_1.description "
+ "LIKE :description_1 ORDER BY people.person_id",
+ use_default_dialect=True)
def test_explicit_polymorphic_join_one(self):
Company, Engineer = self.classes.Company, self.classes.Engineer
sess = create_session()
self.assert_compile(
- sess.query(Company).join(Engineer).filter(Engineer.engineer_name=='vlad'),
- "SELECT companies.company_id AS companies_company_id, companies.name AS "
- "companies_name "
+ sess.query(Company).join(Engineer)
+ .filter(Engineer.engineer_name == 'vlad'),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name "
"FROM companies JOIN (people JOIN engineers "
- "ON people.person_id = engineers.person_id) "
+ "ON people.person_id = engineers.person_id) "
"ON "
"companies.company_id = people.company_id "
- "WHERE engineers.engineer_name = :engineer_name_1"
- , use_default_dialect=True
- )
+ "WHERE engineers.engineer_name = :engineer_name_1",
+ use_default_dialect=True)
def test_explicit_polymorphic_join_two(self):
Company, Engineer = self.classes.Company, self.classes.Engineer
sess = create_session()
self.assert_compile(
- sess.query(Company).join(Engineer, Company.company_id==Engineer.company_id).
- filter(Engineer.engineer_name=='vlad'),
- "SELECT companies.company_id AS companies_company_id, companies.name "
- "AS companies_name "
+ sess.query(Company)
+ .join(Engineer, Company.company_id == Engineer.company_id)
+ .filter(Engineer.engineer_name == 'vlad'),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name "
"FROM companies JOIN "
- "(people JOIN engineers ON people.person_id = engineers.person_id) "
+ "(people JOIN engineers "
+ "ON people.person_id = engineers.person_id) "
"ON "
"companies.company_id = people.company_id "
- "WHERE engineers.engineer_name = :engineer_name_1"
- , use_default_dialect=True
- )
+ "WHERE engineers.engineer_name = :engineer_name_1",
+ use_default_dialect=True)
def test_multiple_adaption(self):
"""test that multiple filter() adapters get chained together "
and work correctly within a multiple-entry join()."""
- people, Company, Machine, engineers, machines, Engineer = (self.tables.people,
- self.classes.Company,
- self.classes.Machine,
- self.tables.engineers,
- self.tables.machines,
- self.classes.Engineer)
-
+ people, Company, Machine, engineers, machines, Engineer = (
+ self.tables.people,
+ self.classes.Company,
+ self.classes.Machine,
+ self.tables.engineers,
+ self.tables.machines,
+ self.classes.Engineer)
sess = create_session()
self.assert_compile(
- sess.query(Company).join(people.join(engineers), Company.employees).
- filter(Engineer.name=='dilbert'),
- "SELECT companies.company_id AS companies_company_id, companies.name AS "
- "companies_name "
+ sess.query(Company)
+ .join(people.join(engineers), Company.employees)
+ .filter(Engineer.name == 'dilbert'),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name "
"FROM companies JOIN (people "
"JOIN engineers ON people.person_id = "
"engineers.person_id) ON companies.company_id = "
- "people.company_id WHERE people.name = :name_1"
- , use_default_dialect = True
+ "people.company_id WHERE people.name = :name_1",
+ use_default_dialect=True
)
mach_alias = machines.select()
self.assert_compile(
- sess.query(Company).join(people.join(engineers), Company.employees).
- join(mach_alias, Engineer.machines, from_joinpoint=True).
- filter(Engineer.name=='dilbert').filter(Machine.name=='foo'),
- "SELECT companies.company_id AS companies_company_id, companies.name AS "
- "companies_name "
+ sess.query(Company).join(people.join(engineers), Company.employees)
+ .join(mach_alias, Engineer.machines, from_joinpoint=True).
+ filter(Engineer.name == 'dilbert').filter(Machine.name == 'foo'),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name "
"FROM companies JOIN (people "
"JOIN engineers ON people.person_id = "
"engineers.person_id) ON companies.company_id = "
"people.company_id JOIN "
- "(SELECT machines.machine_id AS machine_id, machines.name AS name, "
+ "(SELECT machines.machine_id AS machine_id, "
+ "machines.name AS name, "
"machines.engineer_id AS engineer_id "
- "FROM machines) AS anon_1 ON engineers.person_id = anon_1.engineer_id "
- "WHERE people.name = :name_1 AND anon_1.name = :name_2"
- , use_default_dialect = True
+ "FROM machines) AS anon_1 "
+ "ON engineers.person_id = anon_1.engineer_id "
+ "WHERE people.name = :name_1 AND anon_1.name = :name_2",
+ use_default_dialect=True
)
def test_auto_aliasing_multi_link(self):
sess = create_session()
Company, Engineer, Manager, Boss = self.classes.Company, \
- self.classes.Engineer, \
- self.classes.Manager, self.classes.Boss
+ self.classes.Engineer, \
+ self.classes.Manager, self.classes.Boss
q = sess.query(Company).\
- join(Company.employees.of_type(Engineer)).\
- join(Company.employees.of_type(Manager)).\
- join(Company.employees.of_type(Boss))
+ join(Company.employees.of_type(Engineer)).\
+ join(Company.employees.of_type(Manager)).\
+ join(Company.employees.of_type(Boss))
- self.assert_compile(q,
+ self.assert_compile(
+ q,
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name FROM companies "
- "JOIN (people JOIN engineers ON people.person_id = engineers.person_id) "
+ "JOIN (people JOIN engineers "
+ "ON people.person_id = engineers.person_id) "
"ON companies.company_id = people.company_id "
"JOIN (people AS people_1 JOIN managers AS managers_1 "
- "ON people_1.person_id = managers_1.person_id) "
- "ON companies.company_id = people_1.company_id "
+ "ON people_1.person_id = managers_1.person_id) "
+ "ON companies.company_id = people_1.company_id "
"JOIN (people AS people_2 JOIN managers AS managers_2 "
- "ON people_2.person_id = managers_2.person_id JOIN boss AS boss_1 "
- "ON managers_2.person_id = boss_1.boss_id) "
+ "ON people_2.person_id = managers_2.person_id JOIN boss AS boss_1 "
+ "ON managers_2.person_id = boss_1.boss_id) "
"ON companies.company_id = people_2.company_id",
- use_default_dialect=True
- )
+ use_default_dialect=True)
class JoinOnSynonymTest(_fixtures.FixtureTest, AssertsCompiledSQL):
self.assert_compile(
sess.query(User).join("orders", "items"),
"SELECT users.id AS users_id, users.name AS users_name FROM users "
- "JOIN orders ON users.id = orders.user_id JOIN order_items AS order_items_1 "
- "ON orders.id = order_items_1.order_id JOIN items ON items.id = order_items_1.item_id"
+ "JOIN orders ON users.id = orders.user_id "
+ "JOIN order_items AS order_items_1 "
+ "ON orders.id = order_items_1.order_id JOIN items "
+ "ON items.id = order_items_1.item_id"
)
- # test overlapping paths. User->orders is used by both joins, but rendered once.
+ # test overlapping paths. User->orders is used by both joins, but
+ # rendered once.
self.assert_compile(
- sess.query(User).join("orders", "items").join("orders", "address"),
- "SELECT users.id AS users_id, users.name AS users_name FROM users JOIN orders "
- "ON users.id = orders.user_id JOIN order_items AS order_items_1 ON orders.id = "
- "order_items_1.order_id JOIN items ON items.id = order_items_1.item_id JOIN addresses "
- "ON addresses.id = orders.address_id"
- )
+ sess.query(User).join("orders", "items").join(
+ "orders", "address"),
+ "SELECT users.id AS users_id, users.name AS users_name FROM users "
+ "JOIN orders "
+ "ON users.id = orders.user_id "
+ "JOIN order_items AS order_items_1 "
+ "ON orders.id = order_items_1.order_id "
+ "JOIN items ON items.id = order_items_1.item_id JOIN addresses "
+ "ON addresses.id = orders.address_id")
def test_invalid_kwarg_join(self):
User = self.classes.User
"FROM users FULL OUTER JOIN orders ON users.id = orders.user_id"
)
-
def test_multi_tuple_form(self):
"""test the 'tuple' form of join, now superseded
by the two-element join() form.
"""
Item, Order, User = (self.classes.Item,
- self.classes.Order,
- self.classes.User)
-
+ self.classes.Order,
+ self.classes.User)
sess = create_session()
- #assert_raises(
+ # assert_raises(
# sa.exc.SADeprecationWarning,
# sess.query(User).join, (Order, User.id==Order.user_id)
- #)
+ # )
self.assert_compile(
sess.query(User).join((Order, User.id == Order.user_id)),
self.assert_compile(
sess.query(User).join(
- (Order, User.id == Order.user_id),
- (Item, Order.items)),
+ (Order, User.id == Order.user_id),
+ (Item, Order.items)),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN orders ON users.id = orders.user_id "
"JOIN order_items AS order_items_1 ON orders.id = "
def test_single_prop_1(self):
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
self.assert_compile(
def test_single_prop_2(self):
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
self.assert_compile(
def test_single_prop_3(self):
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
oalias1 = aliased(Order)
def test_single_prop_4(self):
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
oalias1 = aliased(Order)
self.assert_compile(
sess.query(User).join(oalias1.user).join(oalias2.user),
"SELECT users.id AS users_id, users.name AS users_name "
- "FROM orders AS orders_1 JOIN users ON users.id = orders_1.user_id, "
- "orders AS orders_2 JOIN users ON users.id = orders_2.user_id"
- )
+ "FROM orders AS orders_1 JOIN users "
+ "ON users.id = orders_1.user_id, "
+ "orders AS orders_2 JOIN users ON users.id = orders_2.user_id")
def test_single_prop_5(self):
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
self.assert_compile(
sess.query(User).join(User.orders, Order.items),
"SELECT users.id AS users_id, users.name AS users_name FROM users "
- "JOIN orders ON users.id = orders.user_id JOIN order_items AS order_items_1 "
- "ON orders.id = order_items_1.order_id JOIN items ON items.id = order_items_1.item_id"
+ "JOIN orders ON users.id = orders.user_id "
+ "JOIN order_items AS order_items_1 "
+ "ON orders.id = order_items_1.order_id JOIN items "
+ "ON items.id = order_items_1.item_id"
)
def test_single_prop_6(self):
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
ualias = aliased(User)
def test_single_prop_7(self):
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
- # this query is somewhat nonsensical. the old system didn't render a correct
- # query for this. In this case its the most faithful to what was asked -
- # there's no linkage between User.orders and "oalias", so two FROM elements
- # are generated.
+ # this query is somewhat nonsensical. the old system didn't render a
+ # correct query for this. In this case it's the most faithful to what
+ # was asked - there's no linkage between User.orders and "oalias",
+ # so two FROM elements are generated.
oalias = aliased(Order)
self.assert_compile(
sess.query(User).join(User.orders, oalias.items),
"SELECT users.id AS users_id, users.name AS users_name FROM users "
"JOIN orders ON users.id = orders.user_id, "
- "orders AS orders_1 JOIN order_items AS order_items_1 ON orders_1.id = order_items_1.order_id "
- "JOIN items ON items.id = order_items_1.item_id"
- )
+ "orders AS orders_1 JOIN order_items AS order_items_1 "
+ "ON orders_1.id = order_items_1.order_id "
+ "JOIN items ON items.id = order_items_1.item_id")
def test_single_prop_8(self):
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
# same as before using an aliased() for User as well
oalias = aliased(Order)
self.assert_compile(
sess.query(ualias).join(ualias.orders, oalias.items),
- "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM users AS users_1 "
+ "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
+ "FROM users AS users_1 "
"JOIN orders ON users_1.id = orders.user_id, "
- "orders AS orders_1 JOIN order_items AS order_items_1 ON orders_1.id = order_items_1.order_id "
- "JOIN items ON items.id = order_items_1.item_id"
- )
+ "orders AS orders_1 JOIN order_items AS order_items_1 "
+ "ON orders_1.id = order_items_1.order_id "
+ "JOIN items ON items.id = order_items_1.item_id")
def test_single_prop_9(self):
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
self.assert_compile(
sess.query(User).filter(User.name == 'ed').from_self().
- join(User.orders),
- "SELECT anon_1.users_id AS anon_1_users_id, anon_1.users_name AS anon_1_users_name "
+ join(User.orders),
+ "SELECT anon_1.users_id AS anon_1_users_id, "
+ "anon_1.users_name AS anon_1_users_name "
"FROM (SELECT users.id AS users_id, users.name AS users_name "
"FROM users "
- "WHERE users.name = :name_1) AS anon_1 JOIN orders ON anon_1.users_id = orders.user_id"
+ "WHERE users.name = :name_1) AS anon_1 JOIN orders "
+ "ON anon_1.users_id = orders.user_id"
)
def test_single_prop_10(self):
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
self.assert_compile(
sess.query(User).join(User.addresses, aliased=True).
- filter(Address.email_address == 'foo'),
+ filter(Address.email_address == 'foo'),
"SELECT users.id AS users_id, users.name AS users_name "
- "FROM users JOIN addresses AS addresses_1 ON users.id = addresses_1.user_id "
+ "FROM users JOIN addresses AS addresses_1 "
+ "ON users.id = addresses_1.user_id "
"WHERE addresses_1.email_address = :email_address_1"
)
def test_single_prop_11(self):
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
self.assert_compile(
sess.query(User).join(User.orders, Order.items, aliased=True).
- filter(Item.id == 10),
+ filter(Item.id == 10),
"SELECT users.id AS users_id, users.name AS users_name "
- "FROM users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
- "JOIN order_items AS order_items_1 ON orders_1.id = order_items_1.order_id "
+ "FROM users JOIN orders AS orders_1 "
+ "ON users.id = orders_1.user_id "
+ "JOIN order_items AS order_items_1 "
+ "ON orders_1.id = order_items_1.order_id "
"JOIN items AS items_1 ON items_1.id = order_items_1.item_id "
- "WHERE items_1.id = :id_1"
- )
+ "WHERE items_1.id = :id_1")
def test_single_prop_12(self):
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
oalias1 = aliased(Order)
ualias = aliased(User)
self.assert_compile(
sess.query(ualias).
- join(oalias1, ualias.orders).\
- join(Address, ualias.addresses),
+ join(oalias1, ualias.orders).
+ join(Address, ualias.addresses),
"SELECT users_1.id AS users_1_id, users_1.name AS "
"users_1_name FROM users AS users_1 JOIN orders AS orders_1 "
"ON users_1.id = orders_1.user_id JOIN addresses ON users_1.id "
def test_single_prop_13(self):
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
# test #2 for [ticket:1706]
ualias2 = aliased(User)
self.assert_compile(
sess.query(ualias).
- join(Address, ualias.addresses).
- join(ualias2, Address.user).
- join(Order, ualias.orders),
- "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM users "
- "AS users_1 JOIN addresses ON users_1.id = addresses.user_id JOIN users AS users_2 "
- "ON users_2.id = addresses.user_id JOIN orders ON users_1.id = orders.user_id"
+ join(Address, ualias.addresses).
+ join(ualias2, Address.user).
+ join(Order, ualias.orders),
+ "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
+ "FROM users "
+ "AS users_1 JOIN addresses ON users_1.id = addresses.user_id "
+ "JOIN users AS users_2 "
+ "ON users_2.id = addresses.user_id JOIN orders "
+ "ON users_1.id = orders.user_id"
)
def test_overlapping_paths(self):
User = self.classes.User
- for aliased in (True,False):
- # load a user who has an order that contains item id 3 and address id 1 (order 3, owned by jack)
- result = create_session().query(User).join('orders', 'items', aliased=aliased).\
- filter_by(id=3).join('orders','address', aliased=aliased).filter_by(id=1).all()
+ for aliased in (True, False):
+ # load a user who has an order that contains item id 3 and address
+ # id 1 (order 3, owned by jack)
+ result = create_session().query(User) \
+ .join('orders', 'items', aliased=aliased) \
+ .filter_by(id=3) \
+ .join('orders', 'address', aliased=aliased) \
+ .filter_by(id=1).all()
assert [User(id=7, name='jack')] == result
def test_overlapping_paths_multilevel(self):
s = Session()
q = s.query(User).\
- join('orders').\
- join('addresses').\
- join('orders', 'items').\
- join('addresses', 'dingaling')
+ join('orders').\
+ join('addresses').\
+ join('orders', 'items').\
+ join('addresses', 'dingaling')
self.assert_compile(
q,
"SELECT users.id AS users_id, users.name AS users_name "
def test_overlapping_paths_outerjoin(self):
User = self.classes.User
- result = create_session().query(User).outerjoin('orders', 'items').\
- filter_by(id=3).outerjoin('orders','address').filter_by(id=1).all()
+ result = create_session().query(User).outerjoin('orders', 'items') \
+ .filter_by(id=3).outerjoin('orders', 'address') \
+ .filter_by(id=1).all()
assert [User(id=7, name='jack')] == result
def test_raises_on_dupe_target_rel(self):
sa.exc.SAWarning,
"Pathed join target Order.items has already been joined to; "
"skipping",
- lambda: create_session().query(User).outerjoin('orders', 'items').\
- outerjoin('orders', 'items')
+ lambda: create_session().query(User).outerjoin('orders', 'items').
+ outerjoin('orders', 'items')
)
def test_from_joinpoint(self):
Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
+ self.classes.User,
+ self.classes.Order)
sess = create_session()
- for oalias,ialias in [(True, True), (False, False), (True, False), (False, True)]:
+ for oalias, ialias in [
+ (True, True),
+ (False, False),
+ (True, False),
+ (False, True)]:
eq_(
- sess.query(User).join('orders', aliased=oalias).\
- join('items',
- from_joinpoint=True,
- aliased=ialias).\
- filter(Item.description == 'item 4').all(),
+ sess.query(User).join('orders', aliased=oalias)
+ .join('items', from_joinpoint=True, aliased=ialias)
+ .filter(Item.description == 'item 4').all(),
[User(name='jack')]
)
# use middle criterion
eq_(
- sess.query(User).join('orders', aliased=oalias).\
- filter(Order.user_id==9).\
- join('items', from_joinpoint=True,
- aliased=ialias).\
- filter(Item.description=='item 4').all(),
+ sess.query(User).join('orders', aliased=oalias)
+ .filter(Order.user_id == 9)
+ .join('items', from_joinpoint=True, aliased=ialias)
+ .filter(Item.description == 'item 4').all(),
[]
)
orderalias = aliased(Order)
itemalias = aliased(Item)
eq_(
- sess.query(User).join(orderalias, 'orders').
- join(itemalias, 'items', from_joinpoint=True).
- filter(itemalias.description == 'item 4').all(),
+ sess.query(User).join(orderalias, 'orders')
+ .join(itemalias, 'items', from_joinpoint=True)
+ .filter(itemalias.description == 'item 4').all(),
[User(name='jack')]
)
eq_(
- sess.query(User).join(orderalias, 'orders').
- join(itemalias, 'items', from_joinpoint=True).
- filter(orderalias.user_id==9).\
- filter(itemalias.description=='item 4').all(),
+ sess.query(User).join(orderalias, 'orders')
+ .join(itemalias, 'items', from_joinpoint=True)
+ .filter(orderalias.user_id == 9)
+ .filter(itemalias.description == 'item 4').all(),
[]
)
sess = create_session()
eq_(
- sess.query(User).join(Address.user).\
- filter(Address.email_address=='ed@wood.com').all(),
- [User(id=8,name='ed')]
+ sess.query(User).join(Address.user)
+ .filter(Address.email_address == 'ed@wood.com').all(),
+ [User(id=8, name='ed')]
)
# its actually not so controversial if you view it in terms
# of multiple entities.
eq_(
- sess.query(User, Address).join(Address.user).filter(Address.email_address=='ed@wood.com').all(),
- [(User(id=8,name='ed'), Address(email_address='ed@wood.com'))]
+ sess.query(User, Address).join(Address.user)
+ .filter(Address.email_address == 'ed@wood.com').all(),
+ [(User(id=8, name='ed'), Address(email_address='ed@wood.com'))]
)
- # this was the controversial part. now, raise an error if the feature is abused.
+ # this was the controversial part. now, raise an error if the feature
+ # is abused.
# before the error raise was added, this would silently work.....
assert_raises(
sa_exc.InvalidRequestError,
oalias1 = aliased(Order)
oalias2 = aliased(Order)
self.assert_compile(
- sess.query(ualias).join(oalias1, ualias.orders).
- join(oalias2, ualias.orders).
- filter(or_(oalias1.user_id==9, oalias2.user_id==7)),
- "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM users AS users_1 "
- "JOIN orders AS orders_1 ON users_1.id = orders_1.user_id JOIN orders AS orders_2 ON "
- "users_1.id = orders_2.user_id WHERE orders_1.user_id = :user_id_1 OR orders_2.user_id = :user_id_2",
- use_default_dialect=True
- )
+ sess.query(ualias).join(oalias1, ualias.orders)
+ .join(oalias2, ualias.orders)
+ .filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
+ "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
+ "FROM users AS users_1 "
+ "JOIN orders AS orders_1 ON users_1.id = orders_1.user_id "
+ "JOIN orders AS orders_2 ON "
+ "users_1.id = orders_2.user_id "
+ "WHERE orders_1.user_id = :user_id_1 "
+ "OR orders_2.user_id = :user_id_2",
+ use_default_dialect=True)
def test_select_from_orm_joins(self):
User, Order = self.classes.User, self.classes.Order
oalias2 = aliased(Order)
self.assert_compile(
- join(User, oalias2, User.id==oalias2.user_id),
+ join(User, oalias2, User.id == oalias2.user_id),
"users JOIN orders AS orders_1 ON users.id = orders_1.user_id",
use_default_dialect=True
)
self.assert_compile(
join(ualias, oalias1, ualias.orders),
- "users AS users_1 JOIN orders AS orders_1 ON users_1.id = orders_1.user_id",
- use_default_dialect=True
- )
+ "users AS users_1 JOIN orders AS orders_1 "
+ "ON users_1.id = orders_1.user_id",
+ use_default_dialect=True)
self.assert_compile(
- sess.query(ualias).select_from(join(ualias, oalias1, ualias.orders)),
- "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM users AS users_1 "
+ sess.query(ualias).select_from(
+ join(ualias, oalias1, ualias.orders)),
+ "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
+ "FROM users AS users_1 "
"JOIN orders AS orders_1 ON users_1.id = orders_1.user_id",
- use_default_dialect=True
- )
+ use_default_dialect=True)
self.assert_compile(
- sess.query(User, ualias).select_from(join(ualias, oalias1, ualias.orders)),
- "SELECT users.id AS users_id, users.name AS users_name, users_1.id AS users_1_id, "
- "users_1.name AS users_1_name FROM users, users AS users_1 JOIN orders AS orders_1 ON users_1.id = orders_1.user_id",
- use_default_dialect=True
- )
+ sess.query(User, ualias).select_from(
+ join(ualias, oalias1, ualias.orders)),
+ "SELECT users.id AS users_id, users.name AS users_name, "
+ "users_1.id AS users_1_id, "
+ "users_1.name AS users_1_name FROM users, users AS users_1 "
+ "JOIN orders AS orders_1 ON users_1.id = orders_1.user_id",
+ use_default_dialect=True)
# this fails (and we cant quite fix right now).
if False:
self.assert_compile(
- sess.query(User, ualias).\
- join(oalias1, ualias.orders).\
- join(oalias2, User.id==oalias2.user_id).\
- filter(or_(oalias1.user_id==9, oalias2.user_id==7)),
- "SELECT users.id AS users_id, users.name AS users_name, users_1.id AS users_1_id, users_1.name AS "
- "users_1_name FROM users JOIN orders AS orders_2 ON users.id = orders_2.user_id, "
- "users AS users_1 JOIN orders AS orders_1 ON users_1.id = orders_1.user_id "
- "WHERE orders_1.user_id = :user_id_1 OR orders_2.user_id = :user_id_2",
- use_default_dialect=True
- )
-
- # this is the same thing using explicit orm.join() (which now offers multiple again)
- self.assert_compile(
- sess.query(User, ualias).\
- select_from(
- join(ualias, oalias1, ualias.orders),
- join(User, oalias2, User.id==oalias2.user_id),
- ).\
- filter(or_(oalias1.user_id==9, oalias2.user_id==7)),
- "SELECT users.id AS users_id, users.name AS users_name, users_1.id AS users_1_id, users_1.name AS "
- "users_1_name FROM users AS users_1 JOIN orders AS orders_1 ON users_1.id = orders_1.user_id, "
+ sess.query(User, ualias).join(oalias1, ualias.orders)
+ .join(oalias2, User.id == oalias2.user_id)
+ .filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
+ "SELECT users.id AS users_id, users.name AS users_name, "
+ "users_1.id AS users_1_id, users_1.name AS "
+ "users_1_name FROM users JOIN orders AS orders_2 "
+ "ON users.id = orders_2.user_id, "
+ "users AS users_1 JOIN orders AS orders_1 "
+ "ON users_1.id = orders_1.user_id "
+ "WHERE orders_1.user_id = :user_id_1 "
+ "OR orders_2.user_id = :user_id_2",
+ use_default_dialect=True)
+
+ # this is the same thing using explicit orm.join() (which now offers
+ # multiple again)
+ self.assert_compile(
+ sess.query(User, ualias).select_from(
+ join(ualias, oalias1, ualias.orders),
+ join(User, oalias2, User.id == oalias2.user_id),)
+ .filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
+ "SELECT users.id AS users_id, users.name AS users_name, "
+ "users_1.id AS users_1_id, users_1.name AS "
+ "users_1_name FROM users AS users_1 JOIN orders AS orders_1 "
+ "ON users_1.id = orders_1.user_id, "
"users JOIN orders AS orders_2 ON users.id = orders_2.user_id "
- "WHERE orders_1.user_id = :user_id_1 OR orders_2.user_id = :user_id_2",
-
- use_default_dialect=True
- )
-
+ "WHERE orders_1.user_id = :user_id_1 "
+ "OR orders_2.user_id = :user_id_2",
+ use_default_dialect=True)
def test_overlapping_backwards_joins(self):
User, Order = self.classes.User, self.classes.Order
# but that is what was asked for so they get it !
self.assert_compile(
sess.query(User).join(oalias1.user).join(oalias2.user),
- "SELECT users.id AS users_id, users.name AS users_name FROM orders AS orders_1 "
- "JOIN users ON users.id = orders_1.user_id, orders AS orders_2 JOIN users ON users.id = orders_2.user_id",
- use_default_dialect=True,
- )
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM orders AS orders_1 "
+ "JOIN users ON users.id = orders_1.user_id, orders AS orders_2 "
+ "JOIN users ON users.id = orders_2.user_id",
+ use_default_dialect=True,)
def test_replace_multiple_from_clause(self):
"""test adding joins onto multiple FROM clauses"""
self.classes.Order,
self.classes.Address)
-
sess = create_session()
self.assert_compile(
- sess.query(Address, User).join(Address.dingaling).join(User.orders, Order.items),
- "SELECT addresses.id AS addresses_id, addresses.user_id AS addresses_user_id, "
- "addresses.email_address AS addresses_email_address, users.id AS users_id, "
- "users.name AS users_name FROM addresses JOIN dingalings ON addresses.id = dingalings.address_id, "
- "users JOIN orders ON users.id = orders.user_id JOIN order_items AS order_items_1 "
- "ON orders.id = order_items_1.order_id JOIN items ON items.id = order_items_1.item_id",
- use_default_dialect = True
+ sess.query(Address, User)
+ .join(Address.dingaling).join(User.orders, Order.items),
+ "SELECT addresses.id AS addresses_id, "
+ "addresses.user_id AS addresses_user_id, "
+ "addresses.email_address AS addresses_email_address, "
+ "users.id AS users_id, "
+ "users.name AS users_name FROM addresses JOIN dingalings "
+ "ON addresses.id = dingalings.address_id, "
+ "users JOIN orders ON users.id = orders.user_id "
+ "JOIN order_items AS order_items_1 "
+ "ON orders.id = order_items_1.order_id JOIN items "
+ "ON items.id = order_items_1.item_id",
+ use_default_dialect=True
)
def test_multiple_adaption(self):
Item, Order, User = (self.classes.Item,
- self.classes.Order,
- self.classes.User)
+ self.classes.Order,
+ self.classes.User)
sess = create_session()
self.assert_compile(
- sess.query(User).join(User.orders, Order.items, aliased=True).filter(Order.id==7).filter(Item.id==8),
- "SELECT users.id AS users_id, users.name AS users_name FROM users JOIN orders AS orders_1 "
- "ON users.id = orders_1.user_id JOIN order_items AS order_items_1 ON orders_1.id = order_items_1.order_id "
- "JOIN items AS items_1 ON items_1.id = order_items_1.item_id WHERE orders_1.id = :id_1 AND items_1.id = :id_2",
+ sess.query(User).join(User.orders, Order.items, aliased=True)
+ .filter(Order.id == 7).filter(Item.id == 8),
+ "SELECT users.id AS users_id, users.name AS users_name FROM users "
+ "JOIN orders AS orders_1 "
+ "ON users.id = orders_1.user_id JOIN order_items AS order_items_1 "
+ "ON orders_1.id = order_items_1.order_id "
+ "JOIN items AS items_1 ON items_1.id = order_items_1.item_id "
+ "WHERE orders_1.id = :id_1 AND items_1.id = :id_2",
use_default_dialect=True
)
def test_onclause_conditional_adaption(self):
Item, Order, orders, order_items, User = (self.classes.Item,
- self.classes.Order,
- self.tables.orders,
- self.tables.order_items,
- self.classes.User)
+ self.classes.Order,
+ self.tables.orders,
+ self.tables.order_items,
+ self.classes.User)
sess = create_session()
# be using the aliased flag in this way.
self.assert_compile(
sess.query(User).join(User.orders, aliased=True).
- join(Item,
- and_(Order.id==order_items.c.order_id, order_items.c.item_id==Item.id),
- from_joinpoint=True, aliased=True
- ),
- "SELECT users.id AS users_id, users.name AS users_name FROM users JOIN "
- "orders AS orders_1 ON users.id = orders_1.user_id JOIN items AS items_1 "
- "ON orders_1.id = order_items.order_id AND order_items.item_id = items_1.id",
+ join(Item,
+ and_(Order.id == order_items.c.order_id,
+ order_items.c.item_id == Item.id),
+ from_joinpoint=True, aliased=True),
+ "SELECT users.id AS users_id, users.name AS users_name FROM users "
+ "JOIN orders AS orders_1 ON users.id = orders_1.user_id "
+ "JOIN items AS items_1 "
+ "ON orders_1.id = order_items.order_id "
+ "AND order_items.item_id = items_1.id",
use_default_dialect=True
)
-
oalias = orders.select()
self.assert_compile(
- sess.query(User).join(oalias, User.orders).
- join(Item,
- and_(Order.id==order_items.c.order_id, order_items.c.item_id==Item.id),
- from_joinpoint=True
- ),
- "SELECT users.id AS users_id, users.name AS users_name FROM users JOIN "
- "(SELECT orders.id AS id, orders.user_id AS user_id, orders.address_id AS address_id, orders.description "
- "AS description, orders.isopen AS isopen FROM orders) AS anon_1 ON users.id = anon_1.user_id JOIN items "
- "ON anon_1.id = order_items.order_id AND order_items.item_id = items.id",
- use_default_dialect=True
- )
+ sess.query(User).join(oalias, User.orders)
+ .join(Item,
+ and_(
+ Order.id == order_items.c.order_id,
+ order_items.c.item_id == Item.id),
+ from_joinpoint=True),
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users JOIN "
+ "(SELECT orders.id AS id, orders.user_id AS user_id, "
+ "orders.address_id AS address_id, orders.description "
+ "AS description, orders.isopen AS isopen FROM orders) AS anon_1 "
+ "ON users.id = anon_1.user_id JOIN items "
+ "ON anon_1.id = order_items.order_id "
+ "AND order_items.item_id = items.id",
+ use_default_dialect=True)
# query.join(<stuff>, aliased=True).join(target, sql_expression)
- # or: query.join(path_to_some_joined_table_mapper).join(target, sql_expression)
+ # or: query.join(path_to_some_joined_table_mapper).join(target,
+ # sql_expression)
def test_pure_expression_error(self):
addresses, users = self.tables.addresses, self.tables.users
"FROM users JOIN addresses ON users.id = addresses.user_id"
)
-
def test_orderby_arg_bug(self):
User, users, Order = (self.classes.User,
- self.tables.users,
- self.classes.Order)
+ self.tables.users,
+ self.classes.Order)
sess = create_session()
# no arg error
- result = sess.query(User).join('orders', aliased=True).order_by(Order.id).reset_joinpoint().order_by(users.c.id).all()
+ result = sess.query(User).join('orders', aliased=True) \
+ .order_by(Order.id).reset_joinpoint().order_by(users.c.id).all()
def test_no_onclause(self):
Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
+ self.classes.User,
+ self.classes.Order)
sess = create_session()
eq_(
- sess.query(User).select_from(join(User, Order).join(Item, Order.items)).filter(Item.description == 'item 4').all(),
+ sess.query(User).select_from(join(User, Order)
+ .join(Item, Order.items))
+ .filter(Item.description == 'item 4').all(),
[User(name='jack')]
)
eq_(
- sess.query(User.name).select_from(join(User, Order).join(Item, Order.items)).filter(Item.description == 'item 4').all(),
+ sess.query(User.name).select_from(join(User, Order)
+ .join(Item, Order.items))
+ .filter(Item.description == 'item 4').all(),
[('jack',)]
)
def test_clause_onclause(self):
Item, Order, users, order_items, User = (self.classes.Item,
- self.classes.Order,
- self.tables.users,
- self.tables.order_items,
- self.classes.User)
+ self.classes.Order,
+ self.tables.users,
+ self.tables.order_items,
+ self.classes.User)
sess = create_session()
eq_(
- sess.query(User).join(Order, User.id==Order.user_id).
- join(order_items, Order.id==order_items.c.order_id).
- join(Item, order_items.c.item_id==Item.id).
- filter(Item.description == 'item 4').all(),
+ sess.query(User).join(Order, User.id == Order.user_id)
+ .join(order_items, Order.id == order_items.c.order_id)
+ .join(Item, order_items.c.item_id == Item.id)
+ .filter(Item.description == 'item 4').all(),
[User(name='jack')]
)
eq_(
- sess.query(User.name).join(Order, User.id==Order.user_id).
- join(order_items, Order.id==order_items.c.order_id).
- join(Item, order_items.c.item_id==Item.id).
- filter(Item.description == 'item 4').all(),
+ sess.query(User.name).join(Order, User.id == Order.user_id)
+ .join(order_items, Order.id == order_items.c.order_id)
+ .join(Item, order_items.c.item_id == Item.id)
+ .filter(Item.description == 'item 4').all(),
[('jack',)]
)
ualias = aliased(User)
eq_(
- sess.query(ualias.name).join(Order, ualias.id==Order.user_id).
- join(order_items, Order.id==order_items.c.order_id).
- join(Item, order_items.c.item_id==Item.id).
- filter(Item.description == 'item 4').all(),
+ sess.query(ualias.name).join(Order, ualias.id == Order.user_id)
+ .join(order_items, Order.id == order_items.c.order_id)
+ .join(Item, order_items.c.item_id == Item.id)
+ .filter(Item.description == 'item 4').all(),
[('jack',)]
)
# the onclause must be aliased against the query's custom
# FROM object
eq_(
- sess.query(User).order_by(User.id).offset(2).
- from_self().
- join(Order, User.id==Order.user_id).
- all(),
+ sess.query(User).order_by(User.id).offset(2)
+ .from_self()
+ .join(Order, User.id == Order.user_id)
+ .all(),
[User(name='fred')]
)
# same with an explicit select_from()
eq_(
- sess.query(User).select_entity_from(select([users]).
- order_by(User.id).offset(2).alias()).
- join(Order, User.id==Order.user_id).
- all(),
+ sess.query(User).select_entity_from(select([users])
+ .order_by(User.id)
+ .offset(2).alias())
+ .join(Order, User.id == Order.user_id).all(),
[User(name='fred')]
)
sess = create_session()
(user7, user8, user9, user10) = sess.query(User).all()
- (address1, address2, address3, address4, address5) = sess.query(Address).all()
+ (address1, address2, address3, address4, address5) = sess \
+ .query(Address).all()
expected = [(user7, address1),
- (user8, address2),
- (user8, address3),
- (user8, address4),
- (user9, address5),
- (user10, None)]
+ (user8, address2),
+ (user8, address3),
+ (user8, address4),
+ (user9, address5),
+ (user10, None)]
q = sess.query(User)
AdAlias = aliased(Address)
q = q.add_entity(AdAlias).select_from(outerjoin(User, AdAlias))
- l = q.order_by(User.id, AdAlias.id).all()
- eq_(l, expected)
+ result = q.order_by(User.id, AdAlias.id).all()
+ eq_(result, expected)
sess.expunge_all()
q = sess.query(User).add_entity(AdAlias)
- l = q.select_from(outerjoin(User, AdAlias)).filter(AdAlias.email_address=='ed@bettyboop.com').all()
- eq_(l, [(user8, address3)])
+ result = q.select_from(outerjoin(User, AdAlias)) \
+ .filter(AdAlias.email_address == 'ed@bettyboop.com').all()
+ eq_(result, [(user8, address3)])
- l = q.select_from(outerjoin(User, AdAlias, 'addresses')).filter(AdAlias.email_address=='ed@bettyboop.com').all()
- eq_(l, [(user8, address3)])
+ result = q.select_from(outerjoin(User, AdAlias, 'addresses')) \
+ .filter(AdAlias.email_address == 'ed@bettyboop.com').all()
+ eq_(result, [(user8, address3)])
- l = q.select_from(outerjoin(User, AdAlias, User.id==AdAlias.user_id)).filter(AdAlias.email_address=='ed@bettyboop.com').all()
- eq_(l, [(user8, address3)])
+ result = q.select_from(
+ outerjoin(User, AdAlias, User.id == AdAlias.user_id)).filter(
+ AdAlias.email_address == 'ed@bettyboop.com').all()
+ eq_(result, [(user8, address3)])
- # this is the first test where we are joining "backwards" - from AdAlias to User even though
+ # this is the first test where we are joining "backwards" - from
+ # AdAlias to User even though
# the query is against User
q = sess.query(User, AdAlias)
- l = q.join(AdAlias.user).filter(User.name=='ed').order_by(User.id, AdAlias.id)
- eq_(l.all(), [(user8, address2),(user8, address3),(user8, address4),])
+ result = q.join(AdAlias.user) \
+ .filter(User.name == 'ed').order_by(User.id, AdAlias.id)
+ eq_(result.all(), [(user8, address2),
+ (user8, address3), (user8, address4), ])
- q = sess.query(User, AdAlias).select_from(join(AdAlias, User, AdAlias.user)).filter(User.name=='ed')
- eq_(l.all(), [(user8, address2),(user8, address3),(user8, address4),])
+ q = sess.query(User, AdAlias).select_from(
+ join(AdAlias, User, AdAlias.user)).filter(User.name == 'ed')
+ eq_(result.all(), [(user8, address2),
+ (user8, address3), (user8, address4), ])
def test_expression_onclauses(self):
Order, User = self.classes.Order, self.classes.User
subq = sess.query(User).subquery()
self.assert_compile(
- sess.query(User).join(subq, User.name==subq.c.name),
+ sess.query(User).join(subq, User.name == subq.c.name),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN (SELECT users.id AS id, users.name "
"AS name FROM users) AS anon_1 ON users.name = anon_1.name",
use_default_dialect=True
)
-
subq = sess.query(Order).subquery()
self.assert_compile(
- sess.query(User).join(subq, User.id==subq.c.user_id),
+ sess.query(User).join(subq, User.id == subq.c.user_id),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users JOIN (SELECT orders.id AS id, orders.user_id AS user_id, "
"orders.address_id AS address_id, orders.description AS "
)
self.assert_compile(
- sess.query(User).join(Order, User.id==Order.user_id),
+ sess.query(User).join(Order, User.id == Order.user_id),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN orders ON users.id = orders.user_id",
use_default_dialect=True
)
-
def test_implicit_joins_from_aliases(self):
Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
+ self.classes.User,
+ self.classes.Order)
sess = create_session()
OrderAlias = aliased(Order)
- eq_(
- sess.query(OrderAlias).join('items').filter_by(description='item 3').\
- order_by(OrderAlias.id).all(),
+ eq_(sess.query(OrderAlias).join('items')
+ .filter_by(description='item 3').order_by(OrderAlias.id).all(),
[
- Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1),
- Order(address_id=4,description='order 2',isopen=0,user_id=9,id=2),
- Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3)
- ]
- )
-
- eq_(
- sess.query(User, OrderAlias, Item.description).
- join(OrderAlias, 'orders').
- join('items', from_joinpoint=True).
- filter_by(description='item 3').\
- order_by(User.id, OrderAlias.id).all(),
- [
- (User(name='jack',id=7), Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1), 'item 3'),
- (User(name='jack',id=7), Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3), 'item 3'),
- (User(name='fred',id=9), Order(address_id=4,description='order 2',isopen=0,user_id=9,id=2), 'item 3')
- ]
- )
+ Order(address_id=1, description='order 1', isopen=0, user_id=7,
+ id=1),
+ Order(address_id=4, description='order 2', isopen=0, user_id=9,
+ id=2),
+ Order(address_id=1, description='order 3', isopen=1, user_id=7,
+ id=3)
+ ])
+
+ eq_(sess.query(User, OrderAlias, Item.description).
+ join(OrderAlias, 'orders').join('items', from_joinpoint=True).
+ filter_by(description='item 3').order_by(User.id, OrderAlias.id).
+ all(),
+ [(User(name='jack', id=7),
+ Order(address_id=1, description='order 1', isopen=0, user_id=7,
+ id=1),
+ 'item 3'),
+ (User(name='jack', id=7),
+ Order(address_id=1, description='order 3', isopen=1, user_id=7,
+ id=3),
+ 'item 3'),
+ (User(name='fred', id=9),
+ Order(address_id=4, description='order 2', isopen=0, user_id=9,
+ id=2),
+ 'item 3')])
def test_aliased_classes_m2m(self):
Item, Order = self.classes.Item, self.classes.Order
]
q = sess.query(Order)
- q = q.add_entity(Item).select_from(join(Order, Item, 'items')).order_by(Order.id, Item.id)
- l = q.all()
- eq_(l, expected)
+ q = q.add_entity(Item).select_from(
+ join(Order, Item, 'items')).order_by(Order.id, Item.id)
+ result = q.all()
+ eq_(result, expected)
IAlias = aliased(Item)
- q = sess.query(Order, IAlias).select_from(join(Order, IAlias, 'items')).filter(IAlias.description=='item 3')
- l = q.all()
- eq_(l,
+ q = sess.query(Order, IAlias).select_from(
+ join(Order, IAlias, 'items')) \
+ .filter(IAlias.description == 'item 3')
+ result = q.all()
+ eq_(result,
[
(order1, item3),
(order2, item3),
(order3, item3),
- ]
- )
+ ])
def test_joins_from_adapted_entities(self):
User = self.classes.User
User = self.classes.User
for aliased in (True, False):
- # load a user who has an order that contains item id 3 and address id 1 (order 3, owned by jack)
- result = create_session().query(User).join('orders', 'items', aliased=aliased).filter_by(id=3).reset_joinpoint().join('orders','address', aliased=aliased).filter_by(id=1).all()
+ # load a user who has an order that contains item id 3 and address
+ # id 1 (order 3, owned by jack)
+ result = create_session().query(User) \
+ .join('orders', 'items', aliased=aliased) \
+ .filter_by(id=3).reset_joinpoint() \
+ .join('orders', 'address', aliased=aliased) \
+ .filter_by(id=1).all()
assert [User(id=7, name='jack')] == result
- result = create_session().query(User).join('orders', 'items', aliased=aliased, isouter=True).filter_by(id=3).reset_joinpoint().join('orders','address', aliased=aliased, isouter=True).filter_by(id=1).all()
+ result = create_session().query(User) \
+ .join('orders', 'items', aliased=aliased, isouter=True) \
+ .filter_by(id=3).reset_joinpoint() \
+ .join('orders', 'address', aliased=aliased, isouter=True) \
+ .filter_by(id=1).all()
assert [User(id=7, name='jack')] == result
- result = create_session().query(User).outerjoin('orders', 'items', aliased=aliased).filter_by(id=3).reset_joinpoint().outerjoin('orders','address', aliased=aliased).filter_by(id=1).all()
+ result = create_session().query(User).outerjoin(
+ 'orders', 'items', aliased=aliased).filter_by(
+ id=3).reset_joinpoint().outerjoin(
+ 'orders', 'address', aliased=aliased).filter_by(
+ id=1).all()
assert [User(id=7, name='jack')] == result
def test_overlap_with_aliases(self):
orders, User, users = (self.tables.orders,
- self.classes.User,
- self.tables.users)
+ self.classes.User,
+ self.tables.users)
oalias = orders.alias('oalias')
- result = create_session().query(User).select_from(users.join(oalias)).filter(oalias.c.description.in_(["order 1", "order 2", "order 3"])).join('orders', 'items').order_by(User.id).all()
+ result = create_session().query(User).select_from(users.join(oalias)) \
+ .filter(oalias.c.description.in_(
+ ["order 1", "order 2", "order 3"])) \
+ .join('orders', 'items').order_by(User.id).all()
assert [User(id=7, name='jack'), User(id=9, name='fred')] == result
- result = create_session().query(User).select_from(users.join(oalias)).filter(oalias.c.description.in_(["order 1", "order 2", "order 3"])).join('orders', 'items').filter_by(id=4).all()
+ result = create_session().query(User).select_from(users.join(oalias)) \
+ .filter(oalias.c.description.in_(
+ ["order 1", "order 2", "order 3"])) \
+ .join('orders', 'items').filter_by(id=4).all()
assert [User(id=7, name='jack')] == result
def test_aliased(self):
"""test automatic generation of aliased joins."""
Item, Order, User, Address = (self.classes.Item,
- self.classes.Order,
- self.classes.User,
- self.classes.Address)
-
+ self.classes.Order,
+ self.classes.User,
+ self.classes.Address)
sess = create_session()
# test a basic aliasized path
- q = sess.query(User).join('addresses', aliased=True).filter_by(email_address='jack@bean.com')
+ q = sess.query(User).join('addresses', aliased=True).filter_by(
+ email_address='jack@bean.com')
assert [User(id=7)] == q.all()
- q = sess.query(User).join('addresses', aliased=True).filter(Address.email_address=='jack@bean.com')
+ q = sess.query(User).join('addresses', aliased=True).filter(
+ Address.email_address == 'jack@bean.com')
assert [User(id=7)] == q.all()
- q = sess.query(User).join('addresses', aliased=True).filter(or_(Address.email_address=='jack@bean.com', Address.email_address=='fred@fred.com'))
+ q = sess.query(User).join('addresses', aliased=True).filter(or_(
+ Address.email_address == 'jack@bean.com',
+ Address.email_address == 'fred@fred.com'))
assert [User(id=7), User(id=9)] == q.all()
- # test two aliasized paths, one to 'orders' and the other to 'orders','items'.
- # one row is returned because user 7 has order 3 and also has order 1 which has item 1
+ # test two aliased paths, one to 'orders' and the other to
+ # 'orders','items'. one row is returned because user 7 has order 3 and
+ # also has order 1 which has item 1
# this tests a o2m join and a m2m join.
- q = sess.query(User).join('orders', aliased=True).filter(Order.description=="order 3").join('orders', 'items', aliased=True).filter(Item.description=="item 1")
+ q = sess.query(User).join('orders', aliased=True) \
+ .filter(Order.description == "order 3") \
+ .join('orders', 'items', aliased=True) \
+ .filter(Item.description == "item 1")
assert q.count() == 1
assert [User(id=7)] == q.all()
- # test the control version - same joins but not aliased. rows are not returned because order 3 does not have item 1
- q = sess.query(User).join('orders').filter(Order.description=="order 3").join('orders', 'items').filter(Item.description=="item 1")
+ # test the control version - same joins but not aliased. rows are not
+ # returned because order 3 does not have item 1
+ q = sess.query(User).join('orders').filter(
+ Order.description == "order 3").join(
+ 'orders', 'items').filter(
+ Item.description == "item 1")
assert [] == q.all()
assert q.count() == 0
# the left half of the join condition of the any() is aliased.
- q = sess.query(User).join('orders', aliased=True).filter(Order.items.any(Item.description=='item 4'))
+ q = sess.query(User).join('orders', aliased=True).filter(
+ Order.items.any(Item.description == 'item 4'))
assert [User(id=7)] == q.all()
# test that aliasing gets reset when join() is called
- q = sess.query(User).join('orders', aliased=True).filter(Order.description=="order 3").join('orders', aliased=True).filter(Order.description=="order 5")
+ q = sess.query(User).join('orders', aliased=True) \
+ .filter(Order.description == "order 3") \
+ .join('orders', aliased=True) \
+ .filter(Order.description == "order 5")
assert q.count() == 1
assert [User(id=7)] == q.all()
ualias = aliased(User)
eq_(
- sess.query(User, ualias).filter(User.id > ualias.id).order_by(desc(ualias.id), User.name).all(),
+ sess.query(User, ualias).filter(User.id > ualias.id)
+ .order_by(desc(ualias.id), User.name).all(),
[
- (User(id=10,name='chuck'), User(id=9,name='fred')),
- (User(id=10,name='chuck'), User(id=8,name='ed')),
- (User(id=9,name='fred'), User(id=8,name='ed')),
- (User(id=10,name='chuck'), User(id=7,name='jack')),
- (User(id=8,name='ed'), User(id=7,name='jack')),
- (User(id=9,name='fred'), User(id=7,name='jack'))
+ (User(id=10, name='chuck'), User(id=9, name='fred')),
+ (User(id=10, name='chuck'), User(id=8, name='ed')),
+ (User(id=9, name='fred'), User(id=8, name='ed')),
+ (User(id=10, name='chuck'), User(id=7, name='jack')),
+ (User(id=8, name='ed'), User(id=7, name='jack')),
+ (User(id=9, name='fred'), User(id=7, name='jack'))
]
)
def test_plain_table(self):
addresses, User = self.tables.addresses, self.classes.User
-
sess = create_session()
eq_(
- sess.query(User.name).join(addresses, User.id==addresses.c.user_id).order_by(User.id).all(),
+ sess.query(User.name)
+ .join(addresses, User.id == addresses.c.user_id)
+ .order_by(User.id).all(),
[('jack',), ('ed',), ('ed',), ('ed',), ('fred',)]
)
)
def test_select_from(self):
- """Test that the left edge of the join can be set reliably with select_from()."""
+ """Test that the left edge of the join can be set reliably with
+ select_from()."""
Item, Order, User = (self.classes.Item,
- self.classes.Order,
- self.classes.User)
-
+ self.classes.Order,
+ self.classes.User)
sess = create_session()
self.assert_compile(
- sess.query(Item.id).select_from(User).join(User.orders).join(Order.items),
+ sess.query(Item.id).select_from(User)
+ .join(User.orders).join(Order.items),
"SELECT items.id AS items_id FROM users JOIN orders ON "
"users.id = orders.user_id JOIN order_items AS order_items_1 "
"ON orders.id = order_items_1.order_id JOIN items ON items.id = "
# here, the join really wants to add a second FROM clause
# for "Item". but select_from disallows that
self.assert_compile(
- sess.query(Item.id).select_from(User).join(Item, User.id==Item.id),
- "SELECT items.id AS items_id FROM users JOIN items ON users.id = items.id",
- use_default_dialect=True
- )
-
-
-
+ sess.query(Item.id).select_from(User)
+ .join(Item, User.id == Item.id),
+ "SELECT items.id AS items_id FROM users JOIN items "
+ "ON users.id = items.id",
+ use_default_dialect=True)
def test_from_self_resets_joinpaths(self):
"""test a join from from_self() doesn't confuse joins inside the subquery
sess = create_session()
self.assert_compile(
- sess.query(Item).join(Item.keywords).from_self(Keyword).join(Item.keywords),
- "SELECT keywords.id AS keywords_id, keywords.name AS keywords_name FROM "
- "(SELECT items.id AS items_id, items.description AS items_description "
+ sess.query(Item).join(Item.keywords).from_self(Keyword)
+ .join(Item.keywords),
+ "SELECT keywords.id AS keywords_id, "
+ "keywords.name AS keywords_name "
+ "FROM (SELECT items.id AS items_id, "
+ "items.description AS items_description "
"FROM items JOIN item_keywords AS item_keywords_1 ON items.id = "
- "item_keywords_1.item_id JOIN keywords ON keywords.id = item_keywords_1.keyword_id) "
+ "item_keywords_1.item_id JOIN keywords "
+ "ON keywords.id = item_keywords_1.keyword_id) "
"AS anon_1 JOIN item_keywords AS item_keywords_2 ON "
"anon_1.items_id = item_keywords_2.item_id "
"JOIN keywords ON "
"keywords.id = item_keywords_2.keyword_id",
- use_default_dialect=True
- )
+ use_default_dialect=True)
+
class JoinFromSelectableTest(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = 'default'
@classmethod
def define_tables(cls, metadata):
Table('table1', metadata,
- Column('id', Integer, primary_key=True)
- )
+ Column('id', Integer, primary_key=True))
Table('table2', metadata,
- Column('id', Integer, primary_key=True),
- Column('t1_id', Integer)
- )
+ Column('id', Integer, primary_key=True),
+ Column('t1_id', Integer))
@classmethod
def setup_classes(cls):
table1, table2 = cls.tables.table1, cls.tables.table2
+
class T1(cls.Comparable):
pass
sess = Session()
subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\
- group_by(T2.t1_id).subquery()
+ group_by(T2.t1_id).subquery()
self.assert_compile(
- sess.query(subq.c.count, T1.id).select_from(subq).join(T1, subq.c.t1_id==T1.id),
+ sess.query(subq.c.count, T1.id)
+ .select_from(subq).join(T1, subq.c.t1_id == T1.id),
"SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
"FROM (SELECT table2.t1_id AS t1_id, "
"count(table2.id) AS count FROM table2 "
- "GROUP BY table2.t1_id) AS anon_1 JOIN table1 ON anon_1.t1_id = table1.id"
+ "GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
+ "ON anon_1.t1_id = table1.id"
)
def test_select_mapped_to_mapped_implicit_left(self):
sess = Session()
subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\
- group_by(T2.t1_id).subquery()
+ group_by(T2.t1_id).subquery()
self.assert_compile(
- sess.query(subq.c.count, T1.id).join(T1, subq.c.t1_id==T1.id),
+ sess.query(subq.c.count, T1.id).join(T1, subq.c.t1_id == T1.id),
"SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
"FROM (SELECT table2.t1_id AS t1_id, "
"count(table2.id) AS count FROM table2 "
- "GROUP BY table2.t1_id) AS anon_1 JOIN table1 ON anon_1.t1_id = table1.id"
+ "GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
+ "ON anon_1.t1_id = table1.id"
)
def test_select_mapped_to_select_explicit_left(self):
sess = Session()
subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\
- group_by(T2.t1_id).subquery()
+ group_by(T2.t1_id).subquery()
self.assert_compile(
- sess.query(subq.c.count, T1.id).select_from(T1).join(subq, subq.c.t1_id==T1.id),
+ sess.query(subq.c.count, T1.id).select_from(T1)
+ .join(subq, subq.c.t1_id == T1.id),
"SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
"FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
"count(table2.id) AS count FROM table2 GROUP BY table2.t1_id) "
sess = Session()
subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\
- group_by(T2.t1_id).subquery()
+ group_by(T2.t1_id).subquery()
assert_raises_message(
sa_exc.InvalidRequestError,
r"Can't construct a join from ",
- sess.query(subq.c.count, T1.id).join, subq, subq.c.t1_id==T1.id,
+ sess.query(subq.c.count, T1.id).join, subq, subq.c.t1_id == T1.id,
)
def test_mapped_select_to_mapped_implicit_left(self):
sess = Session()
subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\
- group_by(T2.t1_id).subquery()
+ group_by(T2.t1_id).subquery()
assert_raises_message(
sa_exc.InvalidRequestError,
)
self.assert_compile(
- sess.query(T1.id, subq.c.count).select_from(subq).\
- join(T1, subq.c.t1_id == T1.id),
+ sess.query(T1.id, subq.c.count).select_from(subq).
+ join(T1, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
"FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 "
"JOIN table1 ON anon_1.t1_id = table1.id"
)
-
def test_mapped_select_to_mapped_explicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = Session()
subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\
- group_by(T2.t1_id).subquery()
+ group_by(T2.t1_id).subquery()
self.assert_compile(
- sess.query(T1.id, subq.c.count).select_from(subq).join(T1, subq.c.t1_id==T1.id),
+ sess.query(T1.id, subq.c.count).select_from(subq)
+ .join(T1, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
"FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
sess = Session()
subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\
- group_by(T2.t1_id).subquery()
+ group_by(T2.t1_id).subquery()
self.assert_compile(
- sess.query(T1.id, subq.c.count).select_from(T1).join(subq, subq.c.t1_id==T1.id),
+ sess.query(T1.id, subq.c.count).select_from(T1)
+ .join(subq, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
- "FROM table1 JOIN (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
+ "FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
+ "count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 "
- "ON anon_1.t1_id = table1.id"
- )
+ "ON anon_1.t1_id = table1.id")
def test_mapped_select_to_select_implicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = Session()
subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\
- group_by(T2.t1_id).subquery()
+ group_by(T2.t1_id).subquery()
self.assert_compile(
- sess.query(T1.id, subq.c.count).join(subq, subq.c.t1_id==T1.id),
+ sess.query(T1.id, subq.c.count).join(subq, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
- "FROM table1 JOIN (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
+ "FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
+ "count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 "
- "ON anon_1.t1_id = table1.id"
- )
+ "ON anon_1.t1_id = table1.id")
+
class MultiplePathTest(fixtures.MappedTest, AssertsCompiledSQL):
@classmethod
def define_tables(cls, metadata):
t1 = Table('t1', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)))
t2 = Table('t2', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)))
t1t2_1 = Table('t1t2_1', metadata,
- Column('t1id', Integer, ForeignKey('t1.id')),
- Column('t2id', Integer, ForeignKey('t2.id'))
- )
+ Column('t1id', Integer, ForeignKey('t1.id')),
+ Column('t2id', Integer, ForeignKey('t2.id')))
t1t2_2 = Table('t1t2_2', metadata,
- Column('t1id', Integer, ForeignKey('t1.id')),
- Column('t2id', Integer, ForeignKey('t2.id'))
- )
+ Column('t1id', Integer, ForeignKey('t1.id')),
+ Column('t2id', Integer, ForeignKey('t2.id')))
def test_basic(self):
t2, t1t2_1, t1t2_2, t1 = (self.tables.t2,
- self.tables.t1t2_1,
- self.tables.t1t2_2,
- self.tables.t1)
+ self.tables.t1t2_1,
+ self.tables.t1t2_2,
+ self.tables.t1)
class T1(object):
pass
+
class T2(object):
pass
})
mapper(T2, t2)
- q = create_session().query(T1).join('t2s_1').filter(t2.c.id==5).reset_joinpoint().join('t2s_2')
+ q = create_session().query(T1).join('t2s_1') \
+ .filter(t2.c.id == 5).reset_joinpoint().join('t2s_2')
self.assert_compile(
q,
- "SELECT t1.id AS t1_id, t1.data AS t1_data FROM t1 JOIN t1t2_1 AS t1t2_1_1 "
- "ON t1.id = t1t2_1_1.t1id JOIN t2 ON t2.id = t1t2_1_1.t2id JOIN t1t2_2 AS t1t2_2_1 "
- "ON t1.id = t1t2_2_1.t1id JOIN t2 ON t2.id = t1t2_2_1.t2id WHERE t2.id = :id_1"
- , use_default_dialect=True
- )
+ "SELECT t1.id AS t1_id, t1.data AS t1_data FROM t1 "
+ "JOIN t1t2_1 AS t1t2_1_1 "
+ "ON t1.id = t1t2_1_1.t1id JOIN t2 ON t2.id = t1t2_1_1.t2id "
+ "JOIN t1t2_2 AS t1t2_2_1 "
+ "ON t1.id = t1t2_2_1.t1id JOIN t2 ON t2.id = t1t2_2_1.t2id "
+ "WHERE t2.id = :id_1",
+ use_default_dialect=True)
class SelfRefMixedTest(fixtures.MappedTest, AssertsCompiledSQL):
@classmethod
def define_tables(cls, metadata):
nodes = Table('nodes', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('parent_id', Integer, ForeignKey('nodes.id'))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('parent_id', Integer, ForeignKey('nodes.id')))
sub_table = Table('sub_table', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('node_id', Integer, ForeignKey('nodes.id')),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('node_id', Integer, ForeignKey('nodes.id')))
assoc_table = Table('assoc_table', metadata,
- Column('left_id', Integer, ForeignKey('nodes.id')),
- Column('right_id', Integer, ForeignKey('nodes.id'))
- )
+ Column('left_id', Integer, ForeignKey('nodes.id')),
+ Column('right_id', Integer,
+ ForeignKey('nodes.id')))
@classmethod
def setup_classes(cls):
nodes, assoc_table, sub_table = (cls.tables.nodes,
- cls.tables.assoc_table,
- cls.tables.sub_table)
+ cls.tables.assoc_table,
+ cls.tables.sub_table)
class Node(cls.Comparable):
pass
pass
mapper(Node, nodes, properties={
- 'children':relationship(Node, lazy='select', join_depth=3,
- backref=backref('parent', remote_side=[nodes.c.id])
- ),
- 'subs' : relationship(Sub),
- 'assoc':relationship(Node,
- secondary=assoc_table,
- primaryjoin=nodes.c.id==assoc_table.c.left_id,
- secondaryjoin=nodes.c.id==assoc_table.c.right_id)
+ 'children': relationship(Node, lazy='select', join_depth=3,
+ backref=backref(
+ 'parent', remote_side=[nodes.c.id])
+ ),
+ 'subs': relationship(Sub),
+ 'assoc': relationship(
+ Node,
+ secondary=assoc_table,
+ primaryjoin=nodes.c.id == assoc_table.c.left_id,
+ secondaryjoin=nodes.c.id == assoc_table.c.right_id)
})
mapper(Sub, sub_table)
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
"FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = "
"assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = "
- "assoc_table_1.right_id JOIN sub_table ON nodes_1.id = sub_table.node_id",
+ "assoc_table_1.right_id JOIN sub_table "
+ "ON nodes_1.id = sub_table.node_id",
)
self.assert_compile(
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
"FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = "
"assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = "
- "assoc_table_1.right_id JOIN sub_table ON nodes.id = sub_table.node_id",
+ "assoc_table_1.right_id JOIN sub_table "
+ "ON nodes.id = sub_table.node_id",
)
+
class CreateJoinsTest(fixtures.ORMTest, AssertsCompiledSQL):
__dialect__ = 'default'
m = MetaData()
base = Table('base', m, Column('id', Integer, primary_key=True))
a = Table('a', m,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- Column('b_id', Integer, ForeignKey('b.id')))
+ Column('id', Integer, ForeignKey('base.id'),
+ primary_key=True),
+ Column('b_id', Integer, ForeignKey('b.id')))
b = Table('b', m,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- Column('c_id', Integer, ForeignKey('c.id')))
+ Column('id', Integer, ForeignKey('base.id'),
+ primary_key=True),
+ Column('c_id', Integer, ForeignKey('c.id')))
c = Table('c', m,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True))
+ Column('id', Integer, ForeignKey('base.id'),
+ primary_key=True))
+
class Base(object):
pass
+
class A(Base):
pass
+
class B(Base):
pass
+
class C(Base):
pass
mapper(Base, base)
- mapper(A, a, inherits=Base, properties={'b':relationship(B, primaryjoin=a.c.b_id==b.c.id)})
- mapper(B, b, inherits=Base, properties={'c':relationship(C, primaryjoin=b.c.c_id==c.c.id)})
+ mapper(A, a, inherits=Base, properties={
+ 'b': relationship(B, primaryjoin=a.c.b_id == b.c.id)})
+ mapper(B, b, inherits=Base, properties={
+ 'c': relationship(C, primaryjoin=b.c.c_id == c.c.id)})
mapper(C, c, inherits=Base)
return A, B, C, Base
A, B, C, Base = self._inherits_fixture()
s = Session()
self.assert_compile(
- s.query(A).filter(A.b.has(B.c.has(C.id==5))),
+ s.query(A).filter(A.b.has(B.c.has(C.id == 5))),
"SELECT a.id AS a_id, base.id AS base_id, a.b_id AS a_b_id "
"FROM base JOIN a ON base.id = a.id WHERE "
"EXISTS (SELECT 1 FROM (SELECT base.id AS base_id, b.id AS "
@classmethod
def define_tables(cls, metadata):
Table("parent", metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(50)),
- )
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)))
Table("child", metadata,
- Column('id', Integer, primary_key=True),
- Column('parent_id', Integer, ForeignKey('parent.id')),
- Column('data', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('parent_id', Integer, ForeignKey('parent.id')),
+ Column('data', String(50)))
@classmethod
def setup_mappers(cls):
parent, child = cls.tables.parent, cls.tables.child
+
class Parent(cls.Comparable):
pass
npc = self.npc
sess = Session()
self.assert_compile(
- sess.query(Parent).join(Parent.npc).filter(self.derived.c.data == 'x'),
+ sess.query(Parent).join(Parent.npc)
+ .filter(self.derived.c.data == 'x'),
"SELECT parent.id AS parent_id, parent.data AS parent_data "
- "FROM parent JOIN (SELECT child.id AS id, child.parent_id AS parent_id, "
+ "FROM parent JOIN (SELECT child.id AS id, "
+ "child.parent_id AS parent_id, "
"child.data AS data "
"FROM child) AS anon_1 ON parent.id = anon_1.parent_id "
- "WHERE anon_1.data = :data_1"
- )
+ "WHERE anon_1.data = :data_1")
def test_join_parent_child_select_from(self):
Parent = self.classes.Parent
npc = self.npc
sess = Session()
self.assert_compile(
- sess.query(npc).select_from(Parent).join(Parent.npc).\
- filter(self.derived.c.data == 'x'),
+ sess.query(npc).select_from(Parent).join(Parent.npc)
+ .filter(self.derived.c.data == 'x'),
"SELECT anon_1.id AS anon_1_id, anon_1.parent_id "
"AS anon_1_parent_id, anon_1.data AS anon_1_data "
"FROM parent JOIN (SELECT child.id AS id, child.parent_id AS "
npc = self.npc
sess = Session()
self.assert_compile(
- sess.query(Parent, npc).join(Parent.npc).filter(
- self.derived.c.data == 'x'),
+ sess.query(Parent, npc).join(Parent.npc)
+ .filter(self.derived.c.data == 'x'),
"SELECT parent.id AS parent_id, parent.data AS parent_data, "
"anon_1.id AS anon_1_id, anon_1.parent_id AS anon_1_parent_id, "
"anon_1.data AS anon_1_data FROM parent JOIN "
@classmethod
def define_tables(cls, metadata):
Table('nodes', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('parent_id', Integer, ForeignKey('nodes.id')),
- Column('data', String(30)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('parent_id', Integer, ForeignKey('nodes.id')),
+ Column('data', String(30)))
@classmethod
def setup_classes(cls):
- class Node(cls.Comparable):
- def append(self, node):
- self.children.append(node)
+ class Node(cls.Comparable):
+ def append(self, node):
+ self.children.append(node)
@classmethod
def setup_mappers(cls):
Node, nodes = cls.classes.Node, cls.tables.nodes
mapper(Node, nodes, properties={
- 'children':relationship(Node, lazy='select', join_depth=3,
- backref=backref('parent', remote_side=[nodes.c.id])
- ),
+ 'children': relationship(Node, lazy='select', join_depth=3,
+ backref=backref(
+ 'parent', remote_side=[nodes.c.id])
+ ),
})
@classmethod
Node = self.classes.Node
sess = create_session()
- node = sess.query(Node).join('children', aliased=True).filter_by(data='n122').first()
- assert node.data=='n12'
+ node = sess.query(Node) \
+ .join('children', aliased=True).filter_by(data='n122').first()
+ assert node.data == 'n12'
def test_join_2(self):
Node = self.classes.Node
sess = create_session()
- ret = sess.query(Node.data).join(Node.children, aliased=True).filter_by(data='n122').all()
+ ret = sess.query(Node.data) \
+ .join(Node.children, aliased=True).filter_by(data='n122').all()
assert ret == [('n12',)]
-
def test_join_3(self):
Node = self.classes.Node
sess = create_session()
- node = sess.query(Node).join('children', 'children', aliased=True).filter_by(data='n122').first()
- assert node.data=='n1'
+ node = sess.query(Node) \
+ .join('children', 'children', aliased=True) \
+ .filter_by(data='n122').first()
+ assert node.data == 'n1'
def test_join_4(self):
Node = self.classes.Node
sess = create_session()
- node = sess.query(Node).filter_by(data='n122').join('parent', aliased=True).filter_by(data='n12').\
- join('parent', aliased=True, from_joinpoint=True).filter_by(data='n1').first()
+ node = sess.query(Node) \
+ .filter_by(data='n122').join('parent', aliased=True) \
+ .filter_by(data='n12') \
+ .join('parent', aliased=True, from_joinpoint=True) \
+ .filter_by(data='n1').first()
assert node.data == 'n122'
def test_string_or_prop_aliased(self):
Node = self.classes.Node
-
sess = create_session()
- nalias = aliased(Node, sess.query(Node).filter_by(data='n1').subquery())
+ nalias = aliased(Node,
+ sess.query(Node).filter_by(data='n1').subquery())
q1 = sess.query(nalias).join(nalias.children, aliased=True).\
- join(Node.children, from_joinpoint=True)
+ join(Node.children, from_joinpoint=True)
q2 = sess.query(nalias).join(nalias.children, aliased=True).\
- join("children", from_joinpoint=True)
+ join("children", from_joinpoint=True)
for q in (q1, q2):
self.assert_compile(
)
q1 = sess.query(Node).join(nalias.children, aliased=True).\
- join(Node.children, aliased=True, from_joinpoint=True).\
- join(Node.children, from_joinpoint=True)
+ join(Node.children, aliased=True, from_joinpoint=True).\
+ join(Node.children, from_joinpoint=True)
q2 = sess.query(Node).join(nalias.children, aliased=True).\
- join("children", aliased=True, from_joinpoint=True).\
- join("children", from_joinpoint=True)
+ join("children", aliased=True, from_joinpoint=True).\
+ join("children", from_joinpoint=True)
for q in (q1, q2):
self.assert_compile(
# n1 is not inside the from_self(), so all cols must be maintained
# on the outside
self.assert_compile(
- sess.query(Node).filter(Node.data=='n122').from_self(n1, Node.id),
- "SELECT nodes_1.id AS nodes_1_id, nodes_1.parent_id AS nodes_1_parent_id, "
+ sess.query(Node).filter(Node.data == 'n122')
+ .from_self(n1, Node.id),
+ "SELECT nodes_1.id AS nodes_1_id, "
+ "nodes_1.parent_id AS nodes_1_parent_id, "
"nodes_1.data AS nodes_1_data, anon_1.nodes_id AS anon_1_nodes_id "
"FROM nodes AS nodes_1, (SELECT nodes.id AS nodes_id, "
- "nodes.parent_id AS nodes_parent_id, nodes.data AS nodes_data FROM "
+ "nodes.parent_id AS nodes_parent_id, "
+ "nodes.data AS nodes_data FROM "
"nodes WHERE nodes.data = :data_1) AS anon_1",
- use_default_dialect=True
- )
+ use_default_dialect=True)
parent = aliased(Node)
grandparent = aliased(Node)
q = sess.query(Node, parent, grandparent).\
join(parent, Node.parent).\
join(grandparent, parent.parent).\
- filter(Node.data=='n122').filter(parent.data=='n12').\
- filter(grandparent.data=='n1').from_self().limit(1)
+ filter(Node.data == 'n122').filter(parent.data == 'n12').\
+ filter(grandparent.data == 'n1').from_self().limit(1)
# parent, grandparent *are* inside the from_self(), so they
# should get aliased to the outside.
"anon_1.nodes_2_data AS anon_1_nodes_2_data "
"FROM (SELECT nodes.id AS nodes_id, nodes.parent_id "
"AS nodes_parent_id, nodes.data AS nodes_data, "
- "nodes_1.id AS nodes_1_id, nodes_1.parent_id AS nodes_1_parent_id, "
+ "nodes_1.id AS nodes_1_id, "
+ "nodes_1.parent_id AS nodes_1_parent_id, "
"nodes_1.data AS nodes_1_data, nodes_2.id AS nodes_2_id, "
"nodes_2.parent_id AS nodes_2_parent_id, nodes_2.data AS "
"nodes_2_data FROM nodes JOIN nodes AS nodes_1 ON "
"ON nodes_2.id = nodes_1.parent_id "
"WHERE nodes.data = :data_1 AND nodes_1.data = :data_2 AND "
"nodes_2.data = :data_3) AS anon_1 LIMIT :param_1",
- {'param_1':1},
- use_default_dialect=True
- )
+ {'param_1': 1},
+ use_default_dialect=True)
def test_explicit_join_1(self):
Node = self.classes.Node
n1 = aliased(Node)
n2 = aliased(Node)
- # the join_to_left=False here is unfortunate. the default on this flag should
- # be False.
+ # the join_to_left=False here is unfortunate. the default on this
+ # flag should be False.
self.assert_compile(
- join(Node, n1, Node.children).join(n2, Node.children, join_to_left=False),
+ join(Node, n1, Node.children)
+ .join(n2, Node.children, join_to_left=False),
"nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
"JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id",
use_default_dialect=True
self.assert_compile(
sess.query(Node).join(n1, Node.children).join(n2, n1.children),
- "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, nodes.data AS "
- "nodes_data FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
+ "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
+ "nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
+ "ON nodes.id = nodes_1.parent_id "
"JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
- use_default_dialect=True
- )
+ use_default_dialect=True)
def test_explicit_join_5(self):
Node = self.classes.Node
self.assert_compile(
sess.query(Node).join(n1, Node.children).join(n2, Node.children),
- "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, nodes.data AS "
- "nodes_data FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
+ "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
+ "nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
+ "ON nodes.id = nodes_1.parent_id "
"JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id",
- use_default_dialect=True
- )
+ use_default_dialect=True)
def test_explicit_join_6(self):
Node = self.classes.Node
n1 = aliased(Node)
node = sess.query(Node).select_from(join(Node, n1, 'children')).\
- filter(n1.data == 'n122').first()
+ filter(n1.data == 'n122').first()
assert node.data == 'n12'
def test_explicit_join_7(self):
n2 = aliased(Node)
node = sess.query(Node).select_from(
- join(Node, n1, 'children').join(n2, 'children')).\
+ join(Node, n1, 'children').join(n2, 'children')).\
filter(n2.data == 'n122').first()
assert node.data == 'n1'
# mix explicit and named onclauses
node = sess.query(Node).select_from(
- join(Node, n1, Node.id == n1.parent_id).join(n2, 'children')).\
+ join(Node, n1, Node.id == n1.parent_id).join(n2, 'children')).\
filter(n2.data == 'n122').first()
assert node.data == 'n1'
n1 = aliased(Node)
n2 = aliased(Node)
- node = sess.query(Node).select_from(join(Node, n1, 'parent').join(n2, 'parent')).\
- filter(and_(Node.data == 'n122', n1.data == 'n12', n2.data == 'n1')).first()
+ node = sess.query(Node).select_from(
+ join(Node, n1, 'parent').join(n2, 'parent')).filter(
+ and_(Node.data == 'n122', n1.data == 'n12', n2.data == 'n1')) \
+ .first()
assert node.data == 'n122'
def test_explicit_join_10(self):
n2 = aliased(Node)
eq_(
- list(sess.query(Node).select_from(join(Node, n1, 'parent').join(n2, 'parent')).\
- filter(and_(Node.data == 'n122',
- n1.data == 'n12',
- n2.data == 'n1')).values(Node.data, n1.data, n2.data)),
+ list(sess.query(Node).select_from(join(Node, n1, 'parent')
+ .join(n2, 'parent')).
+ filter(and_(Node.data == 'n122',
+ n1.data == 'n12',
+ n2.data == 'n1')).values(Node.data, n1.data,
+ n2.data)),
[('n122', 'n12', 'n1')])
def test_join_to_nonaliased(self):
n1 = aliased(Node)
# using 'n1.parent' implicitly joins to unaliased Node
- eq_(
- sess.query(n1).join(n1.parent).filter(Node.data=='n1').all(),
- [Node(parent_id=1,data='n11',id=2), Node(parent_id=1,data='n12',id=3), Node(parent_id=1,data='n13',id=4)]
- )
+ eq_(sess.query(n1).join(n1.parent).filter(Node.data == 'n1').all(),
+ [Node(parent_id=1, data='n11', id=2),
+ Node(parent_id=1, data='n12', id=3),
+ Node(parent_id=1, data='n13', id=4)])
# explicit (new syntax)
- eq_(
- sess.query(n1).join(Node, n1.parent).filter(Node.data=='n1').all(),
- [Node(parent_id=1,data='n11',id=2), Node(parent_id=1,data='n12',id=3), Node(parent_id=1,data='n13',id=4)]
- )
-
+ eq_(sess.query(n1).join(Node, n1.parent).filter(Node.data
+ == 'n1').all(),
+ [Node(parent_id=1, data='n11', id=2),
+ Node(parent_id=1, data='n12', id=3),
+ Node(parent_id=1, data='n13', id=4)])
def test_multiple_explicit_entities_one(self):
Node = self.classes.Node
parent = aliased(Node)
grandparent = aliased(Node)
eq_(
- sess.query(Node, parent, grandparent).\
- join(parent, Node.parent).\
- join(grandparent, parent.parent).\
- filter(Node.data=='n122').filter(parent.data=='n12').\
- filter(grandparent.data=='n1').first(),
+ sess.query(Node, parent, grandparent).
+ join(parent, Node.parent).
+ join(grandparent, parent.parent).
+ filter(Node.data == 'n122').filter(parent.data == 'n12').
+ filter(grandparent.data == 'n1').first(),
(Node(data='n122'), Node(data='n12'), Node(data='n1'))
)
parent = aliased(Node)
grandparent = aliased(Node)
eq_(
- sess.query(Node, parent, grandparent).\
- join(parent, Node.parent).\
- join(grandparent, parent.parent).\
- filter(Node.data == 'n122').filter(parent.data == 'n12').\
- filter(grandparent.data == 'n1').from_self().first(),
+ sess.query(Node, parent, grandparent).
+ join(parent, Node.parent).
+ join(grandparent, parent.parent).
+ filter(Node.data == 'n122').filter(parent.data == 'n12').
+ filter(grandparent.data == 'n1').from_self().first(),
(Node(data='n122'), Node(data='n12'), Node(data='n1'))
)
grandparent = aliased(Node)
# same, change order around
eq_(
- sess.query(parent, grandparent, Node).\
- join(parent, Node.parent).\
- join(grandparent, parent.parent).\
- filter(Node.data == 'n122').filter(parent.data == 'n12').\
- filter(grandparent.data == 'n1').from_self().first(),
+ sess.query(parent, grandparent, Node).
+ join(parent, Node.parent).
+ join(grandparent, parent.parent).
+ filter(Node.data == 'n122').filter(parent.data == 'n12').
+ filter(grandparent.data == 'n1').from_self().first(),
(Node(data='n12'), Node(data='n1'), Node(data='n122'))
)
parent = aliased(Node)
grandparent = aliased(Node)
eq_(
- sess.query(Node, parent, grandparent).\
- join(parent, Node.parent).\
- join(grandparent, parent.parent).\
- filter(Node.data=='n122').filter(parent.data=='n12').\
- filter(grandparent.data=='n1').\
- options(joinedload(Node.children)).first(),
+ sess.query(Node, parent, grandparent).
+ join(parent, Node.parent).
+ join(grandparent, parent.parent).
+ filter(Node.data == 'n122').filter(parent.data == 'n12').
+ filter(grandparent.data == 'n1').
+ options(joinedload(Node.children)).first(),
(Node(data='n122'), Node(data='n12'), Node(data='n1'))
)
parent = aliased(Node)
grandparent = aliased(Node)
eq_(
- sess.query(Node, parent, grandparent).\
- join(parent, Node.parent).\
- join(grandparent, parent.parent).\
- filter(Node.data=='n122').filter(parent.data=='n12').\
- filter(grandparent.data=='n1').from_self().\
- options(joinedload(Node.children)).first(),
+ sess.query(Node, parent, grandparent).
+ join(parent, Node.parent).
+ join(grandparent, parent.parent).
+ filter(Node.data == 'n122').filter(parent.data == 'n12').
+ filter(grandparent.data == 'n1').from_self().
+ options(joinedload(Node.children)).first(),
(Node(data='n122'), Node(data='n12'), Node(data='n1'))
)
-
def test_any(self):
Node = self.classes.Node
sess = create_session()
- eq_(sess.query(Node).filter(Node.children.any(Node.data=='n1')).all(), [])
- eq_(sess.query(Node).filter(Node.children.any(Node.data=='n12')).all(), [Node(data='n1')])
- eq_(sess.query(Node).filter(~Node.children.any()).order_by(Node.id).all(),
- [Node(data='n11'), Node(data='n13'),Node(data='n121'),Node(data='n122'),Node(data='n123'),])
+ eq_(sess.query(Node).filter(Node.children.any(Node.data == 'n1'))
+ .all(), [])
+ eq_(sess.query(Node)
+ .filter(Node.children.any(Node.data == 'n12')).all(),
+ [Node(data='n1')])
+ eq_(sess.query(Node).filter(~Node.children.any()).order_by(Node.id)
+ .all(), [Node(data='n11'), Node(data='n13'), Node(data='n121'),
+ Node(data='n122'), Node(data='n123'), ])
def test_has(self):
Node = self.classes.Node
sess = create_session()
- eq_(sess.query(Node).filter(Node.parent.has(Node.data=='n12')).order_by(Node.id).all(),
- [Node(data='n121'),Node(data='n122'),Node(data='n123')])
- eq_(sess.query(Node).filter(Node.parent.has(Node.data=='n122')).all(), [])
- eq_(sess.query(Node).filter(~Node.parent.has()).all(), [Node(data='n1')])
+ eq_(sess.query(Node).filter(Node.parent.has(Node.data == 'n12'))
+ .order_by(Node.id).all(),
+ [Node(data='n121'), Node(data='n122'), Node(data='n123')])
+ eq_(sess.query(Node).filter(Node.parent.has(Node.data == 'n122'))
+ .all(), [])
+ eq_(sess.query(Node).filter(
+ ~Node.parent.has()).all(), [Node(data='n1')])
def test_contains(self):
Node = self.classes.Node
sess = create_session()
- n122 = sess.query(Node).filter(Node.data=='n122').one()
- eq_(sess.query(Node).filter(Node.children.contains(n122)).all(), [Node(data='n12')])
+ n122 = sess.query(Node).filter(Node.data == 'n122').one()
+ eq_(sess.query(Node).filter(Node.children.contains(n122)).all(),
+ [Node(data='n12')])
- n13 = sess.query(Node).filter(Node.data=='n13').one()
- eq_(sess.query(Node).filter(Node.children.contains(n13)).all(), [Node(data='n1')])
+ n13 = sess.query(Node).filter(Node.data == 'n13').one()
+ eq_(sess.query(Node).filter(Node.children.contains(n13)).all(),
+ [Node(data='n1')])
def test_eq_ne(self):
Node = self.classes.Node
sess = create_session()
- n12 = sess.query(Node).filter(Node.data=='n12').one()
- eq_(sess.query(Node).filter(Node.parent==n12).all(), [Node(data='n121'),Node(data='n122'),Node(data='n123')])
+ n12 = sess.query(Node).filter(Node.data == 'n12').one()
+ eq_(sess.query(Node).filter(Node.parent == n12).all(),
+ [Node(data='n121'), Node(data='n122'), Node(data='n123')])
+
+ eq_(sess.query(Node).filter(Node.parent != n12).all(),
+ [Node(data='n1'), Node(data='n11'), Node(data='n12'),
+ Node(data='n13')])
- eq_(sess.query(Node).filter(Node.parent != n12).all(), [Node(data='n1'), Node(data='n11'), Node(data='n12'), Node(data='n13')])
class SelfReferentialM2MTest(fixtures.MappedTest):
run_setup_mappers = 'once'
@classmethod
def define_tables(cls, metadata):
nodes = Table('nodes', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', String(30)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)))
- node_to_nodes =Table('node_to_nodes', metadata,
- Column('left_node_id', Integer, ForeignKey('nodes.id'),primary_key=True),
- Column('right_node_id', Integer, ForeignKey('nodes.id'),primary_key=True),
- )
+ node_to_nodes = Table('node_to_nodes', metadata,
+ Column('left_node_id', Integer, ForeignKey(
+ 'nodes.id'), primary_key=True),
+ Column('right_node_id', Integer, ForeignKey(
+ 'nodes.id'), primary_key=True))
@classmethod
def setup_classes(cls):
@classmethod
def insert_data(cls):
Node, nodes, node_to_nodes = (cls.classes.Node,
- cls.tables.nodes,
- cls.tables.node_to_nodes)
-
+ cls.tables.nodes,
+ cls.tables.node_to_nodes)
mapper(Node, nodes, properties={
- 'children':relationship(Node, lazy='select', secondary=node_to_nodes,
- primaryjoin=nodes.c.id==node_to_nodes.c.left_node_id,
- secondaryjoin=nodes.c.id==node_to_nodes.c.right_node_id,
- )
+ 'children': relationship(
+ Node, lazy='select',
+ secondary=node_to_nodes,
+ primaryjoin=nodes.c.id == node_to_nodes.c.left_node_id,
+ secondaryjoin=nodes.c.id == node_to_nodes.c.right_node_id)
})
sess = create_session()
n1 = Node(data='n1')
Node = self.classes.Node
sess = create_session()
- eq_(sess.query(Node).filter(Node.children.any(Node.data == 'n3'
- )).order_by(Node.data).all(), [Node(data='n1'), Node(data='n2')])
+ eq_(sess.query(Node).filter(Node.children.any(Node.data == 'n3'))
+ .order_by(Node.data).all(),
+ [Node(data='n1'), Node(data='n2')])
def test_contains(self):
Node = self.classes.Node
sess = create_session()
n4 = sess.query(Node).filter_by(data='n4').one()
- eq_(sess.query(Node).filter(Node.children.contains(n4)).order_by(Node.data).all(),
+ eq_(sess.query(Node).filter(Node.children.contains(n4))
+ .order_by(Node.data).all(),
[Node(data='n1'), Node(data='n3')])
- eq_(sess.query(Node).filter(not_(Node.children.contains(n4))).order_by(Node.data).all(),
+ eq_(sess.query(Node).filter(not_(Node.children.contains(n4)))
+ .order_by(Node.data).all(),
[Node(data='n2'), Node(data='n4'), Node(data='n5'),
- Node(data='n6'), Node(data='n7')])
+ Node(data='n6'), Node(data='n7')])
def test_explicit_join(self):
Node = self.classes.Node
sess = create_session()
n1 = aliased(Node)
- eq_(
- sess.query(Node).select_from(join(Node, n1, 'children'
- )).filter(n1.data.in_(['n3', 'n7'
- ])).order_by(Node.id).all(),
- [Node(data='n1'), Node(data='n2')]
- )
-
-
+ eq_(sess.query(Node).select_from(join(Node, n1, 'children'))
+ .filter(n1.data.in_(['n3', 'n7'])).order_by(Node.id).all(),
+ [Node(data='n1'), Node(data='n2')])
addresses=relationship(Address, lazy='select'),
))
q = create_session().query(User)
- l = q.filter(users.c.id == addresses.c.user_id).\
+ result = q.filter(users.c.id == addresses.c.user_id).\
order_by(addresses.c.email_address).all()
assert [
User(id=8, addresses=[
User(id=7, addresses=[
Address(id=1)
]),
- ] == l
+ ] == result
def test_orderby_desc(self):
Address, addresses, users, User = (
q = sess.query(User)
if testing.against('mssql'):
- l = q.limit(2).all()
- assert self.static.user_all_result[:2] == l
+ result = q.limit(2).all()
+ assert self.static.user_all_result[:2] == result
else:
- l = q.limit(2).offset(1).all()
- assert self.static.user_all_result[1:3] == l
+ result = q.limit(2).offset(1).all()
+ assert self.static.user_all_result[1:3] == result
def test_distinct(self):
users, items, order_items, orders, \
s = sa.union_all(
u2.select(use_labels=True),
u2.select(use_labels=True), u2.select(use_labels=True)).alias('u')
- l = q.filter(s.c.u2_id == User.id).order_by(User.id).distinct().all()
- eq_(self.static.user_all_result, l)
+ result = q.filter(s.c.u2_id == User.id).order_by(User.id).distinct() \
+ .all()
+ eq_(self.static.user_all_result, result)
def test_uselist_false_warning(self):
"""test that multiple rows received by a
mapper(Address, addresses), lazy='select', uselist=False)
))
q = create_session().query(User)
- l = q.filter(users.c.id == 7).all()
- assert [User(id=7, address=Address(id=1))] == l
+ result = q.filter(users.c.id == 7).all()
+ assert [User(id=7, address=Address(id=1))] == result
def test_many_to_one_binds(self):
Address, addresses, users, User = (
'city': relationship(City,
primaryjoin=and_(
person.c.city_id == city.c.id,
- city.c.deleted == False),
- backref='people'
- )
+ city.c.deleted == False), # noqa
+ backref='people')
})
mapper(City, city)
class FlushOnPendingTest(AssertsExecutionResults, fixtures.TestBase):
def setUp(self):
global Parent, Child, Base
- Base= declarative_base()
+ Base = declarative_base()
class Parent(Base):
__tablename__ = 'parent'
- id= Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
name = Column(String(50), nullable=False)
children = relationship("Child", load_on_pending=True)
class Child(Base):
__tablename__ = 'child'
- id= Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
parent_id = Column(Integer, ForeignKey('parent.id'))
Base.metadata.create_all(engine)
assert p1.children == []
self.assert_sql_count(testing.db, go, 0)
+
class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
def setUp(self):
global Parent, Child, Base
- Base= declarative_base()
+ Base = declarative_base()
class Parent(Base):
__tablename__ = 'parent'
- __table_args__ = {'mysql_engine':'InnoDB'}
+ __table_args__ = {'mysql_engine': 'InnoDB'}
- id= Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
class Child(Base):
__tablename__ = 'child'
- __table_args__ = {'mysql_engine':'InnoDB'}
+ __table_args__ = {'mysql_engine': 'InnoDB'}
- id= Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
parent_id = Column(Integer, ForeignKey('parent.id'))
parent = relationship(Parent, backref=backref("children"))
p2 = Parent(id=p1.id)
sess.add(p2)
# load should emit since PK is populated
+
def go():
assert p2.children
self.assert_sql_count(testing.db, go, 1)
sess.add(p2)
# load should not emit since "None" is the bound
# param list
+
def go():
assert not p2.children
self.assert_sql_count(testing.db, go, 0)
c3.parent = p1
c3.parent = p1
c3.parent = p1
- assert len(p1.children)== 2
+ assert len(p1.children) == 2
def test_m2o_lazy_loader_on_persistent(self):
"""Compare the behaviors from the lazyloader using
sess.expire(c1, ['parent'])
# old 0.6 behavior
- #if manualflush and (not loadrel or fake_autoexpire):
+ # if manualflush and (not loadrel or
+ # fake_autoexpire):
# # a flush occurs, we get p2
# assert c1.parent is p2
- #elif not loadrel and not loadfk:
- # # problematically - we get None since committed state
- # # is empty when c1.parent_id was mutated, since we want
+ # elif not loadrel and not loadfk:
+ # # problematically - we get None since
+ # # committed state
+ # # is empty when c1.parent_id was mutated,
+ # # since we want
# # to save on selects. this is
- # # why the patch goes in in 0.6 - this is mostly a bug.
+ # # why the patch goes in in 0.6 - this is
+ # # mostly a bug.
# assert c1.parent is None
- #else:
- # # if things were loaded, autoflush doesn't even
- # # happen.
+ # else:
+ # # if things were loaded, autoflush doesn't
+ # # even happen.
# assert c1.parent is p1
# new behavior
c2.parent_id = p2.id
if manualflush:
- sess.flush()
+ sess.flush()
if loadonpending or manualflush:
assert c2.parent is p2
for autoflush in (False, True):
for manualflush in (False, True):
for enable_relationship_rel in (False, True):
- Child.parent.property.load_on_pending = loadonpending
+ Child.parent.property.load_on_pending = \
+ loadonpending
sess.autoflush = autoflush
c2 = Child()
c2.parent_id = p2.id
if manualflush:
- sess.flush()
+ sess.flush()
- if (loadonpending and attach) or enable_relationship_rel:
+ if (loadonpending and attach) \
+ or enable_relationship_rel:
assert c2.parent is p2
else:
assert c2.parent is None
s = Session()
u1, u2, u3, u4 = User(id=1, name='u1'), User(id=2, name='u2'), \
- User(id=7, name='u3'), User(id=8, name='u4')
+ User(id=7, name='u3'), User(id=8, name='u4')
s.query(User).filter(User.id.in_([7, 8])).all()
s.close()
return s, [u1, u2, u3, u4]
User = self.classes.User
q = s.query(User, User.id)
- kt = lambda *x: KeyedTuple(x, ['User', 'id'])
+
+ def kt(*x):
+ return KeyedTuple(x, ['User', 'id'])
+
collection = [kt(u1, 1), kt(u2, 2), kt(u3, 7), kt(u4, 8)]
it = loading.merge_result(
q,
ua = aliased(User)
q = s.query(User, ua)
- kt = lambda *x: KeyedTuple(x, ['User', 'useralias'])
+
+ def kt(*x):
+ return KeyedTuple(x, ['User', 'useralias'])
+
collection = [kt(u1, u2), kt(u1, None), kt(u2, u3)]
it = loading.merge_result(
q,
],
[(u1.id, u2.id), (u1.id, None), (u2.id, u3.id)]
)
-
-
User = self.classes.User
sess = Session()
assert_raises_message(
- exc.ArgumentError, "Unknown with_lockmode argument: 'unknown_mode'",
+ exc.ArgumentError,
+ "Unknown with_lockmode argument: 'unknown_mode'",
sess.query(User.id).with_lockmode, 'unknown_mode'
)
mapper(User, users)
def _assert(self, read=False, nowait=False, of=None, key_share=None,
- assert_q_of=None, assert_sel_of=None):
+ assert_q_of=None, assert_sel_of=None):
User = self.classes.User
s = Session()
- q = s.query(User).with_for_update(read=read, nowait=nowait, of=of, key_share=key_share)
+ q = s.query(User).with_for_update(
+ read=read, nowait=nowait, of=of, key_share=key_share)
sel = q._compile_context().statement
assert q._for_update_arg.read is read
assert_sel_of=[users.c.id]
)
+
class CompileTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""run some compile tests, even though these are redundant."""
run_inserts = None
def test_default_update(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_for_update(),
+ self.assert_compile(
+ sess.query(User.id).with_for_update(),
"SELECT users.id AS users_id FROM users FOR UPDATE",
dialect=default.DefaultDialect()
)
def test_not_supported_by_dialect_should_just_use_update(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_for_update(read=True),
+ self.assert_compile(
+ sess.query(User.id).with_for_update(read=True),
"SELECT users.id AS users_id FROM users FOR UPDATE",
dialect=default.DefaultDialect()
)
User = self.classes.User
sess = Session()
self.assert_compile(sess.query(User.id).with_for_update(read=True),
- "SELECT users.id AS users_id FROM users FOR SHARE",
- dialect="postgresql"
- )
+ "SELECT users.id AS users_id FROM users FOR SHARE",
+ dialect="postgresql")
def test_postgres_read_nowait(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).
- with_for_update(read=True, nowait=True),
+ self.assert_compile(
+ sess.query(User.id).
+ with_for_update(read=True, nowait=True),
"SELECT users.id AS users_id FROM users FOR SHARE NOWAIT",
dialect="postgresql"
)
def test_postgres_update(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_for_update(),
+ self.assert_compile(
+ sess.query(User.id).with_for_update(),
"SELECT users.id AS users_id FROM users FOR UPDATE",
dialect="postgresql"
)
def test_postgres_update_of(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_for_update(of=User.id),
+ self.assert_compile(
+ sess.query(User.id).with_for_update(of=User.id),
"SELECT users.id AS users_id FROM users FOR UPDATE OF users",
dialect="postgresql"
)
def test_postgres_update_of_entity(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_for_update(of=User),
+ self.assert_compile(
+ sess.query(User.id).with_for_update(of=User),
"SELECT users.id AS users_id FROM users FOR UPDATE OF users",
dialect="postgresql"
)
Address = self.classes.Address
sess = Session()
- self.assert_compile(sess.query(User.id, Address.id).
- with_for_update(of=[User, Address]),
+ self.assert_compile(
+ sess.query(User.id, Address.id).
+ with_for_update(of=[User, Address]),
"SELECT users.id AS users_id, addresses.id AS addresses_id "
"FROM users, addresses FOR UPDATE OF users, addresses",
dialect="postgresql"
def test_postgres_for_no_key_update(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_for_update(key_share=True),
+ self.assert_compile(
+ sess.query(User.id).with_for_update(key_share=True),
"SELECT users.id AS users_id FROM users FOR NO KEY UPDATE",
dialect="postgresql"
)
def test_postgres_for_no_key_nowait_update(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_for_update(key_share=True, nowait=True),
+ self.assert_compile(
+ sess.query(User.id).with_for_update(key_share=True, nowait=True),
"SELECT users.id AS users_id FROM users FOR NO KEY UPDATE NOWAIT",
dialect="postgresql"
)
def test_postgres_update_of_list(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).
- with_for_update(of=[User.id, User.id, User.id]),
+ self.assert_compile(
+ sess.query(User.id)
+ .with_for_update(of=[User.id, User.id, User.id]),
"SELECT users.id AS users_id FROM users FOR UPDATE OF users",
dialect="postgresql"
- )
+ )
def test_postgres_update_skip_locked(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).
- with_for_update(skip_locked=True),
+ self.assert_compile(
+ sess.query(User.id).with_for_update(skip_locked=True),
"SELECT users.id AS users_id FROM users FOR UPDATE SKIP LOCKED",
dialect="postgresql"
)
-
def test_oracle_update(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_for_update(),
+ self.assert_compile(
+ sess.query(User.id).with_for_update(),
"SELECT users.id AS users_id FROM users FOR UPDATE",
dialect="oracle"
)
def test_oracle_update_skip_locked(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id)
- .with_for_update(skip_locked=True),
+ self.assert_compile(
+ sess.query(User.id).with_for_update(skip_locked=True),
"SELECT users.id AS users_id FROM users FOR UPDATE SKIP LOCKED",
dialect="oracle"
)
def test_mysql_read(self):
User = self.classes.User
sess = Session()
- self.assert_compile(sess.query(User.id).with_for_update(read=True),
+ self.assert_compile(
+ sess.query(User.id).with_for_update(read=True),
"SELECT users.id AS users_id FROM users LOCK IN SHARE MODE",
dialect="mysql"
)
@classmethod
def define_tables(cls, metadata):
Table('place', metadata,
- Column('place_id', Integer, test_needs_autoincrement=True,
- primary_key=True),
- Column('name', String(30), nullable=False),
- test_needs_acid=True,
- )
+ Column('place_id', Integer, test_needs_autoincrement=True,
+ primary_key=True),
+ Column('name', String(30), nullable=False),
+ test_needs_acid=True)
Table('transition', metadata,
- Column('transition_id', Integer,
- test_needs_autoincrement=True, primary_key=True),
- Column('name', String(30), nullable=False),
- test_needs_acid=True,
- )
+ Column('transition_id', Integer,
+ test_needs_autoincrement=True, primary_key=True),
+ Column('name', String(30), nullable=False),
+ test_needs_acid=True)
Table('place_thingy', metadata,
- Column('thingy_id', Integer, test_needs_autoincrement=True,
- primary_key=True),
- Column('place_id', Integer, ForeignKey('place.place_id'),
- nullable=False),
- Column('name', String(30), nullable=False),
- test_needs_acid=True,
- )
+ Column('thingy_id', Integer, test_needs_autoincrement=True,
+ primary_key=True),
+ Column('place_id', Integer, ForeignKey('place.place_id'),
+ nullable=False),
+ Column('name', String(30), nullable=False),
+ test_needs_acid=True)
# association table #1
Table('place_input', metadata,
- Column('place_id', Integer, ForeignKey('place.place_id')),
- Column('transition_id', Integer,
- ForeignKey('transition.transition_id')),
- test_needs_acid=True,
- )
+ Column('place_id', Integer, ForeignKey('place.place_id')),
+ Column('transition_id', Integer,
+ ForeignKey('transition.transition_id')),
+ test_needs_acid=True)
# association table #2
Table('place_output', metadata,
- Column('place_id', Integer, ForeignKey('place.place_id')),
- Column('transition_id', Integer,
- ForeignKey('transition.transition_id')),
- test_needs_acid=True,
- )
+ Column('place_id', Integer, ForeignKey('place.place_id')),
+ Column('transition_id', Integer,
+ ForeignKey('transition.transition_id')),
+ test_needs_acid=True)
Table('place_place', metadata,
Column('pl1_id', Integer, ForeignKey('place.place_id')),
Column('pl2_id', Integer, ForeignKey('place.place_id')),
- test_needs_acid=True,
- )
+ test_needs_acid=True)
@classmethod
def setup_classes(cls):
self.name = name
def test_overlapping_attribute_error(self):
- place, Transition, place_input, Place, transition = (self.tables.place,
- self.classes.Transition,
- self.tables.place_input,
- self.classes.Place,
- self.tables.transition)
+ place, Transition, place_input, Place, transition = (
+ self.tables.place,
+ self.classes.Transition,
+ self.tables.place_input,
+ self.classes.Place,
+ self.tables.transition)
mapper(Place, place, properties={
'transitions': relationship(Transition,
- secondary=place_input, backref='places')
+ secondary=place_input,
+ backref='places')
})
mapper(Transition, transition, properties={
'places': relationship(Place,
- secondary=place_input, backref='transitions')
+ secondary=place_input,
+ backref='transitions')
})
assert_raises_message(sa.exc.ArgumentError,
- "property of that name exists",
- sa.orm.configure_mappers)
+ "property of that name exists",
+ sa.orm.configure_mappers)
def test_self_referential_roundtrip(self):
place, Place, place_place = (self.tables.place,
- self.classes.Place,
- self.tables.place_place)
+ self.classes.Place,
+ self.tables.place_place)
mapper(Place, place, properties={
'places': relationship(
- Place,
- secondary=place_place,
- primaryjoin=place.c.place_id == place_place.c.pl1_id,
- secondaryjoin=place.c.place_id == place_place.c.pl2_id,
- order_by=place_place.c.pl2_id
- )
+ Place,
+ secondary=place_place,
+ primaryjoin=place.c.place_id == place_place.c.pl1_id,
+ secondaryjoin=place.c.place_id == place_place.c.pl2_id,
+ order_by=place_place.c.pl2_id
+ )
})
sess = Session()
def test_self_referential_bidirectional_mutation(self):
place, Place, place_place = (self.tables.place,
- self.classes.Place,
- self.tables.place_place)
+ self.classes.Place,
+ self.tables.place_place)
mapper(Place, place, properties={
'child_places': relationship(
- Place,
- secondary=place_place,
- primaryjoin=place.c.place_id == place_place.c.pl1_id,
- secondaryjoin=place.c.place_id == place_place.c.pl2_id,
- order_by=place_place.c.pl2_id,
- backref='parent_places'
- )
+ Place,
+ secondary=place_place,
+ primaryjoin=place.c.place_id == place_place.c.pl1_id,
+ secondaryjoin=place.c.place_id == place_place.c.pl2_id,
+ order_by=place_place.c.pl2_id,
+ backref='parent_places'
+ )
})
sess = Session()
assert p1 in p2.parent_places
assert p2 in p1.parent_places
-
def test_joinedload_on_double(self):
"""test that a mapper can have two eager relationships to the same table, via
two different association tables. aliases are required."""
place_input, transition, Transition, PlaceThingy, \
- place, place_thingy, Place, \
- place_output = (self.tables.place_input,
- self.tables.transition,
- self.classes.Transition,
- self.classes.PlaceThingy,
- self.tables.place,
- self.tables.place_thingy,
- self.classes.Place,
- self.tables.place_output)
-
+ place, place_thingy, Place, \
+ place_output = (self.tables.place_input,
+ self.tables.transition,
+ self.classes.Transition,
+ self.classes.PlaceThingy,
+ self.tables.place,
+ self.tables.place_thingy,
+ self.classes.Place,
+ self.tables.place_output)
mapper(PlaceThingy, place_thingy)
mapper(Place, place, properties={
mapper(Transition, transition, properties=dict(
inputs=relationship(Place, place_output, lazy='joined'),
- outputs=relationship(Place, place_input, lazy='joined'),
- )
+ outputs=relationship(Place, place_input, lazy='joined'))
)
tran = Transition('transition1')
r = sess.query(Transition).all()
self.assert_unordered_result(r, Transition,
- {'name': 'transition1',
- 'inputs': (Place, [{'name': 'place1'}]),
- 'outputs': (Place, [{'name': 'place2'}, {'name': 'place3'}])
- })
+ {'name': 'transition1',
+ 'inputs': (Place, [{'name': 'place1'}]),
+ 'outputs': (Place, [{'name': 'place2'},
+ {'name': 'place3'}])
+ })
def test_bidirectional(self):
place_input, transition, Transition, Place, place, place_output = (
- self.tables.place_input,
- self.tables.transition,
- self.classes.Transition,
- self.classes.Place,
- self.tables.place,
- self.tables.place_output)
+ self.tables.place_input,
+ self.tables.transition,
+ self.classes.Transition,
+ self.classes.Place,
+ self.tables.place,
+ self.tables.place_output)
mapper(Place, place)
mapper(Transition, transition, properties=dict(
- inputs=relationship(Place, place_output,
- backref=backref('inputs',
- order_by=transition.c.transition_id),
- order_by=Place.place_id),
- outputs=relationship(Place, place_input,
- backref=backref('outputs',
- order_by=transition.c.transition_id),
- order_by=Place.place_id),
- )
+ inputs=relationship(
+ Place, place_output,
+ backref=backref('inputs', order_by=transition.c.transition_id),
+ order_by=Place.place_id),
+ outputs=relationship(
+ Place, place_input,
+ backref=backref('outputs',
+ order_by=transition.c.transition_id),
+ order_by=Place.place_id),
+ )
)
t1 = Transition('transition1')
sess.commit()
self.assert_result([t1],
- Transition, {'outputs':
- (Place, [{'name': 'place3'}, {'name': 'place1'}])})
+ Transition, {'outputs':
+ (Place, [{'name': 'place3'},
+ {'name': 'place1'}])})
self.assert_result([p2],
- Place, {'inputs':
- (Transition, [{'name': 'transition1'},
- {'name': 'transition2'}])})
+ Place, {'inputs':
+ (Transition, [{'name': 'transition1'},
+ {'name': 'transition2'}])})
@testing.requires.sane_multi_rowcount
def test_stale_conditions(self):
Place, Transition, place_input, place, transition = (
- self.classes.Place,
- self.classes.Transition,
- self.tables.place_input,
- self.tables.place,
- self.tables.transition)
+ self.classes.Place,
+ self.classes.Transition,
+ self.tables.place_input,
+ self.tables.place,
+ self.tables.transition)
mapper(Place, place, properties={
'transitions': relationship(Transition, secondary=place_input,
- passive_updates=False)
+ passive_updates=False)
})
mapper(Transition, transition)
@classmethod
def define_tables(cls, metadata):
Table("left", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)))
Table("right", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(30)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(30)))
Table('secondary', metadata,
- Column('left_id', Integer, ForeignKey('left.id'),
- primary_key=True),
- Column('right_id', Integer, ForeignKey('right.id'),
- primary_key=True),
- )
+ Column('left_id', Integer, ForeignKey('left.id'),
+ primary_key=True),
+ Column('right_id', Integer, ForeignKey('right.id'),
+ primary_key=True))
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
+
class B(cls.Comparable):
pass
def _standard_bidirectional_fixture(self):
left, secondary, right = self.tables.left, \
- self.tables.secondary, self.tables.right
+ self.tables.secondary, self.tables.right
A, B = self.classes.A, self.classes.B
mapper(A, left, properties={
'bs': relationship(B, secondary=secondary,
- backref='as', order_by=right.c.id)
+ backref='as', order_by=right.c.id)
})
mapper(B, right)
def _bidirectional_onescalar_fixture(self):
left, secondary, right = self.tables.left, \
- self.tables.secondary, self.tables.right
+ self.tables.secondary, self.tables.right
A, B = self.classes.A, self.classes.B
mapper(A, left, properties={
'bs': relationship(B, secondary=secondary,
- backref=backref('a', uselist=False),
- order_by=right.c.id)
+ backref=backref('a', uselist=False),
+ order_by=right.c.id)
})
mapper(B, right)
]
eq_(len(b_calls), 3)
-
def test_check_descriptor_as_method(self):
User, users = self.classes.User, self.tables.users
cls = self.prop.parent.class_
col = getattr(cls, 'name')
if other is None:
- return col == None
+ return col is None
else:
return sa.func.upper(col) == sa.func.upper(other)
mapper(User, usersaddresses, primary_key=[users.c.id],
properties={'add_id': addresses.c.id}
)
- l = create_session().query(User).order_by(users.c.id).all()
- eq_(l, self.static.user_result[:3])
+ result = create_session().query(User).order_by(users.c.id).all()
+ eq_(result, self.static.user_result[:3])
def test_mapping_to_join_exclude_prop(self):
"""Mapping to a join"""
mapper(User, usersaddresses, primary_key=[users.c.id],
exclude_properties=[addresses.c.id]
)
- l = create_session().query(User).order_by(users.c.id).all()
- eq_(l, self.static.user_result[:3])
+ result = create_session().query(User).order_by(users.c.id).all()
+ eq_(result, self.static.user_result[:3])
def test_mapping_to_join_no_pk(self):
email_bounces, addresses, Address = (self.tables.email_bounces,
address_id=addresses.c.id))
session = create_session()
- l = session.query(User).order_by(User.id, User.address_id).all()
+ result = session.query(User).order_by(User.id, User.address_id).all()
- eq_(l, [
+ eq_(result, [
User(id=7, address_id=1),
User(id=8, address_id=2),
User(id=8, address_id=3),
address_id=addresses.c.id))
session = create_session()
- l = session.query(User).order_by(User.id, User.address_id).all()
+ result = session.query(User).order_by(User.id, User.address_id).all()
- eq_(l, [
+ eq_(result, [
User(id=7, address_id=1),
User(id=8, address_id=2),
User(id=8, address_id=3),
orders=relationship(Order)))
session = create_session()
- l = (session.query(User).
- select_from(users.join(orders).
- join(order_items).
- join(items)).
- filter(items.c.description == 'item 4')).all()
+ result = (session.query(User).
+ select_from(users.join(orders).
+ join(order_items).
+ join(items)).
+ filter(items.c.description == 'item 4')).all()
- eq_(l, [self.static.user_result[0]])
+ eq_(result, [self.static.user_result[0]])
@testing.uses_deprecated("Mapper.order_by")
def test_cancel_order_by(self):
mapper(User, s)
sess = create_session()
- l = sess.query(User).order_by(s.c.id).all()
+ result = sess.query(User).order_by(s.c.id).all()
for idx, total in enumerate((14, 16)):
- eq_(l[idx].concat, l[idx].id * 2)
- eq_(l[idx].concat, total)
+ eq_(result[idx].concat, result[idx].id * 2)
+ eq_(result[idx].concat, total)
def test_count(self):
"""The count function on Query."""
cls = self.prop.parent.class_
col = getattr(cls, 'name')
if other is None:
- return col == None
+ return col is None
else:
return sa.func.upper(col) == sa.func.upper(other)
order_by=addresses.c.id)))
sess = create_session()
- l = (sess.query(User).
- order_by(User.id).
- options(sa.orm.joinedload('addresses'))).all()
+ result = (sess.query(User).
+ order_by(User.id).
+ options(sa.orm.joinedload('addresses'))).all()
def go():
- eq_(l, self.static.user_address_result)
+ eq_(result, self.static.user_address_result)
self.sql_count_(0, go)
def test_eager_options_with_limit(self):
# first test straight eager load, 1 statement
def go():
- l = sess.query(User).order_by(User.id).all()
- eq_(l, self.static.user_address_result)
+ result = sess.query(User).order_by(User.id).all()
+ eq_(result, self.static.user_address_result)
self.sql_count_(1, go)
sess.expunge_all()
r = users.select().order_by(users.c.id).execute()
def go():
- l = list(sess.query(User).instances(r))
- eq_(l, self.static.user_address_result)
+ result = list(sess.query(User).instances(r))
+ eq_(result, self.static.user_address_result)
self.sql_count_(4, go)
def test_eager_degrade_deep(self):
# first test straight eager load, 1 statement
def go():
- l = sess.query(User).order_by(User.id).all()
- eq_(l, self.static.user_all_result)
+ result = sess.query(User).order_by(User.id).all()
+ eq_(result, self.static.user_all_result)
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
r = users.select().execute()
def go():
- l = list(sess.query(User).instances(r))
- eq_(l, self.static.user_all_result)
+ result = list(sess.query(User).instances(r))
+ eq_(result, self.static.user_all_result)
self.assert_sql_count(testing.db, go, 6)
def test_lazy_options(self):
))
sess = create_session()
- l = (sess.query(User).
- order_by(User.id).
- options(sa.orm.lazyload('addresses'))).all()
+ result = (sess.query(User).
+ order_by(User.id).
+ options(sa.orm.lazyload('addresses'))).all()
def go():
- eq_(l, self.static.user_address_result)
+ eq_(result, self.static.user_address_result)
self.sql_count_(4, go)
def test_option_propagate(self):
sess = create_session()
- l = (sess.query(User).
- order_by(User.id).
- options(sa.orm.joinedload_all('orders.items.keywords'))).all()
+ result = (sess.query(User).
+ order_by(User.id).
+ options(
+ sa.orm.joinedload_all('orders.items.keywords'))).all()
def go():
- l[0].orders[1].items[0].keywords[1]
+ result[0].orders[1].items[0].keywords[1]
self.sql_count_(0, go)
sess = create_session()
- l = (sess.query(User).
- options(sa.orm.subqueryload_all('orders.items.keywords'))).all()
+ result = (sess.query(User).
+ options(
+ sa.orm.subqueryload_all('orders.items.keywords'))).all()
def go():
- l[0].orders[1].items[0].keywords[1]
+ result[0].orders[1].items[0].keywords[1]
self.sql_count_(0, go)
def test_deep_options_3(self):
mapper(Child1, child1, inherits=Base,
polymorphic_identity='child1',
properties={
- 'child2': relationship(Child2,
- primaryjoin=child1.c.child2id == base.c.id,
- foreign_keys=child1.c.child2id)
+ 'child2': relationship(
+ Child2,
+ primaryjoin=child1.c.child2id == base.c.id,
+ foreign_keys=child1.c.child2id)
})
mapper(Child2, child2, inherits=Base, polymorphic_identity='child2')
mapper(Related, related)
addresses=relationship(mapper(Address, addresses), lazy='noload')
))
q = create_session().query(m)
- l = [None]
+ result = [None]
def go():
x = q.filter(User.id == 7).all()
x[0].addresses
- l[0] = x
+ result[0] = x
self.assert_sql_count(testing.db, go, 1)
self.assert_result(
- l[0], User,
+ result[0], User,
{'id': 7, 'addresses': (Address, [])},
)
addresses=relationship(mapper(Address, addresses), lazy='noload')
))
q = create_session().query(m).options(sa.orm.lazyload('addresses'))
- l = [None]
+ result = [None]
def go():
x = q.filter(User.id == 7).all()
x[0].addresses
- l[0] = x
+ result[0] = x
self.sql_count_(2, go)
self.assert_result(
- l[0], User,
+ result[0], User,
{'id': 7, 'addresses': (Address, [{'id': 1}])},
)
addresses=relationship(Address, lazy='raise')
))
q = create_session().query(User)
- l = [None]
+ result = [None]
def go():
x = q.filter(User.id == 7).all()
sa.exc.InvalidRequestError,
"'User.addresses' is not available due to lazy='raise'",
lambda: x[0].addresses)
- l[0] = x
+ result[0] = x
self.assert_sql_count(testing.db, go, 1)
self.assert_result(
- l[0], User,
+ result[0], User,
{'id': 7},
)
addresses=relationship(Address)
))
q = create_session().query(User)
- l = [None]
+ result = [None]
def go():
x = q.options(
sa.exc.InvalidRequestError,
"'User.addresses' is not available due to lazy='raise'",
lambda: x[0].addresses)
- l[0] = x
+ result[0] = x
self.assert_sql_count(testing.db, go, 1)
self.assert_result(
- l[0], User,
+ result[0], User,
{'id': 7},
)
addresses=relationship(Address, lazy='raise')
))
q = create_session().query(User).options(sa.orm.lazyload('addresses'))
- l = [None]
+ result = [None]
def go():
x = q.filter(User.id == 7).all()
x[0].addresses
- l[0] = x
+ result[0] = x
self.sql_count_(2, go)
self.assert_result(
- l[0], User,
+ result[0], User,
{'id': 7, 'addresses': (Address, [{'id': 1}])},
)
'user': relationship(
User,
primaryjoin=sa.and_(
- addresses.c.user_id == users.c.id ,
- users.c.name != None
+ addresses.c.user_id == users.c.id,
+ users.c.name != None # noqa
)
)
})
h1.h1s.append(H1())
s.flush()
- eq_(
- select([func.count('*')]).select_from(ht1)
- .scalar(), 4)
+ eq_(select([func.count('*')]).select_from(ht1).scalar(), 4)
h6 = H6()
h6.h1a = h1
from sqlalchemy import event, and_, case
from sqlalchemy.testing.schema import Table, Column
+
class MergeTest(_fixtures.FixtureTest):
"""Session.merge() functionality"""
mapper(User, users)
sess = create_session()
u = User(name='fred')
+
def go():
sess.merge(u)
self.assert_sql_count(testing.db, go, 0)
def test_transient_to_pending_collection(self):
User, Address, addresses, users = (self.classes.User,
- self.classes.Address,
- self.tables.addresses,
- self.tables.users)
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users)
mapper(User, users, properties={
'addresses': relationship(Address, backref='user',
- collection_class=OrderedSet)})
+ collection_class=OrderedSet)})
mapper(Address, addresses)
load = self.load_tracker(User)
self.load_tracker(Address, load)
u = User(id=7, name='fred', addresses=OrderedSet([
Address(id=1, email_address='fred1'),
Address(id=2, email_address='fred2'),
- ]))
+ ]))
eq_(load.called, 0)
sess = create_session()
User(id=7, name='fred', addresses=OrderedSet([
Address(id=1, email_address='fred1'),
Address(id=2, email_address='fred2'),
- ]))
- )
+ ])))
def test_transient_to_persistent(self):
User, users = self.classes.User, self.tables.users
def test_transient_to_persistent_collection(self):
User, Address, addresses, users = (self.classes.User,
- self.classes.Address,
- self.tables.addresses,
- self.tables.users)
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users)
mapper(User, users, properties={
- 'addresses':relationship(Address,
- backref='user',
- collection_class=OrderedSet,
- order_by=addresses.c.id,
- cascade="all, delete-orphan")
+ 'addresses': relationship(Address,
+ backref='user',
+ collection_class=OrderedSet,
+ order_by=addresses.c.id,
+ cascade="all, delete-orphan")
})
mapper(Address, addresses)
User(id=7, name='fred', addresses=OrderedSet([
Address(id=3, email_address='fred3'),
Address(id=4, email_address='fred4'),
- ]))
- )
+ ])))
sess.flush()
sess.expunge_all()
eq_(sess.query(User).one(),
User(id=7, name='fred', addresses=OrderedSet([
Address(id=3, email_address='fred3'),
Address(id=4, email_address='fred4'),
- ]))
- )
+ ])))
def test_detached_to_persistent_collection(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address,
- backref='user',
- order_by=addresses.c.id,
- collection_class=OrderedSet)})
+ 'addresses': relationship(Address,
+ backref='user',
+ order_by=addresses.c.id,
+ collection_class=OrderedSet)})
mapper(Address, addresses)
load = self.load_tracker(User)
self.load_tracker(Address, load)
sess.flush()
sess.expunge_all()
- u.name='fred jones'
+ u.name = 'fred jones'
u.addresses.add(Address(id=3, email_address='fred3'))
u.addresses.remove(a)
entities, with a bidirectional relationship."""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses),
- cascade="all", backref="user")
+ 'addresses': relationship(mapper(Address, addresses),
+ cascade="all", backref="user")
})
load = self.load_tracker(User)
self.load_tracker(Address, load)
eq_(u,
User(id=7, name='fred', addresses=[
- Address(email_address='foo@bar.com'),
- Address(email_address='hoho@bar.com')]))
+ Address(email_address='foo@bar.com'),
+ Address(email_address='hoho@bar.com')]))
eq_(u2,
User(id=7, name='fred', addresses=[
- Address(email_address='foo@bar.com'),
- Address(email_address='hoho@bar.com')]))
+ Address(email_address='foo@bar.com'),
+ Address(email_address='hoho@bar.com')]))
sess.flush()
sess.expunge_all()
u7 = sess.merge(User(id=3))
assert u6.__dict__['data'] is None
-
def test_merge_irregular_collection(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
'addresses': relationship(
mapper(Address, addresses),
backref='user',
- collection_class=
- attribute_mapped_collection('email_address')),
- })
+ collection_class=attribute_mapped_collection('email_address')),
+ })
u1 = User(id=7, name='fred')
u1.addresses['foo@bar.com'] = Address(email_address='foo@bar.com')
sess = create_session()
persistent entities."""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses),
- backref='user')
+ 'addresses': relationship(mapper(Address, addresses),
+ backref='user')
})
load = self.load_tracker(User)
self.load_tracker(Address, load)
# set up data and save
u = User(id=7, name='fred', addresses=[
Address(email_address='foo@bar.com'),
- Address(email_address = 'hoho@la.com')])
+ Address(email_address='hoho@la.com')])
sess.add(u)
sess.flush()
u2 = sess2.query(User).get(7)
eq_(u2,
User(id=7, name='fred', addresses=[
- Address(email_address='foo@bar.com'),
- Address(email_address='hoho@la.com')]))
+ Address(email_address='foo@bar.com'),
+ Address(email_address='hoho@la.com')]))
# make local changes to data
u.name = 'fred2'
assert len(u.addresses)
for a in u.addresses:
assert a.user is u
+
def go():
sess4.flush()
# no changes; therefore flush should do nothing
assert len(u.addresses)
for a in u.addresses:
assert a.user is u
+
def go():
sess5.flush()
# no changes; therefore flush should do nothing
sess4 = create_session()
u = sess4.merge(u, load=False)
# post merge change
- u.addresses[1].email_address='afafds'
+ u.addresses[1].email_address = 'afafds'
+
def go():
sess4.flush()
# afafds change flushes
"""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses, properties={
- 'user':relationship(User, cascade="save-update")
+ 'user': relationship(User, cascade="save-update")
})
mapper(User, users)
sess = create_session()
def test_one_to_many_cascade(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses))})
+ 'addresses': relationship(mapper(Address, addresses))})
load = self.load_tracker(User)
self.load_tracker(Address, load)
def test_many_to_one_cascade(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses, properties={
- 'user':relationship(User)
+ 'user': relationship(User)
})
mapper(User, users)
u1 = User(id=1, name="u1")
- a1 =Address(id=1, email_address="a1", user=u1)
+ a1 = Address(id=1, email_address="a1", user=u1)
u2 = User(id=2, name="u2")
sess = create_session()
def test_many_to_many_cascade(self):
items, Order, orders, order_items, Item = (self.tables.items,
- self.classes.Order,
- self.tables.orders,
- self.tables.order_items,
- self.classes.Item)
-
+ self.classes.Order,
+ self.tables.orders,
+ self.tables.order_items,
+ self.classes.Item)
mapper(Order, orders, properties={
- 'items':relationship(mapper(Item, items),
- secondary=order_items)})
+ 'items': relationship(mapper(Item, items),
+ secondary=order_items)})
load = self.load_tracker(Order)
self.load_tracker(Item, load)
sess = create_session()
i1 = Item()
- i1.description='item 1'
+ i1.description = 'item 1'
i2 = Item()
i2.description = 'item 2'
sess3 = create_session()
o3 = sess3.query(Order).get(o.id)
- eq_( load.called, 4)
+ eq_(load.called, 4)
o.description = 'desc modified'
sess3.merge(o)
def test_one_to_one_cascade(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'address':relationship(mapper(Address, addresses),
- uselist = False)
+ 'address': relationship(mapper(Address, addresses),
+ uselist=False)
})
load = self.load_tracker(User)
self.load_tracker(Address, load)
u.id = 7
u.name = "fred"
a1 = Address()
- a1.email_address='foo@bar.com'
+ a1.email_address = 'foo@bar.com'
u.address = a1
sess.add(u)
def test_value_to_none(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'address':relationship(mapper(Address, addresses),
- uselist = False, backref='user')
+ 'address': relationship(mapper(Address, addresses),
+ uselist=False, backref='user')
})
sess = sessionmaker()()
u = User(id=7, name="fred",
- address=Address(id=1, email_address='foo@bar.com'))
+ address=Address(id=1, email_address='foo@bar.com'))
sess.add(u)
sess.commit()
sess.close()
sess = create_session()
u = User()
assert_raises_message(sa.exc.InvalidRequestError,
- "load=False option does not support",
- sess.merge, u, load=False)
+ "load=False option does not support",
+ sess.merge, u, load=False)
def test_no_load_with_backrefs(self):
"""load=False populates relationships in both
directions without requiring a load"""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses),
- backref='user')
+ 'addresses': relationship(mapper(Address, addresses),
+ backref='user')
})
u = User(id=7, name='fred', addresses=[
assert 'user' not in u.addresses[1].__dict__
eq_(u.addresses[1].user, User(id=7, name='fred'))
-
def test_dontload_with_eager(self):
"""
"""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses))
+ 'addresses': relationship(mapper(Address, addresses))
})
sess = create_session()
u = User()
u.id = 7
u.name = "fred"
a1 = Address()
- a1.email_address='foo@bar.com'
+ a1.email_address = 'foo@bar.com'
u.addresses.append(a1)
sess.add(u)
sess2 = create_session()
u2 = sess2.query(User).\
- options(sa.orm.joinedload('addresses')).get(7)
+ options(sa.orm.joinedload('addresses')).get(7)
sess3 = create_session()
u3 = sess3.merge(u2, load=False)
+
def go():
sess3.flush()
self.assert_sql_count(testing.db, go, 0)
sess3 = create_session()
u3 = sess3.merge(u2, load=False)
assert not sess3.dirty
+
def go():
sess3.flush()
self.assert_sql_count(testing.db, go, 0)
-
def test_no_load_sets_backrefs(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses),
- backref='user')})
+ 'addresses': relationship(mapper(Address, addresses),
+ backref='user')})
sess = create_session()
u = User()
u.id = 7
u.name = "fred"
a1 = Address()
- a1.email_address='foo@bar.com'
+ a1.email_address = 'foo@bar.com'
u.addresses.append(a1)
sess.add(u)
sess2 = create_session()
u2 = sess2.merge(u, load=False)
assert not sess2.dirty
+
def go():
assert u2.addresses[0].user is u2
self.assert_sql_count(testing.db, go, 0)
"""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses),
- backref='user',
- cascade="all, delete-orphan")})
+ 'addresses': relationship(mapper(Address, addresses),
+ backref='user',
+ cascade="all, delete-orphan")})
sess = create_session()
u = User()
u.id = 7
u.name = "fred"
a1 = Address()
- a1.email_address='foo@bar.com'
+ a1.email_address = 'foo@bar.com'
u.addresses.append(a1)
sess.add(u)
sess.flush()
u2 = sess2.merge(u, load=False)
assert not sess2.dirty
a2 = u2.addresses[0]
- a2.email_address='somenewaddress'
+ a2.email_address = 'somenewaddress'
assert not sa.orm.object_mapper(a2)._is_orphan(
sa.orm.attributes.instance_state(a2))
sess2.flush()
# if load=False is changed to support dirty objects, this code
# needs to pass
a2 = u2.addresses[0]
- a2.email_address='somenewaddress'
+ a2.email_address = 'somenewaddress'
assert not sa.orm.object_mapper(a2)._is_orphan(
sa.orm.attributes.instance_state(a2))
sess2.flush()
class User(object):
- class Comparator(PropComparator):
- pass
+ class Comparator(PropComparator):
+ pass
- def _getValue(self):
- return self._value
+ def _getValue(self):
+ return self._value
- def _setValue(self, value):
- setattr(self, '_value', value)
+ def _setValue(self, value):
+ setattr(self, '_value', value)
- value = property(_getValue, _setValue)
+ value = property(_getValue, _setValue)
mapper(User, users, properties={
- 'uid':synonym('id'),
- 'foobar':comparable_property(User.Comparator,User.value),
+ 'uid': synonym('id'),
+ 'foobar': comparable_property(User.Comparator, User.value),
})
sess = create_session()
"""a merge test that was fixed by [ticket:1202]"""
User, Address, addresses, users = (self.classes.User,
- self.classes.Address,
- self.tables.addresses,
- self.tables.users)
-
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users)
s = create_session(autoflush=True, autocommit=False)
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses),
- backref='user')})
+ 'addresses': relationship(mapper(Address, addresses),
+ backref='user')})
a1 = Address(user=s.merge(User(id=1, name='ed')), email_address='x')
before_id = id(a1.user)
a2 = Address(user=s.merge(User(id=1, name='jack')),
- email_address='x')
+ email_address='x')
after_id = id(a1.user)
other_id = id(a2.user)
eq_(before_id, other_id)
def test_cascades_dont_autoflush(self):
User, Address, addresses, users = (self.classes.User,
- self.classes.Address,
- self.tables.addresses,
- self.tables.users)
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users)
sess = create_session(autoflush=True, autocommit=False)
m = mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses),
- backref='user')})
+ 'addresses': relationship(mapper(Address, addresses),
+ backref='user')})
user = User(id=8, name='fred',
- addresses=[Address(email_address='user')])
+ addresses=[Address(email_address='user')])
merged_user = sess.merge(user)
assert merged_user in sess.new
sess.flush()
def test_cascades_dont_autoflush_2(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address,
- backref='user',
- cascade="all, delete-orphan")
+ 'addresses': relationship(Address,
+ backref='user',
+ cascade="all, delete-orphan")
})
mapper(Address, addresses)
users, User = self.tables.users, self.classes.User
-
mapper(User, users)
u = User(id=7)
sess = create_session(autoflush=True, autocommit=False)
u = sess.merge(u)
assert not bool(attributes.instance_state(u).expired_attributes)
+
def go():
eq_(u.name, None)
self.assert_sql_count(testing.db, go, 0)
@classmethod
def define_tables(cls, metadata):
Table('user', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)))
Table('address', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('user_id', Integer, ForeignKey('user.id')),
- Column('email', String(50)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer, ForeignKey('user.id')),
+ Column('email', String(50)))
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
+
class Address(cls.Comparable):
pass
User, Address = cls.classes.User, cls.classes.Address
user, address = cls.tables.user, cls.tables.address
mapper(User, user, properties={
- 'addresses':relationship(Address, backref=
- backref('user',
- # needlessly complex primaryjoin so that the
- # use_get flag is False
- primaryjoin=and_(
- user.c.id==address.c.user_id,
- user.c.id==user.c.id
- )
- )
- )
+ 'addresses': relationship(Address,
+ backref=backref(
+ 'user',
+ # needlessly complex primaryjoin so
+ # that the use_get flag is False
+ primaryjoin=and_(
+ user.c.id == address.c.user_id,
+ user.c.id == user.c.id
+ )))
})
mapper(Address, address)
configure_mappers()
s = Session()
s.add_all([
User(id=1, name='u1', addresses=[Address(id=1, email='a1'),
- Address(id=2, email='a2')])
+ Address(id=2, email='a2')])
])
s.commit()
def test_persistent_access_none(self):
User, Address = self.classes.User, self.classes.Address
s = Session()
+
def go():
- u1 = User(id=1,
- addresses =[Address(id=1), Address(id=2)]
- )
+ u1 = User(id=1, addresses=[Address(id=1), Address(id=2)])
u2 = s.merge(u1)
self.assert_sql_count(testing.db, go, 2)
def test_persistent_access_one(self):
User, Address = self.classes.User, self.classes.Address
s = Session()
+
def go():
- u1 = User(id=1,
- addresses =[Address(id=1), Address(id=2)]
- )
+ u1 = User(id=1, addresses=[Address(id=1), Address(id=2)])
u2 = s.merge(u1)
a1 = u2.addresses[0]
assert a1.user is u2
def test_persistent_access_two(self):
User, Address = self.classes.User, self.classes.Address
s = Session()
+
def go():
- u1 = User(id=1,
- addresses =[Address(id=1), Address(id=2)]
- )
+ u1 = User(id=1, addresses=[Address(id=1), Address(id=2)])
u2 = s.merge(u1)
a1 = u2.addresses[0]
assert a1.user is u2
def test_pending_access_one(self):
User, Address = self.classes.User, self.classes.Address
s = Session()
+
def go():
u1 = User(id=1,
- addresses =[Address(id=1), Address(id=2),
- Address(id=3, email='a3')]
- )
+ addresses=[Address(id=1), Address(id=2),
+ Address(id=3, email='a3')])
u2 = s.merge(u1)
a3 = u2.addresses[2]
assert a3.user is u2
def test_pending_access_two(self):
User, Address = self.classes.User, self.classes.Address
s = Session()
+
def go():
u1 = User(id=1,
- addresses =[Address(id=1), Address(id=2),
- Address(id=3, email='a3')]
- )
+ addresses=[Address(id=1), Address(id=2),
+ Address(id=3, email='a3')])
u2 = s.merge(u1)
a3 = u2.addresses[2]
assert a3.user is u2
@classmethod
def define_tables(cls, metadata):
Table("data", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', PickleType(comparator=operator.eq))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', PickleType(comparator=operator.eq)))
@classmethod
def setup_classes(cls):
d3 = sess.merge(d2)
eq_(d3.data, ["this", "is", "another", "list"])
+
class CompositeNullPksTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("data", metadata,
- Column('pk1', String(10), primary_key=True),
- Column('pk2', String(10), primary_key=True),
- )
+ Column('pk1', String(10), primary_key=True),
+ Column('pk2', String(10), primary_key=True))
@classmethod
def setup_classes(cls):
return sess.merge(d1)
self.assert_sql_count(testing.db, go, 0)
+
class LoadOnPendingTest(fixtures.MappedTest):
"""Test interaction of merge() with load_on_pending relationships"""
@classmethod
def define_tables(cls, metadata):
rocks_table = Table("rocks", metadata,
- Column("id", Integer, primary_key=True),
- Column("description", String(10)),
- )
+ Column("id", Integer, primary_key=True),
+ Column("description", String(10)))
bugs_table = Table("bugs", metadata,
- Column("id", Integer, primary_key=True),
- Column("rockid", Integer, ForeignKey('rocks.id')),
- )
+ Column("id", Integer, primary_key=True),
+ Column("rockid", Integer, ForeignKey('rocks.id')))
@classmethod
def setup_classes(cls):
class Rock(cls.Basic, fixtures.ComparableEntity):
pass
+
class Bug(cls.Basic, fixtures.ComparableEntity):
pass
def _setup_delete_orphan_o2o(self):
mapper(self.classes.Rock, self.tables.rocks,
- properties={'bug': relationship(self.classes.Bug,
- cascade='all,delete-orphan',
- load_on_pending=True,
- uselist=False)
- })
+ properties={'bug': relationship(self.classes.Bug,
+ cascade='all,delete-orphan',
+ load_on_pending=True,
+ uselist=False)
+ })
mapper(self.classes.Bug, self.tables.bugs)
self.sess = sessionmaker()()
# we've already passed ticket #2374 problem since merge() returned,
# but for good measure:
assert m is not r
- eq_(m,r)
+ eq_(m, r)
def test_merge_delete_orphan_o2o_none(self):
"""one to one delete_orphan relationships marked load_on_pending
self._setup_delete_orphan_o2o()
self._merge_delete_orphan_o2o_with(self.classes.Bug(id=1))
+
class PolymorphicOnTest(fixtures.MappedTest):
"""Test merge() of polymorphic object when polymorphic_on
isn't a Column"""
@classmethod
def define_tables(cls, metadata):
Table('employees', metadata,
- Column('employee_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(1), nullable=False),
- Column('data', String(50)),
- )
+ Column('employee_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(1), nullable=False),
+ Column('data', String(50)))
@classmethod
def setup_classes(cls):
class Employee(cls.Basic, fixtures.ComparableEntity):
pass
+
class Manager(Employee):
pass
+
class Engineer(Employee):
pass
def _setup_polymorphic_on_mappers(self):
employee_mapper = mapper(self.classes.Employee,
- self.tables.employees,
- polymorphic_on=case(value=self.tables.employees.c.type,
- whens={
- 'E': 'employee',
- 'M': 'manager',
- 'G': 'engineer',
- 'R': 'engineer',
- }),
- polymorphic_identity='employee')
+ self.tables.employees,
+ polymorphic_on=case(
+ value=self.tables.employees.c.type,
+ whens={
+ 'E': 'employee',
+ 'M': 'manager',
+ 'G': 'engineer',
+ 'R': 'engineer',
+ }),
+ polymorphic_identity='employee')
mapper(self.classes.Manager, inherits=employee_mapper,
- polymorphic_identity='manager')
+ polymorphic_identity='manager')
mapper(self.classes.Engineer, inherits=employee_mapper,
- polymorphic_identity='engineer')
+ polymorphic_identity='engineer')
self.sess = sessionmaker()()
def test_merge_polymorphic_on(self):
self._setup_polymorphic_on_mappers()
m = self.classes.Manager(employee_id=55, type='M',
- data='original data')
+ data='original data')
self.sess.add(m)
self.sess.commit()
self.sess.expunge_all()
# we've already passed ticket #2449 problem since
# merge() returned, but for good measure:
assert m is not merged
- eq_(m,merged)
+ eq_(m, merged)
fk_args = _backend_specific_fk_args()
Table('users', metadata,
- Column('username', String(50), primary_key=True),
- Column('fullname', String(100)),
- test_needs_fk=True)
+ Column('username', String(50), primary_key=True),
+ Column('fullname', String(100)),
+ test_needs_fk=True)
Table(
'addresses', metadata,
def _test_onetomany(self, passive_updates):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(
User, users, properties={
sess.flush()
if not passive_updates:
# test passive_updates=False;
- #load addresses, update user, update 2 addresses
+ # load addresses, update user, update 2 addresses
self.assert_sql_count(testing.db, go, 3)
else:
# test passive_updates=True; update user
"""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
with testing.db.begin() as conn:
conn.execute(users.insert(), username='jack', fullname='jack')
mapper(User, users)
mapper(Address, addresses, properties={
'user': relationship(User,
- passive_updates=False)
+ passive_updates=False)
})
sess = create_session()
def _test_manytoone(self, passive_updates):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users)
mapper(Address, addresses, properties={
def _test_onetoone(self, passive_updates):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(
User, users, properties={
def _test_bidirectional(self, passive_updates):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users)
mapper(Address, addresses, properties={
'user': relationship(User, passive_updates=passive_updates,
- backref='addresses')})
+ backref='addresses')})
sess = create_session()
a1 = Address(email='jack1')
def _test_manytomany(self, passive_updates):
users, items, Item, User, users_to_items = (self.tables.users,
- self.tables.items,
- self.classes.Item,
- self.classes.User,
- self.tables.users_to_items)
+ self.tables.items,
+ self.classes.Item,
+ self.classes.User,
+ self.tables.users_to_items)
mapper(
User, users, properties={
"""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users)
mapper(Address, addresses, properties={'user': relationship(User)})
Column('status', Integer, autoincrement=False, primary_key=True),
Column('username', String(50), nullable=False),
test_needs_acid=True
- )
+ )
@classmethod
def setup_classes(cls):
backref=sa.orm.backref(
'parentnode', remote_side=nodes.c.name,
passive_updates=False),
- )})
+ )})
sess = Session()
n1 = Node(name='n1')
def _test_onetomany(self, passive_updates):
User, Address, users, addresses = (self.classes.User,
- self.classes.Address,
- self.tables.users,
- self.tables.addresses)
+ self.classes.Address,
+ self.tables.users,
+ self.tables.addresses)
mapper(
User, users, properties={
fk_args = _backend_specific_fk_args()
Table('users', metadata,
- Column('username', String(50), primary_key=True),
- test_needs_fk=True)
+ Column('username', String(50), primary_key=True),
+ test_needs_fk=True)
Table(
'addresses', metadata,
"""
User, Address, users, addresses = (self.classes.User,
- self.classes.Address,
- self.tables.users,
- self.tables.addresses)
+ self.classes.Address,
+ self.tables.users,
+ self.tables.addresses)
mapper(
User, users, properties={
"""
User, Address, users, addresses = (self.classes.User,
- self.classes.Address,
- self.tables.users,
- self.tables.addresses)
+ self.classes.Address,
+ self.tables.users,
+ self.tables.addresses)
mapper(
User, users, properties={
def _test_change_m2o(self, passive_updates):
User, Address, users, addresses = (self.classes.User,
- self.classes.Address,
- self.tables.users,
- self.tables.addresses)
+ self.classes.Address,
+ self.tables.users,
+ self.tables.addresses)
mapper(User, users)
mapper(Address, addresses, properties={
def _test_move_m2o(self, passive_updates):
User, Address, users, addresses = (self.classes.User,
- self.classes.Address,
- self.tables.users,
- self.tables.addresses)
+ self.classes.Address,
+ self.tables.users,
+ self.tables.addresses)
# tests [ticket:1856]
mapper(User, users)
def test_rowswitch_doesntfire(self):
User, Address, users, addresses = (self.classes.User,
- self.classes.Address,
- self.tables.users,
- self.tables.addresses)
+ self.classes.Address,
+ self.tables.users,
+ self.tables.addresses)
mapper(User, users)
mapper(Address, addresses, properties={
"""
User, Address, users, addresses = (self.classes.User,
- self.classes.Address,
- self.tables.users,
- self.tables.addresses)
+ self.classes.Address,
+ self.tables.users,
+ self.tables.addresses)
mapper(
User, users, properties={
Table(
'manager', metadata, Column('name', String(50),
- ForeignKey('person.name', **fk_args), primary_key=True),
+ ForeignKey('person.name', **fk_args),
+ primary_key=True),
Column('paperwork', String(50)), test_needs_fk=True
)
def test_pk_passive(self):
self._test_pk(True)
- #@testing.requires.non_updating_cascade
+ # @testing.requires.non_updating_cascade
def test_pk_nonpassive(self):
self._test_pk(False)
from sqlalchemy.engine import default
from sqlalchemy.testing.entities import ComparableEntity
from sqlalchemy import Integer, String, ForeignKey
-from .inheritance._poly_fixtures import Company, Person, Engineer, Manager, Boss, \
- Machine, Paperwork, _PolymorphicFixtureBase, _Polymorphic,\
- _PolymorphicPolymorphic, _PolymorphicUnions, _PolymorphicJoins,\
- _PolymorphicAliasedJoins
+from .inheritance._poly_fixtures import (Company, Person, Engineer, Manager,
+ Boss, Machine, Paperwork,
+ _PolymorphicFixtureBase, _Polymorphic,
+ _PolymorphicPolymorphic,
+ _PolymorphicUnions, _PolymorphicJoins,
+ _PolymorphicAliasedJoins)
from sqlalchemy.testing.assertsql import AllOf, CompiledSQL
self.assert_compile(
sess.query(Company).join(
- Company.employees.of_type(
- with_polymorphic(Person, [Engineer, Manager],
- aliased=True, flat=True)
- )
- ),
+ Company.employees.of_type(
+ with_polymorphic(Person, [Engineer, Manager],
+ aliased=True, flat=True)
+ )
+ ),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name FROM companies "
"JOIN %s"
- % (
+ % (
self._polymorphic_join_target([Engineer, Manager])
)
)
def test_with_polymorphic_join_exec_contains_eager_one(self):
sess = Session()
+
def go():
wp = with_polymorphic(Person, [Engineer, Manager],
- aliased=True, flat=True)
+ aliased=True, flat=True)
eq_(
sess.query(Company).join(
Company.employees.of_type(wp)
- ).order_by(Company.company_id, wp.person_id).\
+ ).order_by(Company.company_id, wp.person_id).
options(contains_eager(Company.employees.of_type(wp))).all(),
[self.c1, self.c2]
)
def test_with_polymorphic_join_exec_contains_eager_two(self):
sess = Session()
+
def go():
wp = with_polymorphic(Person, [Engineer, Manager], aliased=True)
eq_(
sess.query(Company).join(
Company.employees.of_type(wp)
- ).order_by(Company.company_id, wp.person_id).\
+ ).order_by(Company.company_id, wp.person_id).
options(contains_eager(Company.employees, alias=wp)).all(),
[self.c1, self.c2]
)
sess = Session()
wp = with_polymorphic(Person, [Engineer], aliased=True)
eq_(
- sess.query(Company.company_id).\
- filter(
- Company.employees.of_type(wp).any(
- wp.Engineer.primary_language == 'java')
- ).all(),
+ sess.query(Company.company_id).
+ filter(
+ Company.employees.of_type(wp).any(
+ wp.Engineer.primary_language == 'java')
+ ).all(),
[(1, )]
)
def test_subqueryload_implicit_withpoly(self):
sess = Session()
+
def go():
eq_(
- sess.query(Company).\
- filter_by(company_id=1).\
- options(subqueryload(Company.employees.of_type(Engineer))).\
- all(),
+ sess.query(Company).
+ filter_by(company_id=1).
+ options(subqueryload(Company.employees.of_type(Engineer))).
+ all(),
[self._company_with_emps_fixture()[0]]
)
self.assert_sql_count(testing.db, go, 4)
def test_joinedload_implicit_withpoly(self):
sess = Session()
+
def go():
eq_(
- sess.query(Company).\
- filter_by(company_id=1).\
- options(joinedload(Company.employees.of_type(Engineer))).\
- all(),
+ sess.query(Company).
+ filter_by(company_id=1).
+ options(joinedload(Company.employees.of_type(Engineer))).
+ all(),
[self._company_with_emps_fixture()[0]]
)
self.assert_sql_count(testing.db, go, 3)
def test_subqueryload_explicit_withpoly(self):
sess = Session()
+
def go():
target = with_polymorphic(Person, Engineer)
eq_(
- sess.query(Company).\
- filter_by(company_id=1).\
- options(subqueryload(Company.employees.of_type(target))).\
- all(),
+ sess.query(Company).
+ filter_by(company_id=1).
+ options(subqueryload(Company.employees.of_type(target))).
+ all(),
[self._company_with_emps_fixture()[0]]
)
self.assert_sql_count(testing.db, go, 4)
def test_joinedload_explicit_withpoly(self):
sess = Session()
+
def go():
target = with_polymorphic(Person, Engineer, flat=True)
eq_(
- sess.query(Company).\
- filter_by(company_id=1).\
- options(joinedload(Company.employees.of_type(target))).\
- all(),
+ sess.query(Company).
+ filter_by(company_id=1).
+ options(joinedload(Company.employees.of_type(target))).
+ all(),
[self._company_with_emps_fixture()[0]]
)
self.assert_sql_count(testing.db, go, 3)
self.assert_sql_count(testing.db, go, 2)
-class PolymorphicPolymorphicTest(_PolymorphicTestBase, _PolymorphicPolymorphic):
+class PolymorphicPolymorphicTest(_PolymorphicTestBase,
+ _PolymorphicPolymorphic):
def _polymorphic_join_target(self, cls):
from sqlalchemy.orm import class_mapper
comp_sel.process(sel, asfrom=True).replace("\n", "") + \
" ON companies.company_id = people_1.company_id"
+
class PolymorphicUnionsTest(_PolymorphicTestBase, _PolymorphicUnions):
def _polymorphic_join_target(self, cls):
comp_sel.process(sel, asfrom=True).replace("\n", "") + \
" AS anon_1 ON companies.company_id = anon_1.company_id"
-class PolymorphicAliasedJoinsTest(_PolymorphicTestBase, _PolymorphicAliasedJoins):
+
+class PolymorphicAliasedJoinsTest(_PolymorphicTestBase,
+ _PolymorphicAliasedJoins):
def _polymorphic_join_target(self, cls):
from sqlalchemy.orm import class_mapper
comp_sel.process(sel, asfrom=True).replace("\n", "") + \
" AS anon_1 ON companies.company_id = anon_1.people_company_id"
+
class PolymorphicJoinsTest(_PolymorphicTestBase, _PolymorphicJoins):
def _polymorphic_join_target(self, cls):
from sqlalchemy.orm import class_mapper
from sqlalchemy.sql.expression import FromGrouping
- sel = FromGrouping(class_mapper(Person)._with_polymorphic_selectable.alias(flat=True))
+ sel = FromGrouping(class_mapper(
+ Person)._with_polymorphic_selectable.alias(flat=True))
comp_sel = sel.compile(dialect=default.DefaultDialect())
return \
q._compile_context
)
-
def test_joinedload_explicit_with_flataliased_poly_compile(self):
sess = Session()
target = with_polymorphic(Person, Engineer, flat=True)
q = sess.query(Company).\
filter_by(company_id=1).\
options(joinedload(Company.employees.of_type(target)))
- self.assert_compile(q,
+ self.assert_compile(
+ q,
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name, "
"people_1.person_id AS people_1_person_id, "
"ORDER BY people_1.person_id"
)
-class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeMappedTest):
+
+class SubclassRelationshipTest(testing.AssertsCompiledSQL,
+ fixtures.DeclarativeMappedTest):
"""There's overlap here vs. the ones above."""
run_setup_classes = 'once'
@classmethod
def setup_classes(cls):
Base = cls.DeclarativeBasic
+
class Job(ComparableEntity, Base):
__tablename__ = "job"
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
type = Column(String(10))
widget_id = Column(ForeignKey('widget.id'))
widget = relationship("Widget")
class ParentThing(ComparableEntity, Base):
__tablename__ = 'parent'
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
container_id = Column(Integer, ForeignKey('data_container.id'))
container = relationship("DataContainer")
__tablename__ = "data_container"
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(10))
jobs = relationship(Job, order_by=Job.id)
__tablename__ = "widget"
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(10))
@classmethod
return [
ParentThing(
container=DataContainer(name="d1",
- jobs=[
- SubJob(attr="s1", widget=Widget(name='w1')),
- SubJob(attr="s2", widget=Widget(name='w2'))
- ])
+ jobs=[
+ SubJob(attr="s1",
+ widget=Widget(name='w1')),
+ SubJob(attr="s2",
+ widget=Widget(name='w2'))])
),
ParentThing(
container=DataContainer(name="d2",
- jobs=[
- SubJob(attr="s3", widget=Widget(name='w3')),
- SubJob(attr="s4", widget=Widget(name='w4'))
- ])
+ jobs=[
+ SubJob(attr="s3",
+ widget=Widget(name='w3')),
+ SubJob(attr="s4",
+ widget=Widget(name='w4'))])
),
]
s = Session(testing.db)
q = s.query(DataContainer).\
- join(DataContainer.jobs.of_type(Job_P)).\
- options(contains_eager(DataContainer.jobs.of_type(Job_P)))
+ join(DataContainer.jobs.of_type(Job_P)).\
+ options(contains_eager(DataContainer.jobs.of_type(Job_P)))
+
def go():
eq_(
q.all(),
s = Session(testing.db)
q = s.query(DataContainer).\
- options(joinedload(DataContainer.jobs.of_type(Job_P)))
+ options(joinedload(DataContainer.jobs.of_type(Job_P)))
+
def go():
eq_(
q.all(),
self.classes.SubJob
s = Session(testing.db)
q = s.query(DataContainer).\
- options(joinedload(DataContainer.jobs.of_type(SubJob)))
+ options(joinedload(DataContainer.jobs.of_type(SubJob)))
+
def go():
eq_(
q.all(),
DataContainer = self.classes.DataContainer
s = Session(testing.db)
q = s.query(DataContainer)
+
def go():
eq_(
q.all(),
self.classes.SubJob
s = Session(testing.db)
q = s.query(DataContainer).\
- options(subqueryload(DataContainer.jobs.of_type(SubJob)))
+ options(subqueryload(DataContainer.jobs.of_type(SubJob)))
+
def go():
eq_(
q.all(),
self.classes.SubJob
s = Session(testing.db)
q = s.query(ParentThing).\
- options(
- subqueryload_all(
- ParentThing.container,
- DataContainer.jobs.of_type(SubJob)
- ))
+ options(
+ subqueryload_all(
+ ParentThing.container,
+ DataContainer.jobs.of_type(SubJob)
+ ))
+
def go():
eq_(
q.all(),
s = Session(testing.db)
sj_alias = aliased(SubJob)
q = s.query(DataContainer).\
- options(
- subqueryload_all(
- DataContainer.jobs.of_type(sj_alias),
- sj_alias.widget
- ))
+ options(
+ subqueryload_all(
+ DataContainer.jobs.of_type(sj_alias),
+ sj_alias.widget
+ ))
+
def go():
eq_(
q.all(),
self.classes.SubJob
s = Session(testing.db)
q = s.query(ParentThing).\
- options(
- joinedload_all(
- ParentThing.container,
- DataContainer.jobs.of_type(SubJob)
- ))
+ options(
+ joinedload_all(
+ ParentThing.container,
+ DataContainer.jobs.of_type(SubJob)
+ ))
+
def go():
eq_(
q.all(),
s = Session()
q = s.query(Job).join(DataContainer.jobs).\
- filter(
- DataContainer.jobs.of_type(Job_P).\
- any(Job_P.id < Job.id)
- )
+ filter(
+ DataContainer.jobs.of_type(Job_P).
+ any(Job_P.id < Job.id)
+ )
- self.assert_compile(q,
+ self.assert_compile(
+ q,
"SELECT job.id AS job_id, job.type AS job_type, "
"job.widget_id AS job_widget_id, "
"job.container_id "
"JOIN job ON data_container.id = job.container_id "
"WHERE EXISTS (SELECT 1 "
"FROM job AS job_1 LEFT OUTER JOIN subjob AS subjob_1 "
- "ON job_1.id = subjob_1.id "
+ "ON job_1.id = subjob_1.id "
"WHERE data_container.id = job_1.container_id "
"AND job_1.id < job.id)"
)
s = Session()
q = s.query(Job).join(DataContainer.jobs).\
- filter(
- DataContainer.jobs.of_type(Job_A).\
- any(and_(Job_A.id < Job.id, Job_A.type=='fred'))
- )
- self.assert_compile(q,
+ filter(
+ DataContainer.jobs.of_type(Job_A).
+ any(and_(Job_A.id < Job.id, Job_A.type == 'fred'))
+ )
+ self.assert_compile(
+ q,
"SELECT job.id AS job_id, job.type AS job_type, "
"job.widget_id AS job_widget_id, "
"job.container_id AS job_container_id "
- "FROM data_container JOIN job ON data_container.id = job.container_id "
+ "FROM data_container JOIN job "
+ "ON data_container.id = job.container_id "
"WHERE EXISTS (SELECT 1 "
"FROM job AS job_1 "
"WHERE data_container.id = job_1.container_id "
s = Session()
q = s.query(DataContainer).join(DataContainer.jobs.of_type(Job_P))
self.assert_compile(q,
- "SELECT data_container.id AS data_container_id, "
- "data_container.name AS data_container_name "
- "FROM data_container JOIN "
- "(job LEFT OUTER JOIN subjob "
- "ON job.id = subjob.id) "
- "ON data_container.id = job.container_id")
+ "SELECT data_container.id AS data_container_id, "
+ "data_container.name AS data_container_name "
+ "FROM data_container JOIN "
+ "(job LEFT OUTER JOIN subjob "
+ "ON job.id = subjob.id) "
+ "ON data_container.id = job.container_id")
def test_join_wsubclass(self):
ParentThing, DataContainer, Job, SubJob = \
# query.join(). When we do joinedload() etc., we're instead
# doing a with_polymorphic(), and there we need the join to be
# outer by default.
- self.assert_compile(q,
+ self.assert_compile(
+ q,
"SELECT data_container.id AS data_container_id, "
"data_container.name AS data_container_name "
"FROM data_container JOIN (job JOIN subjob ON job.id = subjob.id) "
s = Session()
q = s.query(DataContainer).join(DataContainer.jobs.of_type(Job_P))
self.assert_compile(q,
- "SELECT data_container.id AS data_container_id, "
- "data_container.name AS data_container_name "
- "FROM data_container JOIN "
- "(job JOIN subjob ON job.id = subjob.id) "
- "ON data_container.id = job.container_id")
+ "SELECT data_container.id AS data_container_id, "
+ "data_container.name AS data_container_name "
+ "FROM data_container JOIN "
+ "(job JOIN subjob ON job.id = subjob.id) "
+ "ON data_container.id = job.container_id")
def test_join_walias(self):
ParentThing, DataContainer, Job, SubJob = \
s = Session()
q = s.query(DataContainer).join(DataContainer.jobs.of_type(Job_A))
self.assert_compile(q,
- "SELECT data_container.id AS data_container_id, "
- "data_container.name AS data_container_name "
- "FROM data_container JOIN job AS job_1 "
- "ON data_container.id = job_1.container_id")
+ "SELECT data_container.id AS data_container_id, "
+ "data_container.name AS data_container_name "
+ "FROM data_container JOIN job AS job_1 "
+ "ON data_container.id = job_1.container_id")
def test_join_explicit_wpoly_noalias(self):
ParentThing, DataContainer, Job, SubJob = \
s = Session()
q = s.query(DataContainer).join(Job_P, DataContainer.jobs)
self.assert_compile(q,
- "SELECT data_container.id AS data_container_id, "
- "data_container.name AS data_container_name "
- "FROM data_container JOIN "
- "(job LEFT OUTER JOIN subjob "
- "ON job.id = subjob.id) "
- "ON data_container.id = job.container_id")
-
+ "SELECT data_container.id AS data_container_id, "
+ "data_container.name AS data_container_name "
+ "FROM data_container JOIN "
+ "(job LEFT OUTER JOIN subjob "
+ "ON job.id = subjob.id) "
+ "ON data_container.id = job.container_id")
def test_join_explicit_wpoly_flat(self):
ParentThing, DataContainer, Job, SubJob = \
s = Session()
q = s.query(DataContainer).join(Job_P, DataContainer.jobs)
self.assert_compile(q,
- "SELECT data_container.id AS data_container_id, "
- "data_container.name AS data_container_name "
- "FROM data_container JOIN "
- "(job AS job_1 LEFT OUTER JOIN subjob AS subjob_1 "
- "ON job_1.id = subjob_1.id) "
- "ON data_container.id = job_1.container_id")
+ "SELECT data_container.id AS data_container_id, "
+ "data_container.name AS data_container_name "
+ "FROM data_container JOIN "
+ "(job AS job_1 LEFT OUTER JOIN subjob AS subjob_1 "
+ "ON job_1.id = subjob_1.id) "
+ "ON data_container.id = job_1.container_id")
def test_join_explicit_wpoly_full_alias(self):
ParentThing, DataContainer, Job, SubJob = \
s = Session()
q = s.query(DataContainer).join(Job_P, DataContainer.jobs)
- self.assert_compile(q,
+ self.assert_compile(
+ q,
"SELECT data_container.id AS data_container_id, "
"data_container.name AS data_container_name "
"FROM data_container JOIN "
"(SELECT job.id AS job_id, job.type AS job_type, "
- "job.widget_id AS job_widget_id, "
- "job.container_id AS job_container_id, "
- "subjob.id AS subjob_id, subjob.attr AS subjob_attr "
- "FROM job LEFT OUTER JOIN subjob ON job.id = subjob.id) "
- "AS anon_1 ON data_container.id = anon_1.job_container_id"
+ "job.widget_id AS job_widget_id, "
+ "job.container_id AS job_container_id, "
+ "subjob.id AS subjob_id, subjob.attr AS subjob_attr "
+ "FROM job LEFT OUTER JOIN subjob ON job.id = subjob.id) "
+ "AS anon_1 ON data_container.id = anon_1.job_container_id"
)
@classmethod
def define_tables(cls, metadata):
Table('jack', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('number', String(50)),
Column('status', String(20)),
Column('subroom', String(5)))
Table('port', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(30)),
Column('description', String(100)),
Column('jack_id', Integer, ForeignKey("jack.id")))
def setup_mappers(cls):
class Jack(cls.Basic):
pass
+
class Port(cls.Basic):
pass
-
def test_basic(self):
Port, port, jack, Jack = (self.classes.Port,
- self.tables.port,
- self.tables.jack,
- self.classes.Jack)
+ self.tables.port,
+ self.tables.jack,
+ self.classes.Jack)
mapper(Port, port)
mapper(Jack, jack,
properties=dict(
- port=relationship(Port, backref='jack',
- uselist=False,
- )),
- )
+ port=relationship(Port, backref='jack', uselist=False)))
session = create_session()
p = Port(name='fa0/1')
session.add(p)
- j.port=p
+ j.port = p
session.flush()
jid = j.id
pid = p.id
- j=session.query(Jack).get(jid)
- p=session.query(Port).get(pid)
+ j = session.query(Jack).get(jid)
+ p = session.query(Port).get(pid)
assert p.jack is not None
- assert p.jack is j
+ assert p.jack is j
assert j.port is not None
p.jack = None
assert j.port is None
j = session.query(Jack).get(jid)
p = session.query(Port).get(pid)
- j.port=None
+ j.port = None
self.assert_(p.jack is None)
session.flush()
session.delete(j)
session.flush()
-
def test_str(self):
User = self.classes.User
- l = Load(User)
- l.strategy = (('deferred', False), ('instrument', True))
+ result = Load(User)
+ result.strategy = (('deferred', False), ('instrument', True))
eq_(
- str(l),
+ str(result),
"Load(strategy=(('deferred', False), ('instrument', True)))"
)
User = self.classes.User
Address = self.classes.Address
- l = Load(User)
+ result = Load(User)
eq_(
- l._generate_path(inspect(User)._path_registry, User.addresses, "relationship"),
+ result._generate_path(inspect(User)._path_registry,
+ User.addresses, "relationship"),
self._make_path_registry([User, "addresses", Address])
)
def test_gen_path_attr_column(self):
User = self.classes.User
- l = Load(User)
+ result = Load(User)
eq_(
- l._generate_path(inspect(User)._path_registry, User.name, "column"),
+ result._generate_path(inspect(User)._path_registry,
+ User.name, "column"),
self._make_path_registry([User, "name"])
)
User = self.classes.User
Address = self.classes.Address
- l = Load(User)
+ result = Load(User)
eq_(
- l._generate_path(inspect(User)._path_registry, "addresses", "relationship"),
+ result._generate_path(inspect(User)._path_registry,
+ "addresses", "relationship"),
self._make_path_registry([User, "addresses", Address])
)
def test_gen_path_string_column(self):
User = self.classes.User
- l = Load(User)
+ result = Load(User)
eq_(
- l._generate_path(inspect(User)._path_registry, "name", "column"),
+ result._generate_path(
+ inspect(User)._path_registry, "name", "column"),
self._make_path_registry([User, "name"])
)
def test_gen_path_invalid_from_col(self):
User = self.classes.User
- l = Load(User)
- l.path = self._make_path_registry([User, "name"])
+ result = Load(User)
+ result.path = self._make_path_registry([User, "name"])
assert_raises_message(
sa.exc.ArgumentError,
"Attribute 'name' of entity 'Mapper|User|users' does "
- "not refer to a mapped entity",
- l._generate_path, l.path, User.addresses, "relationship"
+ "not refer to a mapped entity",
+ result._generate_path, result.path, User.addresses, "relationship"
)
+
def test_gen_path_attr_entity_invalid_raiseerr(self):
User = self.classes.User
Order = self.classes.Order
- l = Load(User)
+ result = Load(User)
assert_raises_message(
sa.exc.ArgumentError,
- "Attribute 'Order.items' does not link from element 'Mapper|User|users'",
- l._generate_path,
+ "Attribute 'Order.items' does not link from element "
+ "'Mapper|User|users'",
+ result._generate_path,
inspect(User)._path_registry, Order.items, "relationship",
)
User = self.classes.User
Order = self.classes.Order
- l = Load(User)
+ result = Load(User)
- eq_(
- l._generate_path(
- inspect(User)._path_registry, Order.items, "relationship", False
- ),
- None
- )
+ eq_(result._generate_path(inspect(User)._path_registry, Order.items,
+ "relationship", False),
+ None)
def test_set_strat_ent(self):
User = self.classes.User
)
-
-
class OptionsTest(PathTest, QueryTest):
def _option_fixture(self, *arg):
return strategy_options._UnboundLoad._from_keys(
- strategy_options._UnboundLoad.joinedload, arg, True, {})
-
-
+ strategy_options._UnboundLoad.joinedload, arg, True, {})
def test_get_path_one_level_string(self):
User = self.classes.User
q = sess.query(User)
opt = self._option_fixture('email_address', 'id')
q = sess.query(Address)._with_current_path(
- orm_util.PathRegistry.coerce([inspect(User),
- inspect(User).attrs.addresses])
- )
+ orm_util.PathRegistry.coerce([inspect(User),
+ inspect(User).attrs.addresses])
+ )
self._assert_path_result(opt, q, [])
def test_get_path_one_level_with_unrelated(self):
def test_path_multilevel_string(self):
Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
+ self.classes.User,
+ self.classes.Order)
sess = Session()
q = sess.query(User)
def test_path_multilevel_attribute(self):
Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
+ self.classes.User,
+ self.classes.Order)
sess = Session()
q = sess.query(User)
def test_with_current_matching_string(self):
Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
+ self.classes.User,
+ self.classes.Order)
sess = Session()
q = sess.query(Item)._with_current_path(
- self._make_path_registry([User, 'orders', Order, 'items'])
- )
+ self._make_path_registry([User, 'orders', Order, 'items'])
+ )
opt = self._option_fixture("orders.items.keywords")
self._assert_path_result(opt, q, [
def test_with_current_matching_attribute(self):
Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
+ self.classes.User,
+ self.classes.Order)
sess = Session()
q = sess.query(Item)._with_current_path(
- self._make_path_registry([User, 'orders', Order, 'items'])
- )
+ self._make_path_registry([User, 'orders', Order, 'items'])
+ )
opt = self._option_fixture(User.orders, Order.items, Item.keywords)
self._assert_path_result(opt, q, [
def test_with_current_nonmatching_string(self):
Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
+ self.classes.User,
+ self.classes.Order)
sess = Session()
q = sess.query(Item)._with_current_path(
- self._make_path_registry([User, 'orders', Order, 'items'])
- )
+ self._make_path_registry([User, 'orders', Order, 'items'])
+ )
opt = self._option_fixture("keywords")
self._assert_path_result(opt, q, [])
def test_with_current_nonmatching_attribute(self):
Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
+ self.classes.User,
+ self.classes.Order)
sess = Session()
q = sess.query(Item)._with_current_path(
- self._make_path_registry([User, 'orders', Order, 'items'])
- )
+ self._make_path_registry([User, 'orders', Order, 'items'])
+ )
opt = self._option_fixture(Item.keywords)
self._assert_path_result(opt, q, [])
Dingaling, Address = self.classes.Dingaling, self.classes.Address
sess = Session()
+
class SubAddr(Address):
pass
mapper(SubAddr, inherits=Address, properties={
Dingaling, Address = self.classes.Dingaling, self.classes.Address
sess = Session()
+
class SubAddr(Address):
pass
mapper(SubAddr, inherits=Address, properties={
Dingaling, Address = self.classes.Dingaling, self.classes.Address
sess = Session()
+
class SubAddr(Address):
pass
mapper(SubAddr, inherits=Address, properties={
opt = self._option_fixture(SubAddr.user)
self._assert_path_result(opt, q,
- [(Address, inspect(Address).attrs.user)])
+ [(Address, inspect(Address).attrs.user)])
def test_of_type(self):
User, Address = self.classes.User, self.classes.Address
sess = Session()
+
class SubAddr(Address):
pass
mapper(SubAddr, inherits=Address)
q = sess.query(User)
- opt = self._option_fixture(User.addresses.of_type(SubAddr), SubAddr.user)
+ opt = self._option_fixture(
+ User.addresses.of_type(SubAddr), SubAddr.user)
u_mapper = inspect(User)
a_mapper = inspect(Address)
def test_of_type_plus_level(self):
Dingaling, User, Address = (self.classes.Dingaling,
- self.classes.User,
- self.classes.Address)
+ self.classes.User,
+ self.classes.Address)
sess = Session()
+
class SubAddr(Address):
pass
mapper(SubAddr, inherits=Address, properties={
})
q = sess.query(User)
- opt = self._option_fixture(User.addresses.of_type(SubAddr), SubAddr.flub)
+ opt = self._option_fixture(
+ User.addresses.of_type(SubAddr), SubAddr.flub)
u_mapper = inspect(User)
sa_mapper = inspect(SubAddr)
self._assert_path_result(opt, q, [
(u_mapper, u_mapper.attrs.addresses),
- (u_mapper, u_mapper.attrs.addresses, sa_mapper, sa_mapper.attrs.flub)
+ (u_mapper, u_mapper.attrs.addresses, sa_mapper,
+ sa_mapper.attrs.flub)
])
def test_aliased_single(self):
sess = Session()
ualias = aliased(User)
q = sess.query(ualias)._with_current_path(
- self._make_path_registry([Address, 'user'])
- )
+ self._make_path_registry([Address, 'user'])
+ )
opt = self._option_fixture(Address.user, ualias.addresses)
self._assert_path_result(opt, q, [(inspect(ualias), 'addresses')])
sess = Session()
ualias = aliased(User)
q = sess.query(User)._with_current_path(
- self._make_path_registry([Address, 'user'])
- )
+ self._make_path_registry([Address, 'user'])
+ )
opt = self._option_fixture(Address.user, ualias.addresses)
self._assert_path_result(opt, q, [])
sess = Session()
ualias = aliased(User)
q = sess.query(ualias)._with_current_path(
- self._make_path_registry([Address, 'user'])
- )
+ self._make_path_registry([Address, 'user'])
+ )
opt = self._option_fixture(Address.user, User.addresses)
self._assert_path_result(opt, q, [])
opt = self._option_fixture(User.orders)
sess = Session()
q = sess.query(Item)._with_current_path(
- self._make_path_registry([User, 'orders', Order, 'items'])
- )
+ self._make_path_registry([User, 'orders', Order, 'items'])
+ )
self._assert_path_result(opt, q, [])
def test_chained(self):
q = sess.query(User)
opt = self._option_fixture(User.orders).joinedload("items")
self._assert_path_result(opt, q, [
- (User, 'orders'),
- (User, 'orders', Order, "items")
- ])
+ (User, 'orders'),
+ (User, 'orders', Order, "items")
+ ])
def test_chained_plus_dotted(self):
User = self.classes.User
q = sess.query(User)
opt = self._option_fixture("orders.items").joinedload("keywords")
self._assert_path_result(opt, q, [
- (User, 'orders'),
- (User, 'orders', Order, "items"),
- (User, 'orders', Order, "items", Item, "keywords")
- ])
+ (User, 'orders'),
+ (User, 'orders', Order, "items"),
+ (User, 'orders', Order, "items", Item, "keywords")
+ ])
def test_chained_plus_multi(self):
User = self.classes.User
Item = self.classes.Item
sess = Session()
q = sess.query(User)
- opt = self._option_fixture(User.orders, Order.items).joinedload("keywords")
+ opt = self._option_fixture(
+ User.orders, Order.items).joinedload("keywords")
self._assert_path_result(opt, q, [
- (User, 'orders'),
- (User, 'orders', Order, "items"),
- (User, 'orders', Order, "items", Item, "keywords")
- ])
+ (User, 'orders'),
+ (User, 'orders', Order, "items"),
+ (User, 'orders', Order, "items", Item, "keywords")
+ ])
class OptionsNoPropTest(_fixtures.FixtureTest):
"Query has only expression-based entities - "\
"can't find property named 'keywords'."
self._assert_eager_with_just_column_exception(Item.id,
- 'keywords', message)
+ 'keywords', message)
def test_option_with_column_PropComparator(self):
Item = self.classes.Item
- self._assert_eager_with_just_column_exception(Item.id,
- Item.keywords,
- "Query has only expression-based entities "
- "- can't find property named 'keywords'."
- )
+ self._assert_eager_with_just_column_exception(
+ Item.id,
+ Item.keywords,
+ "Query has only expression-based entities "
+ "- can't find property named 'keywords'."
+ )
def test_option_against_nonexistent_PropComparator(self):
Item = self.classes.Item
(joinedload(Item.keywords), ),
r"Can't find property 'keywords' on any entity specified "
r"in this Query. Note the full path from root "
- r"\(Mapper\|Keyword\|keywords\) to target entity must be specified."
+ r"\(Mapper\|Keyword\|keywords\) to target entity must be "
+ r"specified."
)
def test_option_against_nonexistent_basestring(self):
r"Mapper\|Keyword\|keywords in this Query."
)
- @testing.fails_if(lambda: True,
+ @testing.fails_if(
+ lambda: True,
"PropertyOption doesn't yet check for relation/column on end result")
def test_option_against_non_relation_basestring(self):
Item = self.classes.Item
"does not refer to a mapped entity"
)
- @testing.fails_if(lambda: True,
- "PropertyOption doesn't yet check for relation/column on end result")
+ @testing.fails_if(
+ lambda: True,
+ "PropertyOption doesn't yet check for relation/column on end result")
def test_option_against_multi_non_relation_basestring(self):
Item = self.classes.Item
Keyword = self.classes.Keyword
@classmethod
def setup_mappers(cls):
users, User, addresses, Address, orders, Order = (
- cls.tables.users, cls.classes.User,
- cls.tables.addresses, cls.classes.Address,
- cls.tables.orders, cls.classes.Order)
+ cls.tables.users, cls.classes.User,
+ cls.tables.addresses, cls.classes.Address,
+ cls.tables.orders, cls.classes.Order)
mapper(User, users, properties={
'addresses': relationship(Address),
'orders': relationship(Order)
})
mapper(Address, addresses)
mapper(Order, orders)
- keywords, items, item_keywords, Keyword, Item = (cls.tables.keywords,
- cls.tables.items,
- cls.tables.item_keywords,
- cls.classes.Keyword,
- cls.classes.Item)
+ keywords, items, item_keywords, Keyword, Item = (
+ cls.tables.keywords,
+ cls.tables.items,
+ cls.tables.item_keywords,
+ cls.classes.Keyword,
+ cls.classes.Item)
mapper(Keyword, keywords, properties={
"keywords": column_property(keywords.c.name + "some keyword")
})
mapper(Item, items,
properties=dict(keywords=relationship(Keyword,
- secondary=item_keywords)))
+ secondary=item_keywords)))
def _assert_option(self, entity_list, option):
Item = self.classes.Item
q = create_session().query(*entity_list).\
- options(joinedload(option))
+ options(joinedload(option))
key = ('loader', (inspect(Item), inspect(Item).attrs.keywords))
assert key in q._attributes
def _assert_eager_with_entity_exception(self, entity_list, options,
- message):
+ message):
assert_raises_message(sa.exc.ArgumentError,
- message,
+ message,
create_session().query(*entity_list).options,
*options)
def _assert_eager_with_just_column_exception(self, column,
- eager_option, message):
+ eager_option, message):
assert_raises_message(sa.exc.ArgumentError, message,
create_session().query(column).options,
joinedload(eager_option))
from sqlalchemy import Integer, String, ForeignKey, exc, MetaData
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, create_session, \
- sessionmaker, attributes, interfaces,\
- clear_mappers, exc as orm_exc,\
- configure_mappers, Session, lazyload_all,\
- lazyload, aliased
+ sessionmaker, attributes, interfaces,\
+ clear_mappers, exc as orm_exc,\
+ configure_mappers, Session, lazyload_all,\
+ lazyload, aliased
from sqlalchemy.orm import state as sa_state
from sqlalchemy.orm import instrumentation
from sqlalchemy.orm.collections import attribute_mapped_collection, \
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(30), nullable=False),
test_needs_acid=True,
- test_needs_fk=True
- )
+ test_needs_fk=True)
Table('addresses', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('user_id', None, ForeignKey('users.id')),
Column('email_address', String(50), nullable=False),
test_needs_acid=True,
- test_needs_fk=True
- )
+ test_needs_fk=True)
Table('orders', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('user_id', None, ForeignKey('users.id')),
- Column('address_id', None, ForeignKey('addresses.id')),
- Column('description', String(30)),
- Column('isopen', Integer),
- test_needs_acid=True,
- test_needs_fk=True
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', None, ForeignKey('users.id')),
+ Column('address_id', None, ForeignKey('addresses.id')),
+ Column('description', String(30)),
+ Column('isopen', Integer),
+ test_needs_acid=True,
+ test_needs_fk=True)
Table("dingalings", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('address_id', None, ForeignKey('addresses.id')),
Column('data', String(30)),
test_needs_acid=True,
- test_needs_fk=True
- )
-
+ test_needs_fk=True)
def test_transient(self):
users, addresses = (self.tables.users,
- self.tables.addresses)
+ self.tables.addresses)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref="user")
+ 'addresses': relationship(Address, backref="user")
})
mapper(Address, addresses)
def test_no_mappers(self):
users = self.tables.users
-
umapper = mapper(User, users)
u1 = User(name='ed')
u1_pickled = pickle.dumps(u1, -1)
assert_raises_message(
orm_exc.UnmappedInstanceError,
- "Cannot deserialize object of type <class 'sqlalchemy.testing.pickleable.User'> - no mapper()",
+ "Cannot deserialize object of type "
+ "<class 'sqlalchemy.testing.pickleable.User'> - no mapper()",
pickle.loads, u1_pickled)
def test_no_instrumentation(self):
users = self.tables.users
-
umapper = mapper(User, users)
u1 = User(name='ed')
u1_pickled = pickle.dumps(u1, -1)
# compiles the mapper
eq_(str(u1), "User(name='ed')")
-
def test_class_deferred_cols(self):
addresses, users = (self.tables.addresses,
- self.tables.users)
+ self.tables.users)
mapper(User, users, properties={
'name': sa.orm.deferred(users.c.name),
sess2 = create_session()
sess2.add(u2)
eq_(u2.name, 'ed')
- eq_(u2, User(name='ed', addresses=[Address(email_address='ed@bar.com')]))
+ eq_(u2, User(name='ed', addresses=[
+ Address(email_address='ed@bar.com')]))
u2 = pickle.loads(pickle.dumps(u1))
sess2 = create_session()
u2 = sess2.merge(u2, load=False)
eq_(u2.name, 'ed')
- eq_(u2, User(name='ed', addresses=[Address(email_address='ed@bar.com')]))
+ eq_(u2, User(name='ed', addresses=[
+ Address(email_address='ed@bar.com')]))
def test_instance_lazy_relation_loaders(self):
users, addresses = (self.tables.users,
- self.tables.addresses)
+ self.tables.addresses)
mapper(User, users, properties={
'addresses': relationship(Address, lazy='noload')
mapper(Address, addresses)
sess = Session()
- u1 = User(name='ed', addresses=[
- Address(
- email_address='ed@bar.com',
- )
- ])
+ u1 = User(name='ed', addresses=[Address(email_address='ed@bar.com')])
sess.add(u1)
sess.commit()
sess.close()
- u1 = sess.query(User).options(
- lazyload(User.addresses)
- ).first()
+ u1 = sess.query(User).options(lazyload(User.addresses)).first()
u2 = pickle.loads(pickle.dumps(u1))
sess = Session()
assert u2.addresses
def test_invalidated_flag_pickle(self):
- users, addresses = (self.tables.users,
- self.tables.addresses)
+ users, addresses = (self.tables.users, self.tables.addresses)
mapper(User, users, properties={
'addresses': relationship(Address, lazy='noload')
eq_(len(u2.addresses), 2)
def test_invalidated_flag_deepcopy(self):
- users, addresses = (self.tables.users,
- self.tables.addresses)
+ users, addresses = (self.tables.users, self.tables.addresses)
mapper(User, users, properties={
'addresses': relationship(Address, lazy='noload')
@testing.requires.non_broken_pickle
def test_instance_deferred_cols(self):
- users, addresses = (self.tables.users,
- self.tables.addresses)
+ users, addresses = (self.tables.users, self.tables.addresses)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref="user")
+ 'addresses': relationship(Address, backref="user")
})
mapper(Address, addresses)
sess.expunge_all()
u1 = sess.query(User).\
- options(sa.orm.defer('name'),
- sa.orm.defer('addresses.email_address')).\
- get(u1.id)
+ options(sa.orm.defer('name'),
+ sa.orm.defer('addresses.email_address')).\
+ get(u1.id)
assert 'name' not in u1.__dict__
assert 'addresses' not in u1.__dict__
ad = u2.addresses[0]
assert 'email_address' not in ad.__dict__
eq_(ad.email_address, 'ed@bar.com')
- eq_(u2, User(name='ed', addresses=[Address(email_address='ed@bar.com')]))
+ eq_(u2, User(name='ed', addresses=[
+ Address(email_address='ed@bar.com')]))
u2 = pickle.loads(pickle.dumps(u1))
sess2 = create_session()
assert 'email_address' not in ad.__dict__
eq_(ad.email_address, 'ed@bar.com')
- eq_(u2, User(name='ed', addresses=[Address(email_address='ed@bar.com')]))
+ eq_(u2, User(name='ed', addresses=[
+ Address(email_address='ed@bar.com')]))
def test_pickle_protocols(self):
- users, addresses = (self.tables.users,
- self.tables.addresses)
+ users, addresses = (self.tables.users, self.tables.addresses)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref="user")
+ 'addresses': relationship(Address, backref="user")
})
mapper(Address, addresses)
@testing.requires.non_broken_pickle
def test_options_with_descriptors(self):
users, addresses, dingalings = (self.tables.users,
- self.tables.addresses,
- self.tables.dingalings)
+ self.tables.addresses,
+ self.tables.dingalings)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref="user")
+ 'addresses': relationship(Address, backref="user")
})
mapper(Address, addresses, properties={
- 'dingaling':relationship(Dingaling)
+ 'dingaling': relationship(Dingaling)
})
mapper(Dingaling, dingalings)
sess = create_session()
m = MetaData()
c1 = Table('c1', m,
- Column('parent_id', String,
- ForeignKey('p.id'), primary_key=True)
- )
+ Column('parent_id', String, ForeignKey('p.id'),
+ primary_key=True))
c2 = Table('c2', m,
- Column('parent_id', String,
- ForeignKey('p.id'), primary_key=True)
- )
- p = Table('p', m,
- Column('id', String, primary_key=True)
- )
+ Column('parent_id', String, ForeignKey('p.id'),
+ primary_key=True))
+ p = Table('p', m, Column('id', String, primary_key=True))
mapper(Parent, p, properties={
- 'children1':relationship(Child1),
- 'children2':relationship(Child2)
+ 'children1': relationship(Child1),
+ 'children2': relationship(Child2)
})
mapper(Child1, c1)
mapper(Child2, c2)
users, addresses = self.tables.users, self.tables.addresses
mapper(User, users, properties={
- 'addresses':relationship(
- Address,
- collection_class=
- attribute_mapped_collection('email_address')
- )
+ 'addresses': relationship(
+ Address,
+ collection_class=attribute_mapped_collection('email_address')
+ )
})
mapper(Address, addresses)
u1 = User()
- u1.addresses = {"email1":Address(email_address="email1")}
+ u1.addresses = {"email1": Address(email_address="email1")}
for loads, dumps in picklers():
repickled = loads(dumps(u1))
eq_(u1.addresses, repickled.addresses)
eq_(repickled.addresses['email1'],
- Address(email_address="email1"))
+ Address(email_address="email1"))
def test_column_mapped_collection(self):
users, addresses = self.tables.users, self.tables.addresses
mapper(User, users, properties={
- 'addresses':relationship(
- Address,
- collection_class=
- column_mapped_collection(
- addresses.c.email_address)
- )
+ 'addresses': relationship(
+ Address,
+ collection_class=column_mapped_collection(
+ addresses.c.email_address)
+ )
})
mapper(Address, addresses)
u1 = User()
u1.addresses = {
- "email1":Address(email_address="email1"),
- "email2":Address(email_address="email2")
+ "email1": Address(email_address="email1"),
+ "email2": Address(email_address="email2")
}
for loads, dumps in picklers():
repickled = loads(dumps(u1))
eq_(u1.addresses, repickled.addresses)
eq_(repickled.addresses['email1'],
- Address(email_address="email1"))
+ Address(email_address="email1"))
def test_composite_column_mapped_collection(self):
users, addresses = self.tables.users, self.tables.addresses
mapper(User, users, properties={
- 'addresses':relationship(
- Address,
- collection_class=
- column_mapped_collection([
- addresses.c.id,
- addresses.c.email_address])
- )
+ 'addresses': relationship(
+ Address,
+ collection_class=column_mapped_collection([
+ addresses.c.id,
+ addresses.c.email_address])
+ )
})
mapper(Address, addresses)
u1 = User()
u1.addresses = {
- (1, "email1"):Address(id=1, email_address="email1"),
- (2, "email2"):Address(id=2, email_address="email2")
+ (1, "email1"): Address(id=1, email_address="email1"),
+ (2, "email2"): Address(id=2, email_address="email2")
}
for loads, dumps in picklers():
repickled = loads(dumps(u1))
eq_(u1.addresses, repickled.addresses)
eq_(repickled.addresses[(1, 'email1')],
- Address(id=1, email_address="email1"))
+ Address(id=1, email_address="email1"))
+
class PolymorphicDeferredTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
- Column('type', String(30)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ Column('type', String(30)))
Table('email_users', metadata,
- Column('id', Integer, ForeignKey('users.id'), primary_key=True),
- Column('email_address', String(30)))
-
+ Column('id', Integer, ForeignKey('users.id'), primary_key=True),
+ Column('email_address', String(30)))
def test_polymorphic_deferred(self):
email_users, users = (self.tables.email_users,
- self.tables.users,
- )
+ self.tables.users,
+ )
- mapper(User, users, polymorphic_identity='user', polymorphic_on=users.c.type)
- mapper(EmailUser, email_users, inherits=User, polymorphic_identity='emailuser')
+ mapper(User, users, polymorphic_identity='user',
+ polymorphic_on=users.c.type)
+ mapper(EmailUser, email_users, inherits=User,
+ polymorphic_identity='emailuser')
eu = EmailUser(name="user1", email_address='foo@bar.com')
sess = create_session()
assert 'email_address' not in eu2.__dict__
eq_(eu2.email_address, 'foo@bar.com')
+
class TupleLabelTest(_fixtures.FixtureTest):
@classmethod
def setup_classes(cls):
@classmethod
def setup_mappers(cls):
- users, addresses, orders = cls.tables.users, cls.tables.addresses, cls.tables.orders
+ users, addresses, orders = (cls.tables.users, cls.tables.addresses,
+ cls.tables.orders)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', order_by=addresses.c.id),
- 'orders':relationship(Order, backref='user', order_by=orders.c.id), # o2m, m2o
+ 'addresses': relationship(Address, backref='user',
+ order_by=addresses.c.id),
+ # o2m, m2o
+ 'orders': relationship(Order, backref='user',
+ order_by=orders.c.id),
})
mapper(Address, addresses)
mapper(Order, orders, properties={
- 'address':relationship(Address), # m2o
+ 'address': relationship(Address), # m2o
})
def test_tuple_labeling(self):
eq_(row.name, row[0])
eq_(row.foobar, row[1])
- for row in sess.query(User).values(User.name, User.id.label('foobar')):
+ for row in sess.query(User).values(User.name,
+ User.id.label('foobar')):
if pickled is not False:
row = pickle.loads(pickle.dumps(row, pickled))
eq_(list(row.keys()), ['name', 'foobar'])
eq_(row.User, row[0])
oalias = aliased(Order, name='orders')
- for row in sess.query(User, oalias).join(oalias, User.orders).all():
+ for row in sess.query(User, oalias).join(oalias, User.orders) \
+ .all():
if pickled is not False:
row = pickle.loads(pickle.dumps(row, pickled))
eq_(list(row.keys()), ['User', 'orders'])
ret = sess.query(User, Address).join(User.addresses).all()
pickle.loads(pickle.dumps(ret, pickled))
+
class CustomSetupTeardownTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(30), nullable=False),
test_needs_acid=True,
- test_needs_fk=True
- )
+ test_needs_fk=True)
Table('addresses', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('user_id', None, ForeignKey('users.id')),
Column('email_address', String(50), nullable=False),
test_needs_acid=True,
- test_needs_fk=True
- )
+ test_needs_fk=True)
+
def test_rebuild_state(self):
"""not much of a 'test', but illustrate how to
remove instance-level state before pickling.
u2 = pickle.loads(pickle.dumps(u1))
attributes.manager_of_class(User).setup_instance(u2)
assert attributes.instance_state(u2)
-
def test_column_metadata(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users)
mapper(Address, addresses)
UserThing, s, properties={
'id': (users.c.id, addresses.c.user_id),
'address_id': addresses.c.id,
- })
+ })
sess = create_session()
u10 = sess.query(UserThing).get((10, None))
eq_(u10, UserThing(id=10))
def test_invalid_from_statement(self):
User, addresses, users = (self.classes.User,
- self.tables.addresses,
- self.tables.users)
+ self.tables.addresses,
+ self.tables.users)
s = create_session()
q = s.query(User)
def test_o2m_compare_to_null(self):
User = self.classes.User
- self._test(User.id == None, "users.id IS NULL")
- self._test(User.id != None, "users.id IS NOT NULL")
- self._test(~(User.id == None), "users.id IS NOT NULL")
- self._test(~(User.id != None), "users.id IS NULL")
- self._test(None == User.id, "users.id IS NULL")
- self._test(~(None == User.id), "users.id IS NOT NULL")
+ self._test(User.id == None, "users.id IS NULL") # noqa
+ self._test(User.id != None, "users.id IS NOT NULL") # noqa
+ self._test(~(User.id == None), "users.id IS NOT NULL") # noqa
+ self._test(~(User.id != None), "users.id IS NULL") # noqa
+ self._test(None == User.id, "users.id IS NULL") # noqa
+ self._test(~(None == User.id), "users.id IS NOT NULL") # noqa
def test_m2o_compare_to_null(self):
Address = self.classes.Address
- self._test(Address.user == None, "addresses.user_id IS NULL")
- self._test(~(Address.user == None), "addresses.user_id IS NOT NULL")
- self._test(~(Address.user != None), "addresses.user_id IS NULL")
- self._test(None == Address.user, "addresses.user_id IS NULL")
- self._test(~(None == Address.user), "addresses.user_id IS NOT NULL")
+ self._test(Address.user == None, "addresses.user_id IS NULL") # noqa
+ self._test(~(Address.user == None), # noqa
+ "addresses.user_id IS NOT NULL")
+ self._test(~(Address.user != None), # noqa
+ "addresses.user_id IS NULL")
+ self._test(None == Address.user, "addresses.user_id IS NULL") # noqa
+ self._test(~(None == Address.user), # noqa
+ "addresses.user_id IS NOT NULL")
def test_o2m_compare_to_null_orm_adapt(self):
User, Address = self.classes.User, self.classes.Address
self._test_filter_aliases(
- User.id == None,
+ User.id == None, # noqa
"users_1.id IS NULL", Address, Address.user),
self._test_filter_aliases(
- User.id != None,
+ User.id != None, # noqa
"users_1.id IS NOT NULL", Address, Address.user),
self._test_filter_aliases(
- ~(User.id == None),
+ ~(User.id == None), # noqa
"users_1.id IS NOT NULL", Address, Address.user),
self._test_filter_aliases(
- ~(User.id != None),
+ ~(User.id != None), # noqa
"users_1.id IS NULL", Address, Address.user),
def test_m2o_compare_to_null_orm_adapt(self):
User, Address = self.classes.User, self.classes.Address
self._test_filter_aliases(
- Address.user == None,
+ Address.user == None, # noqa
"addresses_1.user_id IS NULL", User, User.addresses),
self._test_filter_aliases(
- Address.user != None,
+ Address.user != None, # noqa
"addresses_1.user_id IS NOT NULL", User, User.addresses),
self._test_filter_aliases(
- ~(Address.user == None),
+ ~(Address.user == None), # noqa
"addresses_1.user_id IS NOT NULL", User, User.addresses),
self._test_filter_aliases(
- ~(Address.user != None),
+ ~(Address.user != None), # noqa
"addresses_1.user_id IS NULL", User, User.addresses),
def test_o2m_compare_to_null_aliased(self):
User = self.classes.User
u1 = aliased(User)
- self._test(u1.id == None, "users_1.id IS NULL")
- self._test(u1.id != None, "users_1.id IS NOT NULL")
- self._test(~(u1.id == None), "users_1.id IS NOT NULL")
- self._test(~(u1.id != None), "users_1.id IS NULL")
+ self._test(u1.id == None, "users_1.id IS NULL") # noqa
+ self._test(u1.id != None, "users_1.id IS NOT NULL") # noqa
+ self._test(~(u1.id == None), "users_1.id IS NOT NULL") # noqa
+ self._test(~(u1.id != None), "users_1.id IS NULL") # noqa
def test_m2o_compare_to_null_aliased(self):
Address = self.classes.Address
a1 = aliased(Address)
- self._test(a1.user == None, "addresses_1.user_id IS NULL")
- self._test(~(a1.user == None), "addresses_1.user_id IS NOT NULL")
- self._test(a1.user != None, "addresses_1.user_id IS NOT NULL")
- self._test(~(a1.user != None), "addresses_1.user_id IS NULL")
+ self._test(a1.user == None, "addresses_1.user_id IS NULL") # noqa
+ self._test(~(a1.user == None), # noqa
+ "addresses_1.user_id IS NOT NULL")
+ self._test(a1.user != None, "addresses_1.user_id IS NOT NULL") # noqa
+ self._test(~(a1.user != None), "addresses_1.user_id IS NULL") # noqa
def test_relationship_unimplemented(self):
User = self.classes.User
# needs autoaliasing
self._test(
- Node.children == None,
+ Node.children == None, # noqa
"NOT (EXISTS (SELECT 1 FROM nodes AS nodes_1 "
"WHERE nodes.id = nodes_1.parent_id))",
entity=Node,
)
self._test(
- Node.parent == None,
+ Node.parent == None, # noqa
"nodes.parent_id IS NULL",
checkparams={}
)
self._test(
- nalias.parent == None,
+ nalias.parent == None, # noqa
"nodes_1.parent_id IS NULL",
checkparams={}
)
self._test(
- nalias.parent != None,
+ nalias.parent != None, # noqa
"nodes_1.parent_id IS NOT NULL",
checkparams={}
)
self._test(
- nalias.children == None,
+ nalias.children == None, # noqa
"NOT (EXISTS ("
"SELECT 1 FROM nodes WHERE nodes_1.id = nodes.parent_id))",
entity=nalias,
"nodes_1.id = nodes.parent_id AND nodes.data = :data_1)",
entity=nalias,
checkparams={'data_1': 'some data'}
- )
+ )
# this fails because self-referential any() is auto-aliasing;
# the fact that we use "nalias" here means we get two aliases.
- #self._test(
+ # self._test(
# Node.children.any(nalias.data == 'some data'),
# "EXISTS (SELECT 1 FROM nodes AS nodes_1 WHERE "
# "nodes.id = nodes_1.parent_id AND nodes_1.data = :data_1)",
q = sess.query(User).filter(
User.addresses.any(
and_(Address.id == Dingaling.address_id,
- Dingaling.data == 'x')))
+ Dingaling.data == 'x')))
# new since #2746 - correlate_except() now takes context into account
# so its usage in any() is not as disrupting.
self.assert_compile(
(func.max(User.id), "max(users.id)"),
(User.id.desc(), "users.id DESC"),
(between(5, User.id, Address.id),
- ":param_1 BETWEEN users.id AND addresses.id"),
+ ":param_1 BETWEEN users.id AND addresses.id"),
# this one would require adding compile() to
# InstrumentedScalarAttribute. do we want this ?
# (User.id, "users.id")
def test_deferred_instances(self):
User, addresses, Address = (self.classes.User,
- self.tables.addresses,
- self.classes.Address)
+ self.tables.addresses,
+ self.classes.Address)
session = create_session()
s = session.query(User).filter(
and_(addresses.c.email_address == bindparam('emailad'),
- Address.user_id == User.id)).statement
+ Address.user_id == User.id)).statement
- l = list(
+ result = list(
session.query(User).instances(s.execute(emailad='jack@bean.com')))
- eq_([User(id=7)], l)
+ eq_([User(id=7)], result)
def test_aliased_sql_construct(self):
User, Address = self.classes.User, self.classes.Address
a1 = session.query(User.id, ua.id, ua.name).\
filter(User.id == ua.id).subquery(reduce_columns=True)
self.assert_compile(a1,
- "SELECT users.id, users_1.name FROM "
- "users, users AS users_1 WHERE users.id = users_1.id")
+ "SELECT users.id, users_1.name FROM "
+ "users, users AS users_1 "
+ "WHERE users.id = users_1.id")
def test_label(self):
User = self.classes.User
q2 = s.query(User).filter(User.name == 'fred').with_labels()
eq_(
s.query(User).from_statement(union(q1, q2).
- order_by('users_name')).all(), [User(name='ed'), User(name='fred')]
+ order_by('users_name')).all(),
+ [User(name='ed'), User(name='fred')]
)
def test_select(self):
self.assert_compile(
select([q1.group_by(User.id)]),
"SELECT users_id, users_name FROM (SELECT users.id AS users_id, "
- "users.name AS users_name FROM users GROUP BY users.name, users.id)"
+ "users.name AS users_name FROM users "
+ "GROUP BY users.name, users.id)"
)
def test_group_by_cancellation(self):
q1._no_criterion_assertion("foo")
+
class ColumnPropertyTest(_fixtures.FixtureTest, AssertsCompiledSQL):
__dialect__ = 'default'
run_setup_mappers = 'each'
"FROM users, users AS users_1"
)
-
def test_order_by_column_prop_string(self):
User, Address = self.classes("User", "Address")
self._fixture(label=True)
"users.id AS users_id FROM users AS users_1, users ORDER BY anon_1"
)
-
def test_order_by_column_unlabeled_prop_attr_aliased_one(self):
User = self.classes.User
self._fixture(label=False)
self.assert_compile(
q,
"SELECT users.id AS users_id, users.name AS users_name "
- "FROM users ORDER BY (SELECT max(addresses.email_address) AS max_1 "
+ "FROM users ORDER BY "
+ "(SELECT max(addresses.email_address) AS max_1 "
"FROM addresses "
"WHERE addresses.user_id = users.id)"
)
sess = Session()
eq_(
- sess.query(Comparator(User.id)).order_by(Comparator(User.id)).all(),
+ sess.query(Comparator(User.id)).order_by(
+ Comparator(User.id)).all(),
[(7, ), (8, ), (9, ), (10, )]
)
assert True
assert [User(id=10)] == \
- sess.query(User).filter(User.addresses == None).all()
+ sess.query(User).filter(User.addresses == None).all() # noqa
try:
assert [User(id=7), User(id=9), User(id=10)] == \
sess.query(Address).filter(Address.user != user).all()
# generates an IS NULL
- assert [] == sess.query(Address).filter(Address.user == None).all()
+ assert [] == sess.query(Address).filter(Address.user == None).all() # noqa
assert [] == sess.query(Address).filter(Address.user == null()).all()
assert [Order(id=5)] == \
- sess.query(Order).filter(Order.address == None).all()
+ sess.query(Order).filter(Order.address == None).all() # noqa
# o2o
dingaling = sess.query(Dingaling).get(2)
# m2m
eq_(
- sess.query(Item).filter(Item.keywords == None).
+ sess.query(Item).filter(Item.keywords == None). # noqa
order_by(Item.id).all(), [Item(id=4), Item(id=5)])
eq_(
- sess.query(Item).filter(Item.keywords != None).
+ sess.query(Item).filter(Item.keywords != None). # noqa
order_by(Item.id).all(), [Item(id=1), Item(id=2), Item(id=3)])
def test_filter_by(self):
assert [User(name='chuck')] == \
sess.query(User).filter_by(addresses=null()).all()
-
def test_filter_by_tables(self):
users = self.tables.users
addresses = self.tables.addresses
# scalar
eq_(
[Order(description="order 5")],
- sess.query(Order).filter(Order.address_id == None).all()
+ sess.query(Order).filter(Order.address_id == None).all() # noqa
)
eq_(
[Order(description="order 5")],
# o2o
eq_(
[Address(id=1), Address(id=3), Address(id=4)],
- sess.query(Address).filter(Address.dingaling == None).
+ sess.query(Address).filter(Address.dingaling == None). # noqa
order_by(Address.id).all())
eq_(
[Address(id=1), Address(id=3), Address(id=4)],
order_by(Address.id).all())
eq_(
[Address(id=2), Address(id=5)],
- sess.query(Address).filter(Address.dingaling != None).
+ sess.query(Address).filter(Address.dingaling != None). # noqa
order_by(Address.id).all())
eq_(
[Address(id=2), Address(id=5)],
# m2o
eq_(
[Order(id=5)],
- sess.query(Order).filter(Order.address == None).all())
+ sess.query(Order).filter(Order.address == None).all()) # noqa
eq_(
[Order(id=1), Order(id=2), Order(id=3), Order(id=4)],
sess.query(Order).order_by(Order.id).
- filter(Order.address != None).all())
+ filter(Order.address != None).all()) # noqa
# o2m
eq_(
[User(id=10)],
- sess.query(User).filter(User.addresses == None).all())
+ sess.query(User).filter(User.addresses == None).all()) # noqa
eq_(
[User(id=7), User(id=8), User(id=9)],
- sess.query(User).filter(User.addresses != None).
+ sess.query(User).filter(User.addresses != None). # noqa
order_by(User.id).all())
def test_blank_filter_by(self):
options(joinedload(User.addresses)).all(), [
User(
name='ed', addresses=[Address(), Address(),
- Address()]),
+ Address()]),
User(name='fred', addresses=[Address()])]
)
self.assert_sql_count(testing.db, go, 1)
q = sess.query(User, Address.email_address.label('email_address'))
- l = q.join('addresses').options(joinedload(User.orders)).\
+ result = q.join('addresses').options(joinedload(User.orders)).\
order_by(
"email_address desc").limit(1).offset(0)
with expect_warnings(
addresses=[Address(id=1)]
), 'jack@bean.com')
],
- l.all())
+ result.all())
class TextWarningTest(QueryTest, AssertsCompiledSQL):
# test auto-lookup of property
o = sess.query(Order).with_parent(u1).all()
assert [Order(description="order 1"), Order(description="order 3"),
- Order(description="order 5")] == o
+ Order(description="order 5")] == o
# test with explicit property
o = sess.query(Order).with_parent(u1, property='orders').all()
assert [Order(description="order 1"), Order(description="order 3"),
- Order(description="order 5")] == o
+ Order(description="order 5")] == o
o = sess.query(Order).with_parent(u1, property=User.orders).all()
assert [Order(description="order 1"), Order(description="order 3"),
- Order(description="order 5")] == o
+ Order(description="order 5")] == o
o = sess.query(Order).filter(with_parent(u1, User.orders)).all()
assert [
o.all()
)
-
def test_with_pending_autoflush(self):
Order, User = self.classes.Order, self.classes.User
@classmethod
def setup_mappers(cls):
Address, addresses, users, User = (cls.classes.Address,
- cls.tables.addresses,
- cls.tables.users,
- cls.classes.User)
+ cls.tables.addresses,
+ cls.tables.users,
+ cls.classes.User)
mapper(Address, addresses)
assert_raises(
sa.orm.exc.NoResultFound,
(sess.query(User, Address).join(User.addresses).
- filter(Address.id == 99)).one)
+ filter(Address.id == 99)).one)
eq_((sess.query(User, Address).
- join(User.addresses).
- filter(Address.id == 4)).one(),
- (User(id=8), Address(id=4)))
+ join(User.addresses).
+ filter(Address.id == 4)).one(),
+ (User(id=8), Address(id=4)))
assert_raises(
sa.orm.exc.MultipleResultsFound,
r"Multiple rows were found for one_or_none\(\)",
sess.query(User).one_or_none)
- eq_(sess.query(User.id, User.name).filter(User.id == 99).one_or_none(), None)
+ eq_(sess.query(User.id, User.name).filter(User.id == 99).one_or_none(),
+ None)
eq_(sess.query(User.id, User.name).filter(User.id == 7).one_or_none(),
(7, 'jack'))
eq_(
(sess.query(User, Address).join(User.addresses).
- filter(Address.id == 99)).one_or_none(), None)
+ filter(Address.id == 99)).one_or_none(), None)
eq_((sess.query(User, Address).
- join(User.addresses).
- filter(Address.id == 4)).one_or_none(),
- (User(id=8), Address(id=4)))
+ join(User.addresses).
+ filter(Address.id == 4)).one_or_none(),
+ (User(id=8), Address(id=4)))
assert_raises(
sa.orm.exc.MultipleResultsFound,
with self._assert_bind_args(session):
session.query(func.max(User.score)).scalar()
-
from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE, MANYTOMANY
from sqlalchemy.testing import mock
+
class _JoinFixtures(object):
@classmethod
def setup_class(cls):
m = MetaData()
cls.left = Table('lft', m,
- Column('id', Integer, primary_key=True),
- Column('x', Integer),
- Column('y', Integer),
- )
+ Column('id', Integer, primary_key=True),
+ Column('x', Integer),
+ Column('y', Integer))
cls.right = Table('rgt', m,
- Column('id', Integer, primary_key=True),
- Column('lid', Integer, ForeignKey('lft.id')),
- Column('x', Integer),
- Column('y', Integer),
- )
+ Column('id', Integer, primary_key=True),
+ Column('lid', Integer, ForeignKey('lft.id')),
+ Column('x', Integer),
+ Column('y', Integer))
cls.right_multi_fk = Table('rgt_multi_fk', m,
- Column('id', Integer, primary_key=True),
- Column('lid1', Integer, ForeignKey('lft.id')),
- Column('lid2', Integer, ForeignKey('lft.id')),
- )
+ Column('id', Integer, primary_key=True),
+ Column('lid1', Integer,
+ ForeignKey('lft.id')),
+ Column('lid2', Integer,
+ ForeignKey('lft.id')))
cls.selfref = Table('selfref', m,
- Column('id', Integer, primary_key=True),
- Column('sid', Integer, ForeignKey('selfref.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('sid', Integer, ForeignKey('selfref.id')))
cls.composite_selfref = Table('composite_selfref', m,
- Column('id', Integer, primary_key=True),
- Column('group_id', Integer, primary_key=True),
- Column('parent_id', Integer),
- ForeignKeyConstraint(
- ['parent_id', 'group_id'],
- ['composite_selfref.id', 'composite_selfref.group_id']
- )
- )
+ Column('id', Integer, primary_key=True),
+ Column('group_id', Integer,
+ primary_key=True),
+ Column('parent_id', Integer),
+ ForeignKeyConstraint(
+ ['parent_id', 'group_id'],
+ ['composite_selfref.id',
+ 'composite_selfref.group_id']))
cls.m2mleft = Table('m2mlft', m,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True))
cls.m2mright = Table('m2mrgt', m,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True))
cls.m2msecondary = Table('m2msecondary', m,
- Column('lid', Integer, ForeignKey('m2mlft.id'), primary_key=True),
- Column('rid', Integer, ForeignKey('m2mrgt.id'), primary_key=True),
- )
+ Column('lid', Integer, ForeignKey(
+ 'm2mlft.id'), primary_key=True),
+ Column('rid', Integer, ForeignKey(
+ 'm2mrgt.id'), primary_key=True))
cls.m2msecondary_no_fks = Table('m2msecondary_no_fks', m,
- Column('lid', Integer, primary_key=True),
- Column('rid', Integer, primary_key=True),
- )
+ Column('lid', Integer,
+ primary_key=True),
+ Column('rid', Integer,
+ primary_key=True))
cls.m2msecondary_ambig_fks = Table('m2msecondary_ambig_fks', m,
- Column('lid1', Integer, ForeignKey('m2mlft.id'), primary_key=True),
- Column('rid1', Integer, ForeignKey('m2mrgt.id'), primary_key=True),
- Column('lid2', Integer, ForeignKey('m2mlft.id'), primary_key=True),
- Column('rid2', Integer, ForeignKey('m2mrgt.id'), primary_key=True),
- )
+ Column('lid1', Integer, ForeignKey(
+ 'm2mlft.id'), primary_key=True),
+ Column('rid1', Integer, ForeignKey(
+ 'm2mrgt.id'), primary_key=True),
+ Column('lid2', Integer, ForeignKey(
+ 'm2mlft.id'), primary_key=True),
+ Column('rid2', Integer, ForeignKey(
+ 'm2mrgt.id'), primary_key=True))
cls.base_w_sub_rel = Table('base_w_sub_rel', m,
- Column('id', Integer, primary_key=True),
- Column('sub_id', Integer, ForeignKey('rel_sub.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('sub_id', Integer,
+ ForeignKey('rel_sub.id')))
cls.rel_sub = Table('rel_sub', m,
- Column('id', Integer, ForeignKey('base_w_sub_rel.id'),
- primary_key=True)
- )
+ Column('id', Integer,
+ ForeignKey('base_w_sub_rel.id'),
+ primary_key=True))
cls.base = Table('base', m,
- Column('id', Integer, primary_key=True),
- Column('flag', Boolean)
- )
+ Column('id', Integer, primary_key=True),
+ Column('flag', Boolean))
cls.sub = Table('sub', m,
- Column('id', Integer, ForeignKey('base.id'),
- primary_key=True),
- )
+ Column('id', Integer, ForeignKey('base.id'),
+ primary_key=True))
cls.sub_w_base_rel = Table('sub_w_base_rel', m,
- Column('id', Integer, ForeignKey('base.id'),
- primary_key=True),
- Column('base_id', Integer, ForeignKey('base.id'))
- )
+ Column('id', Integer, ForeignKey('base.id'),
+ primary_key=True),
+ Column('base_id', Integer,
+ ForeignKey('base.id')))
cls.sub_w_sub_rel = Table('sub_w_sub_rel', m,
- Column('id', Integer, ForeignKey('base.id'),
- primary_key=True),
- Column('sub_id', Integer, ForeignKey('sub.id'))
- )
+ Column('id', Integer, ForeignKey('base.id'),
+ primary_key=True),
+ Column('sub_id', Integer,
+ ForeignKey('sub.id'))
+ )
cls.right_w_base_rel = Table('right_w_base_rel', m,
- Column('id', Integer, primary_key=True),
- Column('base_id', Integer, ForeignKey('base.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('base_id', Integer,
+ ForeignKey('base.id')))
cls.three_tab_a = Table('three_tab_a', m,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True))
cls.three_tab_b = Table('three_tab_b', m,
- Column('id', Integer, primary_key=True),
- Column('aid', Integer, ForeignKey('three_tab_a.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('aid', Integer, ForeignKey(
+ 'three_tab_a.id')))
cls.three_tab_c = Table('three_tab_c', m,
- Column('id', Integer, primary_key=True),
- Column('aid', Integer, ForeignKey('three_tab_a.id')),
- Column('bid', Integer, ForeignKey('three_tab_b.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('aid', Integer, ForeignKey(
+ 'three_tab_a.id')),
+ Column('bid', Integer, ForeignKey(
+ 'three_tab_b.id')))
cls.composite_target = Table('composite_target', m,
- Column('uid', Integer, primary_key=True),
- Column('oid', Integer, primary_key=True),
- )
+ Column('uid', Integer, primary_key=True),
+ Column('oid', Integer, primary_key=True))
- cls.composite_multi_ref = Table('composite_multi_ref', m,
+ cls.composite_multi_ref = Table(
+ 'composite_multi_ref', m,
Column('uid1', Integer),
Column('uid2', Integer),
Column('oid', Integer),
ForeignKeyConstraint(("uid1", "oid"),
- ("composite_target.uid", "composite_target.oid")),
+ ("composite_target.uid",
+ "composite_target.oid")),
ForeignKeyConstraint(("uid2", "oid"),
- ("composite_target.uid", "composite_target.oid")),
- )
+ ("composite_target.uid",
+ "composite_target.oid")))
cls.purely_single_col = Table('purely_single_col', m,
- Column('path', String)
- )
+ Column('path', String))
def _join_fixture_overlapping_three_tables(self, **kw):
def _can_sync(*cols):
def _join_fixture_m2m(self, **kw):
return relationships.JoinCondition(
- self.m2mleft,
- self.m2mright,
- self.m2mleft,
- self.m2mright,
- secondary=self.m2msecondary,
- **kw
- )
+ self.m2mleft,
+ self.m2mright,
+ self.m2mleft,
+ self.m2mright,
+ secondary=self.m2msecondary,
+ **kw
+ )
def _join_fixture_m2m_backref(self, **kw):
"""return JoinCondition in the same way RelationshipProperty
"""
j1 = self._join_fixture_m2m()
return j1, relationships.JoinCondition(
- self.m2mright,
- self.m2mleft,
- self.m2mright,
- self.m2mleft,
- secondary=self.m2msecondary,
- primaryjoin=j1.secondaryjoin_minus_local,
- secondaryjoin=j1.primaryjoin_minus_local
- )
+ self.m2mright,
+ self.m2mleft,
+ self.m2mright,
+ self.m2mleft,
+ secondary=self.m2msecondary,
+ primaryjoin=j1.secondaryjoin_minus_local,
+ secondaryjoin=j1.primaryjoin_minus_local
+ )
def _join_fixture_o2m(self, **kw):
return relationships.JoinCondition(
- self.left,
- self.right,
- self.left,
- self.right,
- **kw
- )
+ self.left,
+ self.right,
+ self.left,
+ self.right,
+ **kw
+ )
def _join_fixture_m2o(self, **kw):
return relationships.JoinCondition(
- self.right,
- self.left,
- self.right,
- self.left,
- **kw
- )
+ self.right,
+ self.left,
+ self.right,
+ self.left,
+ **kw
+ )
def _join_fixture_o2m_selfref(self, **kw):
return relationships.JoinCondition(
self.composite_selfref,
self.composite_selfref,
remote_side=set([self.composite_selfref.c.id,
- self.composite_selfref.c.group_id]),
+ self.composite_selfref.c.group_id]),
**kw
)
self.composite_selfref,
primaryjoin=and_(
self.composite_selfref.c.group_id ==
- func.foo(self.composite_selfref.c.group_id),
+ func.foo(self.composite_selfref.c.group_id),
self.composite_selfref.c.parent_id ==
- self.composite_selfref.c.id
+ self.composite_selfref.c.id
),
**kw
)
self.composite_selfref,
primaryjoin=and_(
self.composite_selfref.c.group_id ==
- func.foo(self.composite_selfref.c.group_id),
+ func.foo(self.composite_selfref.c.group_id),
self.composite_selfref.c.parent_id ==
- self.composite_selfref.c.id
+ self.composite_selfref.c.id
),
remote_side=set([self.composite_selfref.c.parent_id]),
**kw
self.composite_selfref,
primaryjoin=and_(
remote(self.composite_selfref.c.group_id) ==
- func.foo(self.composite_selfref.c.group_id),
+ func.foo(self.composite_selfref.c.group_id),
remote(self.composite_selfref.c.parent_id) ==
- self.composite_selfref.c.id
+ self.composite_selfref.c.id
),
**kw
)
self.right,
self.left,
self.right,
- primaryjoin=(self.left.c.x + self.left.c.y) == \
- relationships.remote(relationships.foreign(
- self.right.c.x * self.right.c.y
- )),
+ primaryjoin=(self.left.c.x + self.left.c.y) ==
+ relationships.remote(relationships.foreign(
+ self.right.c.x * self.right.c.y
+ )),
**kw
)
self.right,
self.left,
self.right,
- primaryjoin=(self.left.c.x + self.left.c.y) == \
- relationships.foreign(
- self.right.c.x * self.right.c.y
- ),
+ primaryjoin=(self.left.c.x + self.left.c.y) ==
+ relationships.foreign(
+ self.right.c.x * self.right.c.y
+ ),
**kw
)
self.right,
self.left,
self.right,
- primaryjoin=(self.left.c.x + self.left.c.y) == \
- (
- self.right.c.x * self.right.c.y
- ),
+ primaryjoin=(self.left.c.x + self.left.c.y) ==
+ (
+ self.right.c.x * self.right.c.y
+ ),
**kw
)
def _join_fixture_base_to_joined_sub(self, **kw):
# see test/orm/inheritance/test_abc_inheritance:TestaTobM2O
# and others there
- right = self.base_w_sub_rel.join(self.rel_sub,
+ right = self.base_w_sub_rel.join(
+ self.rel_sub,
self.base_w_sub_rel.c.id == self.rel_sub.c.id
)
return relationships.JoinCondition(
right,
self.base_w_sub_rel,
self.rel_sub,
- primaryjoin=self.base_w_sub_rel.c.sub_id == \
- self.rel_sub.c.id,
+ primaryjoin=self.base_w_sub_rel.c.sub_id ==
+ self.rel_sub.c.id,
**kw
)
def _join_fixture_o2m_joined_sub_to_base(self, **kw):
left = self.base.join(self.sub_w_base_rel,
- self.base.c.id == self.sub_w_base_rel.c.id)
+ self.base.c.id == self.sub_w_base_rel.c.id)
return relationships.JoinCondition(
left,
self.base,
# m2o has a problem at the time of this test.
left = self.base.join(self.sub, self.base.c.id == self.sub.c.id)
right = self.base.join(self.sub_w_base_rel,
- self.base.c.id == self.sub_w_base_rel.c.id)
+ self.base.c.id == self.sub_w_base_rel.c.id)
return relationships.JoinCondition(
left,
right,
def _join_fixture_o2m_joined_sub_to_sub(self, **kw):
left = self.base.join(self.sub, self.base.c.id == self.sub.c.id)
right = self.base.join(self.sub_w_sub_rel,
- self.base.c.id == self.sub_w_sub_rel.c.id)
+ self.base.c.id == self.sub_w_sub_rel.c.id)
return relationships.JoinCondition(
left,
right,
def _join_fixture_m2o_sub_to_joined_sub(self, **kw):
# see test.orm.test_mapper:MapperTest.test_add_column_prop_deannotate,
right = self.base.join(self.right_w_base_rel,
- self.base.c.id == self.right_w_base_rel.c.id)
+ self.base.c.id == self.right_w_base_rel.c.id)
return relationships.JoinCondition(
self.right_w_base_rel,
right,
def _join_fixture_m2o_sub_to_joined_sub_func(self, **kw):
# see test.orm.test_mapper:MapperTest.test_add_column_prop_deannotate,
right = self.base.join(self.right_w_base_rel,
- self.base.c.id == self.right_w_base_rel.c.id)
+ self.base.c.id == self.right_w_base_rel.c.id)
return relationships.JoinCondition(
self.right_w_base_rel,
right,
self.right_w_base_rel,
self.right_w_base_rel,
- primaryjoin=self.right_w_base_rel.c.base_id == \
- func.foo(self.base.c.id)
+ primaryjoin=self.right_w_base_rel.c.base_id ==
+ func.foo(self.base.c.id)
)
def _join_fixture_o2o_joined_sub_to_base(self, **kw):
left = self.base.join(self.sub,
- self.base.c.id == self.sub.c.id)
+ self.base.c.id == self.sub.c.id)
# see test_relationships->AmbiguousJoinInterpretedAsSelfRef
return relationships.JoinCondition(
def _join_fixture_o2m_to_annotated_func(self, **kw):
return relationships.JoinCondition(
- self.left,
- self.right,
- self.left,
- self.right,
- primaryjoin=self.left.c.id ==
- foreign(func.foo(self.right.c.lid)),
- **kw
- )
+ self.left,
+ self.right,
+ self.left,
+ self.right,
+ primaryjoin=self.left.c.id ==
+ foreign(func.foo(self.right.c.lid)),
+ **kw
+ )
def _join_fixture_o2m_to_oldstyle_func(self, **kw):
return relationships.JoinCondition(
- self.left,
- self.right,
- self.left,
- self.right,
- primaryjoin=self.left.c.id ==
- func.foo(self.right.c.lid),
- consider_as_foreign_keys=[self.right.c.lid],
- **kw
- )
+ self.left,
+ self.right,
+ self.left,
+ self.right,
+ primaryjoin=self.left.c.id ==
+ func.foo(self.right.c.lid),
+ consider_as_foreign_keys=[self.right.c.lid],
+ **kw
+ )
def _join_fixture_overlapping_composite_fks(self, **kw):
return relationships.JoinCondition(
- self.composite_target,
- self.composite_multi_ref,
- self.composite_target,
- self.composite_multi_ref,
- consider_as_foreign_keys=[self.composite_multi_ref.c.uid2,
- self.composite_multi_ref.c.oid],
- **kw
- )
-
+ self.composite_target,
+ self.composite_multi_ref,
+ self.composite_target,
+ self.composite_multi_ref,
+ consider_as_foreign_keys=[self.composite_multi_ref.c.uid2,
+ self.composite_multi_ref.c.oid],
+ **kw
+ )
cls.left = Table('lft', m,
- Column('id', Integer, primary_key=True),
- Column('x', Integer),
- Column('y', Integer),
- )
+ Column('id', Integer, primary_key=True),
+ Column('x', Integer),
+ Column('y', Integer))
cls.right = Table('rgt', m,
- Column('id', Integer, primary_key=True),
- Column('lid', Integer, ForeignKey('lft.id')),
- Column('x', Integer),
- Column('y', Integer),
- )
+ Column('id', Integer, primary_key=True),
+ Column('lid', Integer, ForeignKey('lft.id')),
+ Column('x', Integer),
+ Column('y', Integer))
def _join_fixture_o2m_o_side_none(self, **kw):
return relationships.JoinCondition(
- self.left,
- self.right,
- self.left,
- self.right,
- primaryjoin=and_(self.left.c.id == self.right.c.lid,
- self.left.c.x == 5),
- **kw
- )
+ self.left,
+ self.right,
+ self.left,
+ self.right,
+ primaryjoin=and_(self.left.c.id == self.right.c.lid,
+ self.left.c.x == 5),
+ **kw
+ )
def _join_fixture_purely_single_o2m(self, **kw):
return relationships.JoinCondition(
- self.purely_single_col,
- self.purely_single_col,
- self.purely_single_col,
- self.purely_single_col,
- support_sync=False,
- primaryjoin=
- self.purely_single_col.c.path.like(
- remote(
- foreign(
- self.purely_single_col.c.path.concat('%')
- )
- )
- )
+ self.purely_single_col,
+ self.purely_single_col,
+ self.purely_single_col,
+ self.purely_single_col,
+ support_sync=False,
+ primaryjoin=self.purely_single_col.c.path.like(
+ remote(
+ foreign(
+ self.purely_single_col.c.path.concat('%')
+ )
)
+ )
+ )
def _join_fixture_purely_single_m2o(self, **kw):
return relationships.JoinCondition(
- self.purely_single_col,
- self.purely_single_col,
- self.purely_single_col,
- self.purely_single_col,
- support_sync=False,
- primaryjoin=
- remote(self.purely_single_col.c.path).like(
- foreign(self.purely_single_col.c.path.concat('%'))
- )
- )
+ self.purely_single_col,
+ self.purely_single_col,
+ self.purely_single_col,
+ self.purely_single_col,
+ support_sync=False,
+ primaryjoin=remote(self.purely_single_col.c.path).like(
+ foreign(self.purely_single_col.c.path.concat('%'))
+ )
+ )
def _join_fixture_remote_local_multiple_ref(self, **kw):
- fn = lambda a, b: ((a == b) | (b == a))
+ def fn(a, b): return ((a == b) | (b == a))
return relationships.JoinCondition(
self.selfref, self.selfref,
self.selfref, self.selfref,
local_selectable, remote_selectable,
primaryjoin=and_(
sub_w_sub_rel__sub_id == sub__id,
- sub_w_sub_rel__flag == True
+ sub_w_sub_rel__flag == True # noqa
),
prop=prop
)
)
def _assert_raises_no_relevant_fks(self, fn, expr, relname,
- primary, *arg, **kw):
+ primary, *arg, **kw):
assert_raises_message(
exc.ArgumentError,
r"Could not locate any relevant foreign key columns "
)
def _assert_raises_no_equality(self, fn, expr, relname,
- primary, *arg, **kw):
+ primary, *arg, **kw):
assert_raises_message(
exc.ArgumentError,
"Could not locate any simple equality expressions "
)
def _assert_raises_ambig_join(self, fn, relname, secondary_arg,
- *arg, **kw):
+ *arg, **kw):
if secondary_arg is not None:
assert_raises_message(
exc.AmbiguousForeignKeysError,
fn, *arg, **kw)
def _assert_raises_no_join(self, fn, relname, secondary_arg,
- *arg, **kw):
+ *arg, **kw):
if secondary_arg is not None:
assert_raises_message(
exc.NoForeignKeysError,
class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase,
- AssertsCompiledSQL):
+ AssertsCompiledSQL):
def test_determine_local_remote_pairs_o2o_joined_sub_to_base(self):
joincond = self._join_fixture_o2o_joined_sub_to_base()
eq_(
def test_determine_remote_columns_compound_1(self):
joincond = self._join_fixture_compound_expression_1(
- support_sync=False)
+ support_sync=False)
eq_(
joincond.remote_columns,
set([self.right.c.x, self.right.c.y])
def test_determine_local_remote_compound_1(self):
joincond = self._join_fixture_compound_expression_1(
- support_sync=False)
+ support_sync=False)
eq_(
joincond.local_remote_pairs,
[
def test_determine_local_remote_compound_2(self):
joincond = self._join_fixture_compound_expression_2(
- support_sync=False)
+ support_sync=False)
eq_(
joincond.local_remote_pairs,
[
def test_determine_remote_columns_compound_2(self):
joincond = self._join_fixture_compound_expression_2(
- support_sync=False)
+ support_sync=False)
eq_(
joincond.remote_columns,
set([self.right.c.x, self.right.c.y])
)
-
def test_determine_remote_columns_o2m(self):
joincond = self._join_fixture_o2m()
eq_(
joincond.local_remote_pairs,
[
(self.composite_selfref.c.group_id,
- self.composite_selfref.c.group_id),
+ self.composite_selfref.c.group_id),
(self.composite_selfref.c.id,
- self.composite_selfref.c.parent_id),
+ self.composite_selfref.c.parent_id),
]
)
- def test_determine_local_remote_pairs_o2m_composite_selfref_func_warning(self):
+ def test_determine_local_remote_pairs_o2m_composite_selfref_func_warning(
+ self):
self._assert_non_simple_warning(
self._join_fixture_o2m_composite_selfref_func
)
self._join_fixture_m2o_sub_to_joined_sub_func
)
- def test_determine_local_remote_pairs_o2m_composite_selfref_func_annotated(self):
+ def test_determine_local_remote_pairs_o2m_composite_selfref_func_annotated(
+ self):
joincond = self._join_fixture_o2m_composite_selfref_func_annotated()
eq_(
joincond.local_remote_pairs,
[
(self.composite_selfref.c.group_id,
- self.composite_selfref.c.group_id),
+ self.composite_selfref.c.group_id),
(self.composite_selfref.c.id,
- self.composite_selfref.c.parent_id),
+ self.composite_selfref.c.parent_id),
]
)
eq_(
joincond.remote_columns,
set([self.composite_selfref.c.id,
- self.composite_selfref.c.group_id])
+ self.composite_selfref.c.group_id])
)
def test_determine_remote_columns_m2o(self):
eq_(
joincond.local_remote_pairs,
[(self.m2mleft.c.id, self.m2msecondary.c.lid),
- (self.m2mright.c.id, self.m2msecondary.c.rid)]
+ (self.m2mright.c.id, self.m2msecondary.c.rid)]
)
def test_determine_local_remote_pairs_m2m_backref(self):
eq_(
j1.local_remote_pairs,
[(self.m2mleft.c.id, self.m2msecondary.c.lid),
- (self.m2mright.c.id, self.m2msecondary.c.rid)]
+ (self.m2mright.c.id, self.m2msecondary.c.rid)]
)
eq_(
j2.local_remote_pairs,
set([self.m2msecondary.c.lid, self.m2msecondary.c.rid])
)
-
def test_determine_remote_columns_m2o_selfref(self):
joincond = self._join_fixture_m2o_selfref()
eq_(
eq_(
joincond.local_remote_pairs,
[
- (self.composite_target.c.uid, self.composite_multi_ref.c.uid2,),
+ (self.composite_target.c.uid,
+ self.composite_multi_ref.c.uid2,),
(self.composite_target.c.oid, self.composite_multi_ref.c.oid,)
]
)
set([self.base.c.flag, self.sub_w_sub_rel.c.sub_id])
)
+
class DirectionTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
def test_determine_direction_compound_2(self):
joincond = self._join_fixture_compound_expression_2(
- support_sync=False)
+ support_sync=False)
is_(
joincond.direction,
ONETOMANY
joincond = self._join_fixture_purely_single_m2o()
is_(joincond.direction, MANYTOONE)
+
class DetermineJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
def test_determine_join_o2m(self):
joincond = self._join_fixture_o2m()
self.assert_compile(
- joincond.primaryjoin,
- "lft.id = rgt.lid"
+ joincond.primaryjoin,
+ "lft.id = rgt.lid"
)
def test_determine_join_o2m_selfref(self):
joincond = self._join_fixture_o2m_selfref()
self.assert_compile(
- joincond.primaryjoin,
- "selfref.id = selfref.sid"
+ joincond.primaryjoin,
+ "selfref.id = selfref.sid"
)
def test_determine_join_m2o_selfref(self):
joincond = self._join_fixture_m2o_selfref()
self.assert_compile(
- joincond.primaryjoin,
- "selfref.id = selfref.sid"
+ joincond.primaryjoin,
+ "selfref.id = selfref.sid"
)
def test_determine_join_o2m_composite_selfref(self):
joincond = self._join_fixture_o2m_composite_selfref()
self.assert_compile(
- joincond.primaryjoin,
- "composite_selfref.group_id = composite_selfref.group_id "
- "AND composite_selfref.id = composite_selfref.parent_id"
+ joincond.primaryjoin,
+ "composite_selfref.group_id = composite_selfref.group_id "
+ "AND composite_selfref.id = composite_selfref.parent_id"
)
def test_determine_join_m2o_composite_selfref(self):
joincond = self._join_fixture_m2o_composite_selfref()
self.assert_compile(
- joincond.primaryjoin,
- "composite_selfref.group_id = composite_selfref.group_id "
- "AND composite_selfref.id = composite_selfref.parent_id"
+ joincond.primaryjoin,
+ "composite_selfref.group_id = composite_selfref.group_id "
+ "AND composite_selfref.id = composite_selfref.parent_id"
)
def test_determine_join_m2o(self):
joincond = self._join_fixture_m2o()
self.assert_compile(
- joincond.primaryjoin,
- "lft.id = rgt.lid"
+ joincond.primaryjoin,
+ "lft.id = rgt.lid"
)
def test_determine_join_ambiguous_fks_o2m(self):
"should be counted as containing a foreign "
"key reference to the parent table.",
relationships.JoinCondition,
- self.left,
- self.right_multi_fk,
- self.left,
- self.right_multi_fk,
+ self.left,
+ self.right_multi_fk,
+ self.left,
+ self.right_multi_fk,
)
def test_determine_join_no_fks_o2m(self):
self._assert_raises_no_join(
relationships.JoinCondition,
"None", None,
- self.left,
- self.selfref,
- self.left,
- self.selfref,
+ self.left,
+ self.selfref,
+ self.left,
+ self.selfref,
)
-
def test_determine_join_ambiguous_fks_m2m(self):
self._assert_raises_ambig_join(
self._assert_raises_no_join(
relationships.JoinCondition,
"None", self.m2msecondary_no_fks,
- self.m2mleft,
- self.m2mright,
- self.m2mleft,
- self.m2mright,
- secondary=self.m2msecondary_no_fks
+ self.m2mleft,
+ self.m2mright,
+ self.m2mleft,
+ self.m2mright,
+ secondary=self.m2msecondary_no_fks
)
def _join_fixture_fks_ambig_m2m(self):
return relationships.JoinCondition(
- self.m2mleft,
- self.m2mright,
- self.m2mleft,
- self.m2mright,
- secondary=self.m2msecondary_ambig_fks,
- consider_as_foreign_keys=[
- self.m2msecondary_ambig_fks.c.lid1,
- self.m2msecondary_ambig_fks.c.rid1]
+ self.m2mleft,
+ self.m2mright,
+ self.m2mleft,
+ self.m2mright,
+ secondary=self.m2msecondary_ambig_fks,
+ consider_as_foreign_keys=[
+ self.m2msecondary_ambig_fks.c.lid1,
+ self.m2msecondary_ambig_fks.c.rid1]
)
def test_determine_join_w_fks_ambig_m2m(self):
joincond = self._join_fixture_fks_ambig_m2m()
self.assert_compile(
- joincond.primaryjoin,
- "m2mlft.id = m2msecondary_ambig_fks.lid1"
+ joincond.primaryjoin,
+ "m2mlft.id = m2msecondary_ambig_fks.lid1"
)
self.assert_compile(
- joincond.secondaryjoin,
- "m2mrgt.id = m2msecondary_ambig_fks.rid1"
+ joincond.secondaryjoin,
+ "m2mrgt.id = m2msecondary_ambig_fks.rid1"
)
+
class AdaptedJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
joincond = self._join_fixture_o2m_selfref()
left = select([joincond.parent_selectable]).alias('pj')
pj, sj, sec, adapter, ds = joincond.join_targets(
- left,
- joincond.child_selectable,
- True)
+ left,
+ joincond.child_selectable,
+ True)
self.assert_compile(
pj, "pj.id = selfref.sid"
)
right = select([joincond.child_selectable]).alias('pj')
pj, sj, sec, adapter, ds = joincond.join_targets(
- joincond.parent_selectable,
- right,
- True)
+ joincond.parent_selectable,
+ right,
+ True)
self.assert_compile(
pj, "selfref.id = pj.sid"
)
-
def test_join_targets_o2m_plain(self):
joincond = self._join_fixture_o2m()
pj, sj, sec, adapter, ds = joincond.join_targets(
- joincond.parent_selectable,
- joincond.child_selectable,
- False)
+ joincond.parent_selectable,
+ joincond.child_selectable,
+ False)
self.assert_compile(
pj, "lft.id = rgt.lid"
)
joincond = self._join_fixture_o2m()
left = select([joincond.parent_selectable]).alias('pj')
pj, sj, sec, adapter, ds = joincond.join_targets(
- left,
- joincond.child_selectable,
- True)
+ left,
+ joincond.child_selectable,
+ True)
self.assert_compile(
pj, "pj.id = rgt.lid"
)
joincond = self._join_fixture_o2m()
right = select([joincond.child_selectable]).alias('pj')
pj, sj, sec, adapter, ds = joincond.join_targets(
- joincond.parent_selectable,
- right,
- True)
+ joincond.parent_selectable,
+ right,
+ True)
self.assert_compile(
pj, "lft.id = pj.lid"
)
joincond = self._join_fixture_o2m_composite_selfref()
right = select([joincond.child_selectable]).alias('pj')
pj, sj, sec, adapter, ds = joincond.join_targets(
- joincond.parent_selectable,
- right,
- True)
+ joincond.parent_selectable,
+ right,
+ True)
self.assert_compile(
pj,
"pj.group_id = composite_selfref.group_id "
joincond = self._join_fixture_m2o_composite_selfref()
right = select([joincond.child_selectable]).alias('pj')
pj, sj, sec, adapter, ds = joincond.join_targets(
- joincond.parent_selectable,
- right,
- True)
+ joincond.parent_selectable,
+ right,
+ True)
self.assert_compile(
pj,
"pj.group_id = composite_selfref.group_id "
"AND pj.id = composite_selfref.parent_id"
)
+
class LazyClauseTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
)
def test_lazy_clause_o2m_o_side_none(self):
- # test for #2948. When the join is "o.id == m.oid AND o.something == something",
+ # test for #2948. When the join is "o.id == m.oid
+ # AND o.something == something",
# we don't want 'o' brought into the lazy load for 'm'
joincond = self._join_fixture_o2m_o_side_none()
lazywhere, bind_to_col, equated_columns = joincond.create_lazy_clause()
def test_lazy_clause_o2m_o_side_none_reverse(self):
# continued test for #2948.
joincond = self._join_fixture_o2m_o_side_none()
- lazywhere, bind_to_col, equated_columns = joincond.create_lazy_clause(reverse_direction=True)
+ lazywhere, bind_to_col, equated_columns = joincond.create_lazy_clause(
+ reverse_direction=True)
self.assert_compile(
lazywhere,
"lft.id = :param_1 AND lft.x = :x_1",
- checkparams= {'param_1': None, 'x_1': 5}
+ checkparams={'param_1': None, 'x_1': 5}
)
def test_lazy_clause_remote_local_multiple_ref(self):
set([
(employee_t.c.company_id, employee_t.c.company_id),
(employee_t.c.reports_to_id, employee_t.c.emp_id),
- ])
+ ])
)
def _setup_data(self, sess):
self.tables.tableA)
for cascade in ("save-update, delete",
- #"save-update, delete-orphan",
+ # "save-update, delete-orphan",
"save-update, delete, delete-orphan"):
mapper(B, tableB, properties={
'a': relationship(A, cascade=cascade, single_parent=True)
self.tables.tableA)
for cascade in ("save-update, delete",
- #"save-update, delete-orphan",
+ # "save-update, delete-orphan",
"save-update, delete, delete-orphan"):
mapper(A, tableA, properties={
'bs': relationship(B, cascade=cascade)
sess.commit()
eq_(sess.query(Address).all(), [
Address(email='a1', user=User(name='u1'))
- ])
+ ])
class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
assert_raises_message(sa.exc.ArgumentError,
r"reverse_property 'dingaling' on relationship "
r"User.addresses references "
- r"relationship Address.dingaling, which does not "
+ r"relationship Address.dingaling, "
+ r"which does not "
r"reference mapper Mapper\|User\|users",
configure_mappers)
-
class NoLoadBackPopulates(_fixtures.FixtureTest):
"""test the noload stratgegy which unlike others doesn't use
A, B, C, D = cls.classes.A, cls.classes.B, cls.classes.C, cls.classes.D
a, b, c, d = cls.tables.a, cls.tables.b, cls.tables.c, cls.tables.d
j = sa.join(b, d, b.c.d_id == d.c.id).join(c, c.c.d_id == d.c.id)
- #j = join(b, d, b.c.d_id == d.c.id).join(c, c.c.d_id == d.c.id).alias()
+ # j = join(b, d, b.c.d_id == d.c.id).join(c, c.c.d_id == d.c.id) \
+ # .alias()
mapper(A, a, properties={
"b": relationship(B),
"d": relationship(
D, secondary=j,
primaryjoin=and_(a.c.b_id == b.c.id, a.c.id == c.c.a_id),
secondaryjoin=d.c.id == b.c.d_id,
- #primaryjoin=and_(a.c.b_id == j.c.b_id, a.c.id == j.c.c_a_id),
- #secondaryjoin=d.c.id == j.c.b_d_id,
+ # primaryjoin=and_(a.c.b_id == j.c.b_id, a.c.id == j.c.c_a_id),
+ # secondaryjoin=d.c.id == j.c.b_d_id,
uselist=False,
viewonly=True
)
mapper(Bar, bars)
self._assert_raises_no_equality(configure_mappers,
- "foos.id > foos.fid", "Foo.foos", "primary"
- )
+ "foos.id > foos.fid", "Foo.foos",
+ "primary")
def test_no_equated_viewonly(self):
bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (
# ensure m2m backref is set up with correct annotations
# [ticket:2578]
mapper(Foo, foos, properties={
- 'bars': relationship(Bar, secondary=foobars_with_fks, backref="foos")
+ 'bars': relationship(Bar, secondary=foobars_with_fks,
+ backref="foos")
})
mapper(Bar, bars)
sa.orm.configure_mappers()
from sqlalchemy.testing import fixtures
-
class _ScopedTest(fixtures.MappedTest):
"""Adds another lookup bucket to emulate Session globals."""
@classmethod
def define_tables(cls, metadata):
Table('table1', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(30)))
Table('table2', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('someid', None, ForeignKey('table1.id')))
def test_basic(self):
class SomeObject(fixtures.ComparableEntity):
query = Session.query_property()
+
class SomeOtherObject(fixtures.ComparableEntity):
query = Session.query_property()
custom_query = Session.query_property(query_cls=CustomQuery)
mapper(SomeObject, table1, properties={
- 'options':relationship(SomeOtherObject)})
+ 'options': relationship(SomeOtherObject)})
mapper(SomeOtherObject, table2)
s = SomeObject(id=1, data="hello")
Session.refresh(sso)
Session.remove()
- eq_(SomeObject(id=1, data="hello", options=[SomeOtherObject(someid=1)]),
+ eq_(SomeObject(id=1, data="hello",
+ options=[SomeOtherObject(someid=1)]),
Session.query(SomeObject).one())
- eq_(SomeObject(id=1, data="hello", options=[SomeOtherObject(someid=1)]),
+ eq_(SomeObject(id=1, data="hello",
+ options=[SomeOtherObject(someid=1)]),
SomeObject.query.one())
eq_(SomeOtherObject(someid=1),
SomeOtherObject.query.filter(
"At least one scoped session is already present. ",
Session.configure, bind=testing.db
)
-
-
-
from sqlalchemy.testing import fixtures
-
# TODO: more tests mapping to selects
class SelectableNoFromsTest(fixtures.MappedTest, AssertsCompiledSQL):
@classmethod
def define_tables(cls, metadata):
Table('common', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', Integer),
Column('extra', String(45)))
def test_no_tables(self):
Subset = self.classes.Subset
-
selectable = select([column("x"), column("y"), column("z")]).alias()
mapper(Subset, selectable, primary_key=[selectable.c.x])
def test_no_table_needs_pl(self):
Subset = self.classes.Subset
-
selectable = select([column("x"), column("y"), column("z")]).alias()
assert_raises_message(
sa.exc.ArgumentError,
Subset, common = self.classes.Subset, self.tables.common
subset_select = select([common.c.id, common.c.data])
- assert_raises(sa.exc.InvalidRequestError, mapper, Subset, subset_select)
+ assert_raises(sa.exc.InvalidRequestError,
+ mapper, Subset, subset_select)
def test_basic(self):
Subset, common = self.classes.Subset, self.tables.common
sess.expunge_all()
eq_(sess.query(Subset).all(), [Subset(data=1)])
- eq_(sess.query(Subset).filter(Subset.data==1).one(), Subset(data=1))
- eq_(sess.query(Subset).filter(Subset.data!=1).first(), None)
+ eq_(sess.query(Subset).filter(Subset.data == 1).one(), Subset(data=1))
+ eq_(sess.query(Subset).filter(Subset.data != 1).first(), None)
subset_select = sa.orm.class_mapper(Subset).mapped_table
- eq_(sess.query(Subset).filter(subset_select.c.data==1).one(),
+ eq_(sess.query(Subset).filter(subset_select.c.data == 1).one(),
Subset(data=1))
-
-
users = self.tables.users
-
sess = create_session(bind=self.metadata.bind)
users.insert().execute(id=7, name='jack')
{'id': 7}).fetchall(),
[(7, 'jack')])
-
# use :bindparam style
- eq_(sess.scalar("select id from users where id=:id",
- {'id': 7}),
- 7)
+ eq_(sess.scalar("select id from users where id=:id", {'id': 7}), 7)
def test_parameter_execute(self):
users = self.tables.users
sess = Session(bind=testing.db)
sess.execute(users.insert(), [
- {"id": 7, "name": "u7"},
- {"id": 8, "name": "u8"}
- ]
- )
+ {"id": 7, "name": "u7"},
+ {"id": 8, "name": "u8"}
+ ])
sess.execute(users.insert(), {"id": 9, "name": "u9"})
eq_(
- sess.execute(sa.select([users.c.id]).\
- order_by(users.c.id)).fetchall(),
+ sess.execute(sa.select([users.c.id]).
+ order_by(users.c.id)).fetchall(),
[(7, ), (8, ), (9, )]
)
sess.commit()
assert conn1.execute("select count(1) from users").scalar() == 1
- assert testing.db.connect().execute('select count(1) from users'
- ).scalar() == 1
+ assert testing.db.connect().execute('select count(1) from users') \
+ .scalar() == 1
sess.close()
+
class SessionUtilTest(_fixtures.FixtureTest):
run_inserts = None
u = User()
u.name = 'ed'
sess.add(u)
+
def go(obj):
assert u not in sess.query(User).all()
testing.run_as_contextmanager(sess.no_autoflush, go)
"""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
'addresses': relationship(Address, backref="user")})
gc_collect()
eq_(q.one(), Address(email_address='foo'))
-
-
@testing.requires.independent_connections
@engines.close_open_connections
def test_autoflush_unbound(self):
assert u2 is u
assert sess.execute('select count(1) from users',
mapper=User).scalar() == 1
- assert testing.db.connect().execute('select count(1) from '
- 'users').scalar() == 0
+ assert testing.db.connect().execute('select count(1) from users') \
+ .scalar() == 0
sess.commit()
assert sess.execute('select count(1) from users',
mapper=User).scalar() == 1
- assert testing.db.connect().execute('select count(1) from '
- 'users').scalar() == 1
+ assert testing.db.connect().execute('select count(1) from users') \
+ .scalar() == 1
sess.close()
- except:
+ except Exception:
sess.rollback()
raise
sess.add(u)
sess.commit()
assert conn1.execute('select count(1) from users').scalar() == 1
- assert testing.db.connect().execute('select count(1) from users'
- ).scalar() == 1
+ assert testing.db.connect().execute('select count(1) from users') \
+ .scalar() == 1
sess.commit()
-
def test_autocommit_doesnt_raise_on_pending(self):
User, users = self.classes.User, self.tables.users
sess.rollback()
assert not sess.is_active
-
@engines.close_open_connections
def test_add_delete(self):
User, Address, addresses, users = (self.classes.User,
- self.classes.Address,
- self.tables.addresses,
- self.tables.users)
-
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users)
s = create_session()
mapper(User, users, properties={
users = self.tables.users
sess = create_session()
+
class Foo(object):
def __init__(self):
sess.add(self)
+
class Bar(Foo):
def __init__(self):
sess.add(self)
def test_autoflush_rollback(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties={
def test_expunge_cascade(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties={
'addresses': relationship(Address,
- backref=backref("user", cascade="all"),
- cascade="all")})
+ backref=backref("user", cascade="all"),
+ cascade="all")})
session = create_session()
u = session.query(User).filter_by(id=7).one()
self._assert_no_cycle(u1)
self._assert_not_modified(u1)
+
class WeakIdentityMapTest(_fixtures.FixtureTest):
run_inserts = None
users, User = self.tables.users, self.classes.User
-
s = create_session()
mapper(User, users)
@testing.requires.predictable_gc
def test_weakref_with_cycles_o2m(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
s = sessionmaker()()
mapper(User, users, properties={
@testing.requires.predictable_gc
def test_weakref_with_cycles_o2o(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
s = sessionmaker()()
mapper(User, users, properties={
User, Address = self._default_mapping_fixture()
s = Session()
- u = User(name='fred', addresses=[
- Address(email_address='foo')])
+ u = User(name='fred', addresses=[Address(email_address='foo')])
s.add(u)
s.commit()
u.id
+
def go():
assert not s.is_modified(u)
self.assert_sql_count(
s.commit()
u.id
+
def go():
assert not s.is_modified(u, passive=True)
self.assert_sql_count(
)
u.name = 'newname'
+
def go():
assert s.is_modified(u, passive=True)
self.assert_sql_count(
s.commit()
assert not s.is_modified(u)
+
class DisposedStates(fixtures.MappedTest):
run_setup_mappers = 'once'
run_inserts = 'once'
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata, Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
- Column('data', String(50)))
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)))
@classmethod
def setup_classes(cls):
T = self.classes.T
sess = create_session(**kwargs)
- data = o1, o2, o3, o4, o5 = [T('t1'), T('t2'), T('t3'), T('t4'
- ), T('t5')]
+ data = o1, o2, o3, o4, o5 = [T('t1'), T('t2'), T('t3'), T('t4'),
+ T('t5')]
sess.add_all(data)
return ok
def _map_it(self, cls):
- return mapper(cls, Table('t', sa.MetaData(), Column('id',
- Integer, primary_key=True,
- test_needs_autoincrement=True)))
+ return mapper(cls, Table('t', sa.MetaData(),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True)))
def _test_instance_guards(self, user_arg):
watchdog = set()
watchdog.add(method)
callable_ = getattr(obj, method)
assert_raises(sa.orm.exc.UnmappedInstanceError,
- callable_, *args, **kw)
+ callable_, *args, **kw)
def raises_(method, *args, **kw):
x_raises_(create_session(), method, *args, **kw)
@classmethod
def define_tables(cls, metadata):
- Table('users', metadata, Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ Table('users', metadata, Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(20)), test_needs_acid=True)
@classmethod
self.bind.commit()
-
class FlushWarningsTest(fixtures.MappedTest):
run_setup_mappers = 'each'
@classmethod
def define_tables(cls, metadata):
Table('user', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(20))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(20)))
Table('address', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('user_id', Integer, ForeignKey('user.id')),
- Column('email', String(20))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer, ForeignKey('user.id')),
+ Column('email', String(20)))
@classmethod
def setup_classes(cls):
class User(cls.Basic):
pass
+
class Address(cls.Basic):
pass
user, User = cls.tables.user, cls.classes.User
address, Address = cls.tables.address, cls.classes.Address
mapper(User, user, properties={
- 'addresses': relationship(Address, backref="user")
- })
+ 'addresses': relationship(Address, backref="user")
+ })
mapper(Address, address)
def test_o2m_cascade_add(self):
Address = self.classes.Address
+
def evt(mapper, conn, instance):
instance.addresses.append(Address(email='x1'))
self._test(evt, "collection append")
def test_m2o_cascade_add(self):
User = self.classes.User
+
def evt(mapper, conn, instance):
instance.addresses[0].user = User(name='u2')
self._test(evt, "related attribute set")
def test_plain_add(self):
Address = self.classes.Address
+
def evt(mapper, conn, instance):
object_session(instance).add(Address(email='x1'))
self._test(evt, r"Session.add\(\)")
def test_plain_merge(self):
Address = self.classes.Address
+
def evt(mapper, conn, instance):
object_session(instance).merge(Address(email='x1'))
self._test(evt, r"Session.merge\(\)")
def test_plain_delete(self):
Address = self.classes.Address
+
def evt(mapper, conn, instance):
object_session(instance).delete(Address(email='x1'))
self._test(evt, r"Session.delete\(\)")
from sqlalchemy.orm import with_polymorphic
+from .inheritance._poly_fixtures import _Polymorphic, Person, Engineer, \
+ Paperwork, Machine, MachineType, Company
+
+
class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
run_inserts = 'once'
run_deletes = None
def test_basic(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(
- mapper(Address, addresses),
- order_by=Address.id)
+ 'addresses': relationship(
+ mapper(Address, addresses),
+ order_by=Address.id)
})
sess = create_session()
def go():
eq_(
- [User(id=7, addresses=[
- Address(id=1, email_address='jack@bean.com')])],
- q.filter(User.id==7).all()
+ [User(id=7, addresses=[
+ Address(id=1, email_address='jack@bean.com')])],
+ q.filter(User.id == 7).all()
)
self.assert_sql_count(testing.db, go, 2)
self.assert_sql_count(testing.db, go, 2)
def test_from_aliased(self):
- users, Dingaling, User, dingalings, Address, addresses = (self.tables.users,
- self.classes.Dingaling,
- self.classes.User,
- self.tables.dingalings,
- self.classes.Address,
- self.tables.addresses)
+ users, Dingaling, User, dingalings, Address, addresses = (
+ self.tables.users,
+ self.classes.Dingaling,
+ self.classes.User,
+ self.tables.dingalings,
+ self.classes.Address,
+ self.tables.addresses)
mapper(Dingaling, dingalings)
mapper(Address, addresses, properties={
- 'dingalings':relationship(Dingaling, order_by=Dingaling.id)
+ 'dingalings': relationship(Dingaling, order_by=Dingaling.id)
})
mapper(User, users, properties={
- 'addresses':relationship(
- Address,
- order_by=Address.id)
+ 'addresses': relationship(
+ Address,
+ order_by=Address.id)
})
sess = create_session()
def go():
eq_(
- [User(id=7, addresses=[
- Address(id=1, email_address='jack@bean.com')])],
- q.filter(u.id==7).all()
+ [User(id=7, addresses=[
+ Address(id=1, email_address='jack@bean.com')])],
+ q.filter(u.id == 7).all()
)
self.assert_sql_count(testing.db, go, 2)
self.assert_sql_count(testing.db, go, 2)
q = sess.query(u).\
- options(subqueryload_all(u.addresses, Address.dingalings))
+ options(subqueryload_all(u.addresses, Address.dingalings))
def go():
eq_(
[
User(id=8, addresses=[
- Address(id=2, email_address='ed@wood.com', dingalings=[Dingaling()]),
+ Address(id=2, email_address='ed@wood.com',
+ dingalings=[Dingaling()]),
Address(id=3, email_address='ed@bettyboop.com'),
Address(id=4, email_address='ed@lala.com'),
]),
)
self.assert_sql_count(testing.db, go, 3)
-
def test_from_get(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(
- mapper(Address, addresses),
- order_by=Address.id)
+ 'addresses': relationship(
+ mapper(Address, addresses),
+ order_by=Address.id)
})
sess = create_session()
q = sess.query(User).options(subqueryload(User.addresses))
+
def go():
eq_(
- User(id=7, addresses=[
- Address(id=1, email_address='jack@bean.com')]),
- q.get(7)
+ User(id=7, addresses=[
+ Address(id=1, email_address='jack@bean.com')]),
+ q.get(7)
)
self.assert_sql_count(testing.db, go, 2)
def test_from_params(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(
- mapper(Address, addresses),
- order_by=Address.id)
+ 'addresses': relationship(
+ mapper(Address, addresses),
+ order_by=Address.id)
})
sess = create_session()
q = sess.query(User).options(subqueryload(User.addresses))
+
def go():
eq_(
- User(id=7, addresses=[
- Address(id=1, email_address='jack@bean.com')]),
- q.filter(User.id==bindparam('foo')).params(foo=7).one()
+ User(id=7, addresses=[
+ Address(id=1, email_address='jack@bean.com')]),
+ q.filter(User.id == bindparam('foo')).params(foo=7).one()
)
self.assert_sql_count(testing.db, go, 2)
"""test no subquery option on a dynamic."""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, lazy="dynamic")
+ 'addresses': relationship(Address, lazy="dynamic")
})
mapper(Address, addresses)
sess = create_session()
# the query needlessly and put the result nowhere.
assert_raises_message(
sa.exc.InvalidRequestError,
- "User.addresses' does not support object population - eager loading cannot be applied.",
+ "User.addresses' does not support object population - eager "
+ "loading cannot be applied.",
sess.query(User).options(subqueryload(User.addresses)).first,
)
def test_many_to_many_plain(self):
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
+ keywords, items, item_keywords, Keyword, Item = (
+ self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
mapper(Keyword, keywords)
- mapper(Item, items, properties = dict(
- keywords = relationship(Keyword, secondary=item_keywords,
- lazy='subquery', order_by=keywords.c.id)))
+ mapper(Item, items, properties=dict(
+ keywords=relationship(Keyword, secondary=item_keywords,
+ lazy='subquery', order_by=keywords.c.id)))
q = create_session().query(Item).order_by(Item.id)
+
def go():
eq_(self.static.item_keyword_result, q.all())
self.assert_sql_count(testing.db, go, 2)
def test_many_to_many_with_join(self):
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
+ keywords, items, item_keywords, Keyword, Item = (
+ self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
mapper(Keyword, keywords)
- mapper(Item, items, properties = dict(
- keywords = relationship(Keyword, secondary=item_keywords,
- lazy='subquery', order_by=keywords.c.id)))
+ mapper(Item, items, properties=dict(
+ keywords=relationship(Keyword, secondary=item_keywords,
+ lazy='subquery', order_by=keywords.c.id)))
q = create_session().query(Item).order_by(Item.id)
+
def go():
eq_(self.static.item_keyword_result[0:2],
q.join('keywords').filter(Keyword.name == 'red').all())
self.assert_sql_count(testing.db, go, 2)
def test_many_to_many_with_join_alias(self):
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
+ keywords, items, item_keywords, Keyword, Item = (
+ self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
mapper(Keyword, keywords)
- mapper(Item, items, properties = dict(
- keywords = relationship(Keyword, secondary=item_keywords,
- lazy='subquery', order_by=keywords.c.id)))
+ mapper(Item, items, properties=dict(
+ keywords=relationship(Keyword, secondary=item_keywords,
+ lazy='subquery', order_by=keywords.c.id)))
q = create_session().query(Item).order_by(Item.id)
+
def go():
eq_(self.static.item_keyword_result[0:2],
(q.join('keywords', aliased=True).
def test_orderby(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- mapper(User, users, properties = {
- 'addresses':relationship(mapper(Address, addresses),
- lazy='subquery', order_by=addresses.c.email_address),
+ mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses),
+ lazy='subquery',
+ order_by=addresses.c.email_address),
})
q = create_session().query(User)
eq_([
def test_orderby_multi(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
- mapper(User, users, properties = {
- 'addresses':relationship(mapper(Address, addresses),
- lazy='subquery',
- order_by=[
- addresses.c.email_address,
- addresses.c.id]),
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
+
+ mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses),
+ lazy='subquery',
+ order_by=[
+ addresses.c.email_address,
+ addresses.c.id]),
})
q = create_session().query(User)
eq_([
order by a relationship to a second table"""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses)
- mapper(User, users, properties = dict(
- addresses = relationship(Address,
- lazy='subquery',
- order_by=addresses.c.id),
+ mapper(User, users, properties=dict(
+ addresses=relationship(Address,
+ lazy='subquery',
+ order_by=addresses.c.id),
))
q = create_session().query(User)
- l = q.filter(User.id==Address.user_id).\
+ result = q.filter(User.id == Address.user_id).\
order_by(Address.email_address).all()
eq_([
User(id=7, addresses=[
Address(id=1)
]),
- ], l)
+ ], result)
def test_orderby_desc(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses)
- mapper(User, users, properties = dict(
- addresses = relationship(Address, lazy='subquery',
- order_by=[
- sa.desc(addresses.c.email_address)
- ]),
+ mapper(User, users, properties=dict(
+ addresses=relationship(Address, lazy='subquery',
+ order_by=[
+ sa.desc(addresses.c.email_address)
+ ]),
))
sess = create_session()
eq_([
], sess.query(User).order_by(User.id).all())
_pathing_runs = [
- ( "lazyload", "lazyload", "lazyload", 15 ),
+ ("lazyload", "lazyload", "lazyload", 15),
("subqueryload", "lazyload", "lazyload", 12),
("subqueryload", "subqueryload", "lazyload", 8),
("joinedload", "subqueryload", "lazyload", 7),
self._do_mapper_test(self._pathing_runs)
def _do_options_test(self, configs):
- users, Keyword, orders, items, order_items, Order, Item, User, keywords, item_keywords = (self.tables.users,
- self.classes.Keyword,
- self.tables.orders,
- self.tables.items,
- self.tables.order_items,
- self.classes.Order,
- self.classes.Item,
- self.classes.User,
- self.tables.keywords,
- self.tables.item_keywords)
+ users, Keyword, orders, items, order_items, Order, Item, User, \
+ keywords, item_keywords = (self.tables.users,
+ self.classes.Keyword,
+ self.tables.orders,
+ self.tables.items,
+ self.tables.order_items,
+ self.classes.Order,
+ self.classes.Item,
+ self.classes.User,
+ self.tables.keywords,
+ self.tables.item_keywords)
mapper(User, users, properties={
- 'orders':relationship(Order, order_by=orders.c.id), # o2m, m2o
+ 'orders': relationship(Order, order_by=orders.c.id), # o2m, m2o
})
mapper(Order, orders, properties={
- 'items':relationship(Item,
- secondary=order_items, order_by=items.c.id), #m2m
+ 'items': relationship(Item,
+ secondary=order_items,
+ order_by=items.c.id), # m2m
})
mapper(Item, items, properties={
- 'keywords':relationship(Keyword,
- secondary=item_keywords,
- order_by=keywords.c.id) #m2m
+ 'keywords': relationship(Keyword,
+ secondary=item_keywords,
+ order_by=keywords.c.id) # m2m
})
mapper(Keyword, keywords)
callables = {
- 'joinedload':joinedload,
- 'subqueryload':subqueryload
- }
+ 'joinedload': joinedload,
+ 'subqueryload': subqueryload
+ }
for o, i, k, count in configs:
options = []
if i in callables:
options.append(callables[i](User.orders, Order.items))
if k in callables:
- options.append(callables[k](User.orders, Order.items, Item.keywords))
+ options.append(callables[k](
+ User.orders, Order.items, Item.keywords))
self._do_query_tests(options, count)
def _do_mapper_test(self, configs):
- users, Keyword, orders, items, order_items, Order, Item, User, keywords, item_keywords = (self.tables.users,
- self.classes.Keyword,
- self.tables.orders,
- self.tables.items,
- self.tables.order_items,
- self.classes.Order,
- self.classes.Item,
- self.classes.User,
- self.tables.keywords,
- self.tables.item_keywords)
+ users, Keyword, orders, items, order_items, Order, Item, User, \
+ keywords, item_keywords = (self.tables.users,
+ self.classes.Keyword,
+ self.tables.orders,
+ self.tables.items,
+ self.tables.order_items,
+ self.classes.Order,
+ self.classes.Item,
+ self.classes.User,
+ self.tables.keywords,
+ self.tables.item_keywords)
opts = {
- 'lazyload':'select',
- 'joinedload':'joined',
- 'subqueryload':'subquery',
+ 'lazyload': 'select',
+ 'joinedload': 'joined',
+ 'subqueryload': 'subquery',
}
for o, i, k, count in configs:
mapper(User, users, properties={
- 'orders':relationship(Order, lazy=opts[o], order_by=orders.c.id),
+ 'orders': relationship(Order, lazy=opts[o],
+ order_by=orders.c.id),
})
mapper(Order, orders, properties={
- 'items':relationship(Item,
- secondary=order_items, lazy=opts[i], order_by=items.c.id),
+ 'items': relationship(Item,
+ secondary=order_items, lazy=opts[i],
+ order_by=items.c.id),
})
mapper(Item, items, properties={
- 'keywords':relationship(Keyword,
- lazy=opts[k],
- secondary=item_keywords,
- order_by=keywords.c.id)
+ 'keywords': relationship(Keyword,
+ lazy=opts[k],
+ secondary=item_keywords,
+ order_by=keywords.c.id)
})
mapper(Keyword, keywords)
Order, User = self.classes.Order, self.classes.User
sess = create_session()
+
def go():
eq_(
sess.query(User).options(*opts).order_by(User.id).all(),
self.assert_sql_count(testing.db, go, count)
eq_(
- sess.query(User).options(*opts).filter(User.name=='fred').
- order_by(User.id).all(),
+ sess.query(User).options(*opts).filter(User.name == 'fred').
+ order_by(User.id).all(),
self.static.user_item_keyword_result[2:3]
)
sess = create_session()
eq_(
sess.query(User).options(*opts).join(User.orders).
- filter(Order.id==3).\
- order_by(User.id).all(),
+ filter(Order.id == 3).
+ order_by(User.id).all(),
self.static.user_item_keyword_result[0:1]
)
-
def test_cyclical(self):
"""A circular eager relationship breaks the cycle with a lazy loader"""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses)
- mapper(User, users, properties = dict(
- addresses = relationship(Address, lazy='subquery',
- backref=sa.orm.backref('user', lazy='subquery'),
- order_by=Address.id)
+ mapper(User, users, properties=dict(
+ addresses=relationship(Address, lazy='subquery',
+ backref=sa.orm.backref(
+ 'user', lazy='subquery'),
+ order_by=Address.id)
))
- is_(sa.orm.class_mapper(User).get_property('addresses').lazy, 'subquery')
+ is_(sa.orm.class_mapper(User).get_property('addresses').lazy,
+ 'subquery')
is_(sa.orm.class_mapper(Address).get_property('user').lazy, 'subquery')
sess = create_session()
- eq_(self.static.user_address_result, sess.query(User).order_by(User.id).all())
+ eq_(self.static.user_address_result,
+ sess.query(User).order_by(User.id).all())
def test_double(self):
"""Eager loading with two relationships simultaneously,
from the same table, using aliases."""
- users, orders, User, Address, Order, addresses = (self.tables.users,
- self.tables.orders,
- self.classes.User,
- self.classes.Address,
- self.classes.Order,
- self.tables.addresses)
-
+ users, orders, User, Address, Order, addresses = (
+ self.tables.users,
+ self.tables.orders,
+ self.classes.User,
+ self.classes.Address,
+ self.classes.Order,
+ self.tables.addresses)
openorders = sa.alias(orders, 'openorders')
closedorders = sa.alias(orders, 'closedorders')
open_mapper = mapper(Order, openorders, non_primary=True)
closed_mapper = mapper(Order, closedorders, non_primary=True)
- mapper(User, users, properties = dict(
- addresses = relationship(Address, lazy='subquery',
- order_by=addresses.c.id),
- open_orders = relationship(
+ mapper(User, users, properties=dict(
+ addresses=relationship(Address, lazy='subquery',
+ order_by=addresses.c.id),
+ open_orders=relationship(
open_mapper,
primaryjoin=sa.and_(openorders.c.isopen == 1,
- users.c.id==openorders.c.user_id),
+ users.c.id == openorders.c.user_id),
lazy='subquery', order_by=openorders.c.id),
- closed_orders = relationship(
+ closed_orders=relationship(
closed_mapper,
primaryjoin=sa.and_(closedorders.c.isopen == 0,
- users.c.id==closedorders.c.user_id),
+ users.c.id == closedorders.c.user_id),
lazy='subquery', order_by=closedorders.c.id)))
q = create_session().query(User).order_by(User.id)
User(
id=7,
addresses=[Address(id=1)],
- open_orders = [Order(id=3)],
- closed_orders = [Order(id=1), Order(id=5)]
+ open_orders=[Order(id=3)],
+ closed_orders=[Order(id=1), Order(id=5)]
),
User(
id=8,
addresses=[Address(id=2), Address(id=3), Address(id=4)],
- open_orders = [],
- closed_orders = []
+ open_orders=[],
+ closed_orders=[]
),
User(
id=9,
addresses=[Address(id=5)],
- open_orders = [Order(id=4)],
- closed_orders = [Order(id=2)]
+ open_orders=[Order(id=4)],
+ closed_orders=[Order(id=2)]
),
User(id=10)
"""Eager loading with two relationships simultaneously,
from the same table, using aliases."""
- addresses, items, order_items, orders, Item, User, Address, Order, users = (self.tables.addresses,
- self.tables.items,
- self.tables.order_items,
- self.tables.orders,
- self.classes.Item,
- self.classes.User,
- self.classes.Address,
- self.classes.Order,
- self.tables.users)
-
+ addresses, items, order_items, orders, Item, User, Address, Order, \
+ users = (self.tables.addresses,
+ self.tables.items,
+ self.tables.order_items,
+ self.tables.orders,
+ self.classes.Item,
+ self.classes.User,
+ self.classes.Address,
+ self.classes.Order,
+ self.tables.users)
mapper(Address, addresses)
mapper(Order, orders, properties={
'items': relationship(Item, secondary=order_items, lazy='subquery',
- order_by=items.c.id)})
+ order_by=items.c.id)})
mapper(Item, items)
mapper(User, users, properties=dict(
- addresses=relationship(Address, lazy='subquery', order_by=addresses.c.id),
+ addresses=relationship(
+ Address, lazy='subquery', order_by=addresses.c.id),
open_orders=relationship(
Order,
primaryjoin=sa.and_(orders.c.isopen == 1,
- users.c.id==orders.c.user_id),
+ users.c.id == orders.c.user_id),
lazy='subquery', order_by=orders.c.id),
closed_orders=relationship(
Order,
primaryjoin=sa.and_(orders.c.isopen == 0,
- users.c.id==orders.c.user_id),
+ users.c.id == orders.c.user_id),
lazy='subquery', order_by=orders.c.id)))
q = create_session().query(User).order_by(User.id)
eq_([
User(id=7,
addresses=[
- Address(id=1)],
+ Address(id=1)],
open_orders=[Order(id=3,
items=[
- Item(id=3),
- Item(id=4),
- Item(id=5)])],
+ Item(id=3),
+ Item(id=4),
+ Item(id=5)])],
closed_orders=[Order(id=1,
items=[
- Item(id=1),
- Item(id=2),
- Item(id=3)]),
+ Item(id=1),
+ Item(id=2),
+ Item(id=3)]),
Order(id=5,
items=[
- Item(id=5)])]),
+ Item(id=5)])]),
User(id=8,
addresses=[
- Address(id=2),
- Address(id=3),
- Address(id=4)],
- open_orders = [],
- closed_orders = []),
+ Address(id=2),
+ Address(id=3),
+ Address(id=4)],
+ open_orders=[],
+ closed_orders=[]),
User(id=9,
addresses=[
- Address(id=5)],
+ Address(id=5)],
open_orders=[
- Order(id=4,
- items=[
- Item(id=1),
- Item(id=5)])],
+ Order(id=4,
+ items=[
+ Item(id=1),
+ Item(id=5)])],
closed_orders=[
- Order(id=2,
- items=[
- Item(id=1),
- Item(id=2),
- Item(id=3)])]),
+ Order(id=2,
+ items=[
+ Item(id=1),
+ Item(id=2),
+ Item(id=3)])]),
User(id=10)
], q.all())
self.assert_sql_count(testing.db, go, 6)
def test_limit(self):
"""Limit operations combined with lazy-load relationships."""
- users, items, order_items, orders, Item, User, Address, Order, addresses = (self.tables.users,
- self.tables.items,
- self.tables.order_items,
- self.tables.orders,
- self.classes.Item,
- self.classes.User,
- self.classes.Address,
- self.classes.Order,
- self.tables.addresses)
-
+ users, items, order_items, orders, Item, User, Address, Order, \
+ addresses = (self.tables.users,
+ self.tables.items,
+ self.tables.order_items,
+ self.tables.orders,
+ self.classes.Item,
+ self.classes.User,
+ self.classes.Address,
+ self.classes.Order,
+ self.tables.addresses)
mapper(Item, items)
mapper(Order, orders, properties={
- 'items':relationship(Item, secondary=order_items, lazy='subquery',
- order_by=items.c.id)
+ 'items': relationship(Item, secondary=order_items, lazy='subquery',
+ order_by=items.c.id)
})
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses),
- lazy='subquery',
- order_by=addresses.c.id),
- 'orders':relationship(Order, lazy='select', order_by=orders.c.id)
+ 'addresses': relationship(mapper(Address, addresses),
+ lazy='subquery',
+ order_by=addresses.c.id),
+ 'orders': relationship(Order, lazy='select', order_by=orders.c.id)
})
sess = create_session()
q = sess.query(User)
- l = q.order_by(User.id).limit(2).offset(1).all()
- eq_(self.static.user_all_result[1:3], l)
+ result = q.order_by(User.id).limit(2).offset(1).all()
+ eq_(self.static.user_all_result[1:3], result)
sess = create_session()
- l = q.order_by(sa.desc(User.id)).limit(2).offset(2).all()
- eq_(list(reversed(self.static.user_all_result[0:2])), l)
+ result = q.order_by(sa.desc(User.id)).limit(2).offset(2).all()
+ eq_(list(reversed(self.static.user_all_result[0:2])), result)
@testing.uses_deprecated("Mapper.order_by")
def test_mapper_order_by(self):
users, User, Address, addresses = (self.tables.users,
- self.classes.User,
- self.classes.Address,
- self.tables.addresses)
+ self.classes.User,
+ self.classes.Address,
+ self.tables.addresses)
mapper(Address, addresses)
mapper(User, users, properties={
- 'addresses':relationship(Address,
- lazy='subquery',
- order_by=addresses.c.id),
- },order_by=users.c.id.desc())
+ 'addresses': relationship(Address,
+ lazy='subquery',
+ order_by=addresses.c.id),
+ }, order_by=users.c.id.desc())
sess = create_session()
q = sess.query(User)
- l = q.limit(2).all()
- eq_(l, list(reversed(self.static.user_address_result[2:4])))
-
+ result = q.limit(2).all()
+ eq_(result, list(reversed(self.static.user_address_result[2:4])))
def test_one_to_many_scalar(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
- mapper(User, users, properties = dict(
- address = relationship(mapper(Address, addresses),
- lazy='subquery', uselist=False)
+ mapper(User, users, properties=dict(
+ address=relationship(mapper(Address, addresses),
+ lazy='subquery', uselist=False)
))
q = create_session().query(User)
def go():
- l = q.filter(users.c.id == 7).all()
- eq_([User(id=7, address=Address(id=1))], l)
+ result = q.filter(users.c.id == 7).all()
+ eq_([User(id=7, address=Address(id=1))], result)
self.assert_sql_count(testing.db, go, 2)
def test_many_to_one(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- mapper(Address, addresses, properties = dict(
- user = relationship(mapper(User, users), lazy='subquery')
+ mapper(Address, addresses, properties=dict(
+ user=relationship(mapper(User, users), lazy='subquery')
))
sess = create_session()
q = sess.query(Address)
def go():
- a = q.filter(addresses.c.id==1).one()
+ a = q.filter(addresses.c.id == 1).one()
is_not_(a.user, None)
u1 = sess.query(User).get(7)
is_(a.user, u1)
def test_double_with_aggregate(self):
User, users, orders, Order = (self.classes.User,
- self.tables.users,
- self.tables.orders,
- self.classes.Order)
+ self.tables.users,
+ self.tables.orders,
+ self.classes.Order)
- max_orders_by_user = sa.select([sa.func.max(orders.c.id).label('order_id')],
- group_by=[orders.c.user_id]
- ).alias('max_orders_by_user')
+ max_orders_by_user = sa.select([sa.func.max(orders.c.id)
+ .label('order_id')],
+ group_by=[orders.c.user_id]) \
+ .alias('max_orders_by_user')
- max_orders = orders.select(orders.c.id==max_orders_by_user.c.order_id).\
- alias('max_orders')
+ max_orders = orders.select(
+ orders.c.id == max_orders_by_user.c.order_id).\
+ alias('max_orders')
mapper(Order, orders)
mapper(User, users, properties={
- 'orders':relationship(Order, backref='user', lazy='subquery',
- order_by=orders.c.id),
- 'max_order':relationship(
- mapper(Order, max_orders, non_primary=True),
- lazy='subquery', uselist=False)
+ 'orders': relationship(Order, backref='user', lazy='subquery',
+ order_by=orders.c.id),
+ 'max_order': relationship(
+ mapper(Order, max_orders, non_primary=True),
+ lazy='subquery', uselist=False)
})
q = create_session().query(User)
def go():
eq_([
User(id=7, orders=[
- Order(id=1),
- Order(id=3),
- Order(id=5),
- ],
+ Order(id=1),
+ Order(id=3),
+ Order(id=5),
+ ],
max_order=Order(id=5)
),
User(id=8, orders=[]),
- User(id=9, orders=[Order(id=2),Order(id=4)],
- max_order=Order(id=4)
- ),
+ User(id=9, orders=[Order(id=2), Order(id=4)],
+ max_order=Order(id=4)),
User(id=10),
], q.order_by(User.id).all())
self.assert_sql_count(testing.db, go, 3)
uselist=False raises a warning."""
User, users, orders, Order = (self.classes.User,
- self.tables.users,
- self.tables.orders,
- self.classes.Order)
-
+ self.tables.users,
+ self.tables.orders,
+ self.classes.Order)
mapper(User, users, properties={
- 'order':relationship(Order, uselist=False)
+ 'order': relationship(Order, uselist=False)
})
mapper(Order, orders)
s = create_session()
assert_raises(sa.exc.SAWarning,
- s.query(User).options(subqueryload(User.order)).all)
+ s.query(User).options(subqueryload(User.order)).all)
+
class LoadOnExistingTest(_fixtures.FixtureTest):
"""test that loaders from a base Query fully populate."""
User, Address, Dingaling = self.classes.User, \
self.classes.Address, self.classes.Dingaling
mapper(User, self.tables.users, properties={
- 'addresses':relationship(Address),
+ 'addresses': relationship(Address),
})
mapper(Address, self.tables.addresses, properties={
- 'dingaling':relationship(Dingaling)
+ 'dingaling': relationship(Dingaling)
})
mapper(Dingaling, self.tables.dingalings)
User, Order, Item = self.classes.User, \
self.classes.Order, self.classes.Item
mapper(User, self.tables.users, properties={
- 'orders':relationship(Order),
+ 'orders': relationship(Order),
})
mapper(Order, self.tables.orders, properties={
- 'items':relationship(Item, secondary=self.tables.order_items),
+ 'items': relationship(Item, secondary=self.tables.order_items),
})
mapper(Item, self.tables.items)
def _eager_config_fixture(self):
User, Address = self.classes.User, self.classes.Address
mapper(User, self.tables.users, properties={
- 'addresses':relationship(Address, lazy="subquery"),
+ 'addresses': relationship(Address, lazy="subquery"),
})
mapper(Address, self.tables.addresses)
sess = Session(autoflush=False)
def _deferred_config_fixture(self):
User, Address = self.classes.User, self.classes.Address
mapper(User, self.tables.users, properties={
- 'name':deferred(self.tables.users.c.name),
- 'addresses':relationship(Address, lazy="subquery"),
+ 'name': deferred(self.tables.users.c.name),
+ 'addresses': relationship(Address, lazy="subquery"),
})
mapper(Address, self.tables.addresses)
sess = Session(autoflush=False)
u1 = sess.query(User).get(8)
assert 'addresses' in u1.__dict__
sess.expire(u1)
+
def go():
eq_(u1.id, 8)
self.assert_sql_count(testing.db, go, 1)
u1 = sess.query(User).get(8)
assert 'addresses' in u1.__dict__
sess.expire(u1, ['addresses'])
+
def go():
eq_(u1.name, 'ed')
self.assert_sql_count(testing.db, go, 1)
a2 = u1.addresses[0]
a2.email_address = 'foo'
sess.query(User).options(subqueryload_all("addresses.dingaling")).\
- filter_by(id=8).all()
+ filter_by(id=8).all()
assert u1.addresses[-1] is a1
for a in u1.addresses:
if a is not a1:
o1 = Order()
u1.orders.append(o1)
sess.query(User).options(subqueryload_all("orders.items")).\
- filter_by(id=7).all()
+ filter_by(id=7).all()
for o in u1.orders:
if o is not o1:
assert 'items' in o.__dict__
def test_load_two_levels_collection_to_scalar(self):
User, Address, Dingaling, sess = self._collection_to_scalar_fixture()
- u1 = sess.query(User).filter_by(id=8).options(subqueryload("addresses")).one()
- sess.query(User).filter_by(id=8).options(subqueryload_all("addresses.dingaling")).first()
+ u1 = sess.query(User).filter_by(id=8).options(
+ subqueryload("addresses")).one()
+ sess.query(User).filter_by(id=8).options(
+ subqueryload_all("addresses.dingaling")).first()
assert 'dingaling' in u1.addresses[0].__dict__
def test_load_two_levels_collection_to_collection(self):
User, Order, Item, sess = self._collection_to_collection_fixture()
- u1 = sess.query(User).filter_by(id=7).options(subqueryload("orders")).one()
- sess.query(User).filter_by(id=7).options(subqueryload_all("orders.items")).first()
+ u1 = sess.query(User).filter_by(id=7).options(
+ subqueryload("orders")).one()
+ sess.query(User).filter_by(id=7).options(
+ subqueryload_all("orders.items")).first()
assert 'items' in u1.orders[0].__dict__
+
class OrderBySecondaryTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('m2m', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('aid', Integer, ForeignKey('a.id')),
Column('bid', Integer, ForeignKey('b.id')))
Table('a', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
Table('b', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
@classmethod
def test_ordering(self):
a, m2m, b = (self.tables.a,
- self.tables.m2m,
- self.tables.b)
+ self.tables.m2m,
+ self.tables.b)
- class A(fixtures.ComparableEntity):pass
- class B(fixtures.ComparableEntity):pass
+ class A(fixtures.ComparableEntity):
+ pass
+
+ class B(fixtures.ComparableEntity):
+ pass
mapper(A, a, properties={
- 'bs':relationship(B, secondary=m2m, lazy='subquery', order_by=m2m.c.id)
+ 'bs': relationship(B, secondary=m2m, lazy='subquery',
+ order_by=m2m.c.id)
})
mapper(B, b)
sess = create_session()
+
def go():
eq_(sess.query(A).all(), [
- A(data='a1', bs=[B(data='b3'), B(data='b1'), B(data='b2')]),
- A(bs=[B(data='b4'), B(data='b3'), B(data='b2')])
+ A(data='a1', bs=[B(data='b3'), B(data='b1'), B(data='b2')]),
+ A(bs=[B(data='b4'), B(data='b3'), B(data='b2')])
])
self.assert_sql_count(testing.db, go, 2)
-from .inheritance._poly_fixtures import _Polymorphic, Person, Engineer, \
- Paperwork, Machine, MachineType, Company
-
class BaseRelationFromJoinedSubclassTest(_Polymorphic):
@classmethod
def define_tables(cls, metadata):
people = Table('people', metadata,
- Column('person_id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('type', String(30)))
+ Column('person_id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('type', String(30)))
# to test fully, PK of engineers table must be
# named differently from that of people
engineers = Table('engineers', metadata,
- Column('engineer_id', Integer,
- ForeignKey('people.person_id'),
- primary_key=True),
- Column('primary_language', String(50)))
+ Column('engineer_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('primary_language', String(50)))
paperwork = Table('paperwork', metadata,
- Column('paperwork_id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('description', String(50)),
- Column('person_id', Integer,
- ForeignKey('people.person_id')))
+ Column('paperwork_id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('description', String(50)),
+ Column('person_id', Integer,
+ ForeignKey('people.person_id')))
@classmethod
def setup_mappers(cls):
paperwork = cls.tables.paperwork
mapper(Person, people,
- polymorphic_on=people.c.type,
- polymorphic_identity='person',
- properties={
- 'paperwork': relationship(
- Paperwork, order_by=paperwork.c.paperwork_id)})
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person',
+ properties={
+ 'paperwork': relationship(
+ Paperwork, order_by=paperwork.c.paperwork_id)})
mapper(Engineer, engineers,
- inherits=Person,
- polymorphic_identity='engineer')
+ inherits=Person,
+ polymorphic_identity='engineer')
mapper(Paperwork, paperwork)
# use Person.paperwork here just to give the least
# amount of context
q = sess.query(Engineer).\
- filter(Engineer.primary_language == 'java').\
- options(subqueryload(Person.paperwork))
+ filter(Engineer.primary_language == 'java').\
+ options(subqueryload(Person.paperwork))
+
def go():
eq_(q.all()[0].paperwork,
- [Paperwork(description="tps report #1"),
- Paperwork(description="tps report #2")],
+ [Paperwork(description="tps report #1"),
+ Paperwork(description="tps report #2")],
)
self.assert_sql_execution(
- testing.db,
- go,
- CompiledSQL(
- "SELECT people.person_id AS people_person_id, "
- "people.name AS people_name, people.type AS people_type, "
- "engineers.engineer_id AS engineers_engineer_id, "
- "engineers.primary_language AS engineers_primary_language "
- "FROM people JOIN engineers ON "
- "people.person_id = engineers.engineer_id "
- "WHERE engineers.primary_language = :primary_language_1",
- {"primary_language_1": "java"}
- ),
- # ensure we get "people JOIN engineer" here, even though
- # primary key "people.person_id" is against "Person"
- # *and* the path comes out as "Person.paperwork", still
- # want to select from "Engineer" entity
- CompiledSQL(
- "SELECT paperwork.paperwork_id AS paperwork_paperwork_id, "
- "paperwork.description AS paperwork_description, "
- "paperwork.person_id AS paperwork_person_id, "
- "anon_1.people_person_id AS anon_1_people_person_id "
- "FROM (SELECT people.person_id AS people_person_id "
- "FROM people JOIN engineers "
- "ON people.person_id = engineers.engineer_id "
- "WHERE engineers.primary_language = "
- ":primary_language_1) AS anon_1 "
- "JOIN paperwork "
- "ON anon_1.people_person_id = paperwork.person_id "
- "ORDER BY anon_1.people_person_id, paperwork.paperwork_id",
- {"primary_language_1": "java"}
- )
+ testing.db,
+ go,
+ CompiledSQL(
+ "SELECT people.person_id AS people_person_id, "
+ "people.name AS people_name, people.type AS people_type, "
+ "engineers.engineer_id AS engineers_engineer_id, "
+ "engineers.primary_language AS engineers_primary_language "
+ "FROM people JOIN engineers ON "
+ "people.person_id = engineers.engineer_id "
+ "WHERE engineers.primary_language = :primary_language_1",
+ {"primary_language_1": "java"}
+ ),
+            # ensure we get "people JOIN engineers" here, even though
+ # primary key "people.person_id" is against "Person"
+ # *and* the path comes out as "Person.paperwork", still
+ # want to select from "Engineer" entity
+ CompiledSQL(
+ "SELECT paperwork.paperwork_id AS paperwork_paperwork_id, "
+ "paperwork.description AS paperwork_description, "
+ "paperwork.person_id AS paperwork_person_id, "
+ "anon_1.people_person_id AS anon_1_people_person_id "
+ "FROM (SELECT people.person_id AS people_person_id "
+ "FROM people JOIN engineers "
+ "ON people.person_id = engineers.engineer_id "
+ "WHERE engineers.primary_language = "
+ ":primary_language_1) AS anon_1 "
+ "JOIN paperwork "
+ "ON anon_1.people_person_id = paperwork.person_id "
+ "ORDER BY anon_1.people_person_id, paperwork.paperwork_id",
+ {"primary_language_1": "java"}
+ )
)
def test_correct_subquery_existingfrom(self):
# use Person.paperwork here just to give the least
# amount of context
q = sess.query(Engineer).\
- filter(Engineer.primary_language == 'java').\
- join(Engineer.paperwork).\
- filter(Paperwork.description == "tps report #2").\
- options(subqueryload(Person.paperwork))
+ filter(Engineer.primary_language == 'java').\
+ join(Engineer.paperwork).\
+ filter(Paperwork.description == "tps report #2").\
+ options(subqueryload(Person.paperwork))
+
def go():
eq_(q.one().paperwork,
- [Paperwork(description="tps report #1"),
- Paperwork(description="tps report #2")],
+ [Paperwork(description="tps report #1"),
+ Paperwork(description="tps report #2")],
)
self.assert_sql_execution(
"engineers.engineer_id AS engineers_engineer_id, "
"engineers.primary_language AS engineers_primary_language "
"FROM people JOIN engineers "
- "ON people.person_id = engineers.engineer_id "
- "JOIN paperwork ON people.person_id = paperwork.person_id "
+ "ON people.person_id = engineers.engineer_id "
+ "JOIN paperwork ON people.person_id = paperwork.person_id "
"WHERE engineers.primary_language = :primary_language_1 "
"AND paperwork.description = :description_1",
{"primary_language_1": "java",
wp = with_polymorphic(Person, [Engineer])
q = sess.query(wp).\
- options(subqueryload(wp.paperwork)).\
- order_by(Engineer.primary_language.desc())
+ options(subqueryload(wp.paperwork)).\
+ order_by(Engineer.primary_language.desc())
def go():
eq_(q.first(),
Paperwork(description="tps report #1"),
Paperwork(description="tps report #2")],
primary_language='java'
- )
+ )
)
self.assert_sql_execution(
wp = with_polymorphic(Person, [Engineer], aliased=True)
q = sess.query(wp).\
- options(subqueryload(wp.paperwork)).\
- order_by(wp.Engineer.primary_language.desc())
+ options(subqueryload(wp.paperwork)).\
+ order_by(wp.Engineer.primary_language.desc())
def go():
eq_(q.first(),
Paperwork(description="tps report #1"),
Paperwork(description="tps report #2")],
primary_language='java'
- )
+ )
)
self.assert_sql_execution(
"SELECT anon_1.people_person_id AS anon_1_people_person_id, "
"anon_1.people_name AS anon_1_people_name, "
"anon_1.people_type AS anon_1_people_type, "
- "anon_1.engineers_engineer_id AS anon_1_engineers_engineer_id, "
+ "anon_1.engineers_engineer_id AS "
+ "anon_1_engineers_engineer_id, "
"anon_1.engineers_primary_language "
"AS anon_1_engineers_primary_language FROM "
"(SELECT people.person_id AS people_person_id, "
wp = with_polymorphic(Person, [Engineer], aliased=True, flat=True)
q = sess.query(wp).\
- options(subqueryload(wp.paperwork)).\
- order_by(wp.Engineer.primary_language.desc())
+ options(subqueryload(wp.paperwork)).\
+ order_by(wp.Engineer.primary_language.desc())
def go():
eq_(q.first(),
Paperwork(description="tps report #1"),
Paperwork(description="tps report #2")],
primary_language='java'
- )
+ )
)
self.assert_sql_execution(
)
)
+
class SubRelationFromJoinedSubclassMultiLevelTest(_Polymorphic):
@classmethod
def define_tables(cls, metadata):
Table('companies', metadata,
- Column('company_id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)))
+ Column('company_id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)))
Table('people', metadata,
- Column('person_id', Integer,
- primary_key=True,
- test_needs_autoincrement=True),
- Column('company_id', ForeignKey('companies.company_id')),
- Column('name', String(50)),
- Column('type', String(30)))
+ Column('person_id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('company_id', ForeignKey('companies.company_id')),
+ Column('name', String(50)),
+ Column('type', String(30)))
Table('engineers', metadata,
- Column('engineer_id', ForeignKey('people.person_id'),
- primary_key=True),
- Column('primary_language', String(50)))
+ Column('engineer_id', ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('primary_language', String(50)))
Table('machines', metadata,
- Column('machine_id',
- Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('engineer_id', ForeignKey('engineers.engineer_id')),
- Column('machine_type_id',
- ForeignKey('machine_type.machine_type_id')))
+ Column('machine_id',
+ Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('engineer_id', ForeignKey('engineers.engineer_id')),
+ Column('machine_type_id',
+ ForeignKey('machine_type.machine_type_id')))
Table('machine_type', metadata,
- Column('machine_type_id',
- Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)))
+ Column('machine_type_id',
+ Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)))
@classmethod
def setup_mappers(cls):
machine_type = cls.tables.machine_type
mapper(Company, companies, properties={
- 'employees': relationship(Person, order_by=people.c.person_id)
- })
+ 'employees': relationship(Person, order_by=people.c.person_id)
+ })
mapper(Person, people,
- polymorphic_on=people.c.type,
- polymorphic_identity='person',
- with_polymorphic='*')
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person',
+ with_polymorphic='*')
mapper(Engineer, engineers,
- inherits=Person,
- polymorphic_identity='engineer', properties={
- 'machines': relationship(Machine,
- order_by=machines.c.machine_id)
- })
+ inherits=Person,
+ polymorphic_identity='engineer', properties={
+ 'machines': relationship(Machine,
+ order_by=machines.c.machine_id)
+ })
mapper(Machine, machines, properties={
- 'type': relationship(MachineType)
- })
+ 'type': relationship(MachineType)
+ })
mapper(MachineType, machine_type)
-
@classmethod
def insert_data(cls):
c1 = cls._fixture()
mt1 = MachineType(name='mt1')
mt2 = MachineType(name='mt2')
return Company(
- employees=[
- Engineer(
- name='e1',
- machines=[
- Machine(name='m1', type=mt1),
- Machine(name='m2', type=mt2)
- ]
- ),
- Engineer(
- name='e2',
- machines=[
- Machine(name='m3', type=mt1),
- Machine(name='m4', type=mt1)
- ]
- )
- ])
+ employees=[
+ Engineer(
+ name='e1',
+ machines=[
+ Machine(name='m1', type=mt1),
+ Machine(name='m2', type=mt2)
+ ]
+ ),
+ Engineer(
+ name='e2',
+ machines=[
+ Machine(name='m3', type=mt1),
+ Machine(name='m4', type=mt1)
+ ]
+ )
+ ])
def test_chained_subq_subclass(self):
s = Session()
q = s.query(Company).options(
- subqueryload(Company.employees.of_type(Engineer)).
- subqueryload(Engineer.machines).
- subqueryload(Machine.type)
- )
+ subqueryload(Company.employees.of_type(Engineer)).
+ subqueryload(Engineer.machines).
+ subqueryload(Machine.type)
+ )
def go():
eq_(
@classmethod
def define_tables(cls, metadata):
Table('nodes', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('parent_id', Integer, ForeignKey('nodes.id')),
- Column('data', String(30)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('parent_id', Integer, ForeignKey('nodes.id')),
+ Column('data', String(30)))
def test_basic(self):
nodes = self.tables.nodes
self.children.append(node)
mapper(Node, nodes, properties={
- 'children':relationship(Node,
- lazy='subquery',
- join_depth=3, order_by=nodes.c.id)
+ 'children': relationship(Node,
+ lazy='subquery',
+ join_depth=3, order_by=nodes.c.id)
})
sess = create_session()
n1 = Node(data='n1')
sess.add(n2)
sess.flush()
sess.expunge_all()
+
def go():
d = sess.query(Node).filter(Node.data.in_(['n1', 'n2'])).\
- order_by(Node.data).all()
+ order_by(Node.data).all()
eq_([Node(data='n1', children=[
- Node(data='n11'),
- Node(data='n12', children=[
- Node(data='n121'),
- Node(data='n122'),
- Node(data='n123')
- ]),
- Node(data='n13')
+ Node(data='n11'),
+ Node(data='n12', children=[
+ Node(data='n121'),
+ Node(data='n122'),
+ Node(data='n123')
]),
+ Node(data='n13')
+ ]),
Node(data='n2', children=[
Node(data='n21', children=[
Node(data='n211'),
], d)
self.assert_sql_count(testing.db, go, 4)
-
-
def test_lazy_fallback_doesnt_affect_eager(self):
nodes = self.tables.nodes
self.children.append(node)
mapper(Node, nodes, properties={
- 'children':relationship(Node, lazy='subquery', join_depth=1,
- order_by=nodes.c.id)
+ 'children': relationship(Node, lazy='subquery', join_depth=1,
+ order_by=nodes.c.id)
})
sess = create_session()
n1 = Node(data='n1')
self.children.append(node)
mapper(Node, nodes, properties={
- 'children':relationship(Node, lazy='subquery', join_depth=3,
- order_by=nodes.c.id),
- 'data':deferred(nodes.c.data)
+ 'children': relationship(Node, lazy='subquery', join_depth=3,
+ order_by=nodes.c.id),
+ 'data': deferred(nodes.c.data)
})
sess = create_session()
n1 = Node(data='n1')
eq_(
Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
sess.query(Node).order_by(Node.id).first(),
- )
+ )
self.assert_sql_count(testing.db, go, 6)
sess.expunge_all()
def go():
eq_(Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
- sess.query(Node).options(undefer('data')).order_by(Node.id).first())
+ sess.query(Node).options(undefer('data')).order_by(Node.id)
+ .first())
self.assert_sql_count(testing.db, go, 5)
sess.expunge_all()
def go():
eq_(Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
sess.query(Node).options(undefer('data'),
- undefer('children.data')).first())
+ undefer('children.data')).first())
self.assert_sql_count(testing.db, go, 3)
-
def test_options(self):
nodes = self.tables.nodes
self.children.append(node)
mapper(Node, nodes, properties={
- 'children':relationship(Node, order_by=nodes.c.id)
+ 'children': relationship(Node, order_by=nodes.c.id)
})
sess = create_session()
n1 = Node(data='n1')
sess.add(n1)
sess.flush()
sess.expunge_all()
+
def go():
d = sess.query(Node).filter_by(data='n1').order_by(Node.id).\
- options(subqueryload_all('children.children')).first()
+ options(subqueryload_all('children.children')).first()
eq_(Node(data='n1', children=[
Node(data='n11'),
Node(data='n12', children=[
self.children.append(node)
mapper(Node, nodes, properties={
- 'children':relationship(Node, lazy='subquery')
+ 'children': relationship(Node, lazy='subquery')
})
sess = create_session()
n1 = Node(data='n1')
sess.add(n2)
sess.flush()
sess.expunge_all()
+
def go():
- d = sess.query(Node).filter(Node.data.in_(['n1', 'n2'])).order_by(Node.data).all()
+ d = sess.query(Node).filter(Node.data.in_(
+ ['n1', 'n2'])).order_by(Node.data).all()
eq_([
Node(data='n1', children=[
Node(data='n11'),
], d)
self.assert_sql_count(testing.db, go, 4)
+
class InheritanceToRelatedTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('foo', metadata,
- Column("id", Integer, primary_key=True),
- Column("type", String(50)),
- Column("related_id", Integer, ForeignKey("related.id"))
- )
+ Column("id", Integer, primary_key=True),
+ Column("type", String(50)),
+ Column("related_id", Integer, ForeignKey("related.id")))
Table("bar", metadata,
- Column("id", Integer, ForeignKey('foo.id'), primary_key=True),
- )
+ Column("id", Integer, ForeignKey('foo.id'), primary_key=True))
Table("baz", metadata,
- Column("id", Integer, ForeignKey('foo.id'), primary_key=True),
- )
+ Column("id", Integer, ForeignKey('foo.id'), primary_key=True))
Table("related", metadata,
- Column("id", Integer, primary_key=True),
- )
+ Column("id", Integer, primary_key=True))
@classmethod
def setup_classes(cls):
class Foo(cls.Comparable):
pass
+
class Bar(Foo):
pass
+
class Baz(Foo):
pass
+
class Related(cls.Comparable):
pass
(2,)
]
)
+
@classmethod
def setup_mappers(cls):
mapper(cls.classes.Foo, cls.tables.foo, properties={
'related': relationship(cls.classes.Related)
}, polymorphic_on=cls.tables.foo.c.type)
mapper(cls.classes.Bar, cls.tables.bar, polymorphic_identity='bar',
- inherits=cls.classes.Foo)
+ inherits=cls.classes.Foo)
mapper(cls.classes.Baz, cls.tables.baz, polymorphic_identity='baz',
- inherits=cls.classes.Foo)
+ inherits=cls.classes.Foo)
mapper(cls.classes.Related, cls.tables.related)
def test_caches_query_per_base_subq(self):
Foo, Bar, Baz, Related = self.classes.Foo, self.classes.Bar, \
- self.classes.Baz, self.classes.Related
+ self.classes.Baz, self.classes.Related
s = Session(testing.db)
+
def go():
eq_(
- s.query(Foo).with_polymorphic([Bar, Baz]).\
- order_by(Foo.id).\
- options(subqueryload(Foo.related)).all(),
+ s.query(Foo).with_polymorphic([Bar, Baz]).
+ order_by(Foo.id).
+ options(subqueryload(Foo.related)).all(),
[
Bar(id=1, related=Related(id=1)),
Bar(id=2, related=Related(id=2)),
def test_caches_query_per_base_joined(self):
# technically this should be in test_eager_relations
Foo, Bar, Baz, Related = self.classes.Foo, self.classes.Bar, \
- self.classes.Baz, self.classes.Related
+ self.classes.Baz, self.classes.Related
s = Session(testing.db)
+
def go():
eq_(
- s.query(Foo).with_polymorphic([Bar, Baz]).\
- order_by(Foo.id).\
- options(joinedload(Foo.related)).all(),
+ s.query(Foo).with_polymorphic([Bar, Baz]).
+ order_by(Foo.id).
+ options(joinedload(Foo.related)).all(),
[
Bar(id=1, related=Related(id=1)),
Bar(id=2, related=Related(id=2)),
)
self.assert_sql_count(testing.db, go, 1)
+
class CyclicalInheritingEagerTestOne(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('c1', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('c2', String(30)),
- Column('type', String(30))
- )
+ Column('c1', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('c2', String(30)),
+ Column('type', String(30)))
Table('t2', metadata,
- Column('c1', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('c2', String(30)),
- Column('type', String(30)),
- Column('t1.id', Integer, ForeignKey('t1.c1')))
+ Column('c1', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('c2', String(30)),
+ Column('type', String(30)),
+ Column('t1.id', Integer, ForeignKey('t1.c1')))
def test_basic(self):
t2, t1 = self.tables.t2, self.tables.t1
pass
mapper(T, t1, polymorphic_on=t1.c.type, polymorphic_identity='t1')
- mapper(SubT, None, inherits=T, polymorphic_identity='subt1', properties={
- 't2s': relationship(SubT2, lazy='subquery',
- backref=sa.orm.backref('subt', lazy='subquery'))
- })
+ mapper(SubT, None, inherits=T, polymorphic_identity='subt1',
+ properties={'t2s': relationship(
+ SubT2, lazy='subquery',
+ backref=sa.orm.backref('subt', lazy='subquery'))})
mapper(T2, t2, polymorphic_on=t2.c.type, polymorphic_identity='t2')
mapper(SubT2, None, inherits=T2, polymorphic_identity='subt2')
# testing a particular endless loop condition in eager load setup
create_session().query(SubT).all()
+
class CyclicalInheritingEagerTestTwo(fixtures.DeclarativeMappedTest,
- testing.AssertsCompiledSQL):
+ testing.AssertsCompiledSQL):
__dialect__ = 'default'
@classmethod
def setup_classes(cls):
Base = cls.DeclarativeBasic
+
class PersistentObject(Base):
__tablename__ = 'persistent'
- id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
class Movie(PersistentObject):
__tablename__ = 'movie'
movies = relationship("Movie", foreign_keys=Movie.director_id)
name = Column(String(50))
-
def test_from_subclass(self):
Director = self.classes.Director
ctx = s.query(Director).options(subqueryload('*'))._compile_context()
q = ctx.attributes[('subquery',
- (inspect(Director), inspect(Director).attrs.movies))]
+ (inspect(Director),
+ inspect(Director).attrs.movies))]
self.assert_compile(q,
- "SELECT movie.id AS movie_id, persistent.id AS persistent_id, "
- "movie.director_id AS movie_director_id, "
- "movie.title AS movie_title, "
- "anon_1.director_id AS anon_1_director_id "
- "FROM (SELECT director.id AS director_id "
- "FROM persistent JOIN director "
- "ON persistent.id = director.id) AS anon_1 "
- "JOIN (persistent JOIN movie ON persistent.id = movie.id) "
- "ON anon_1.director_id = movie.director_id "
- "ORDER BY anon_1.director_id",
- dialect="default"
- )
+ "SELECT movie.id AS movie_id, "
+ "persistent.id AS persistent_id, "
+ "movie.director_id AS movie_director_id, "
+ "movie.title AS movie_title, "
+ "anon_1.director_id AS anon_1_director_id "
+ "FROM (SELECT director.id AS director_id "
+ "FROM persistent JOIN director "
+ "ON persistent.id = director.id) AS anon_1 "
+ "JOIN (persistent JOIN movie "
+ "ON persistent.id = movie.id) "
+ "ON anon_1.director_id = movie.director_id "
+ "ORDER BY anon_1.director_id",
+ dialect="default"
+ )
def test_integrate(self):
Director = self.classes.Director
class Director(Base):
__tablename__ = 'director'
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
class DirectorPhoto(Base):
__tablename__ = 'director_photo'
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
path = Column(String(255))
director_id = Column(Integer, ForeignKey('director.id'))
director = relationship(Director, backref="photos")
class Movie(Base):
__tablename__ = 'movie'
- id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
director_id = Column(Integer, ForeignKey('director.id'))
director = relationship(Director, backref="movies")
title = Column(String(50))
class Credit(Base):
__tablename__ = 'credit'
- id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
movie_id = Column(Integer, ForeignKey('movie.id'))
@classmethod
self._run_test_m2o(None, False)
def _run_test_m2o(self,
- director_strategy_level,
- photo_strategy_level):
+ director_strategy_level,
+ photo_strategy_level):
# test where the innermost is m2o, e.g.
# Movie->director
'FROM movie) AS anon_1 '
'JOIN director ON director.id = anon_1.movie_director_id '
'ORDER BY anon_1.movie_director_id' % (
- " DISTINCT" if expect_distinct else "")
+ " DISTINCT" if expect_distinct else "")
)
ctx2 = q2._compile_context()
'director_1.id AS director_1_id '
'FROM (SELECT%s movie.director_id AS movie_director_id '
'FROM movie) AS anon_1 '
- 'JOIN director AS director_1 ON director_1.id = anon_1.movie_director_id '
- 'JOIN director_photo ON director_1.id = director_photo.director_id '
+ 'JOIN director AS director_1 '
+ 'ON director_1.id = anon_1.movie_director_id '
+ 'JOIN director_photo '
+ 'ON director_1.id = director_photo.director_id '
'ORDER BY director_1.id' % (
- " DISTINCT" if expect_distinct else "")
+ " DISTINCT" if expect_distinct else "")
)
result = s.execute(q3)
rows = result.fetchall()
(2, '/2.jpg', 1, 1),
]))
-
movies = q.all()
# check number of persistent objects in session
class Parent(ComparableEntity, Base):
__tablename__ = 'parent'
- id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
name = Column(String(20))
children = relationship('Child',
- back_populates='parent',
- lazy='noload'
- )
+ back_populates='parent',
+ lazy='noload'
+ )
class Child(ComparableEntity, Base):
__tablename__ = 'child'
- id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer, primary_key=True,
+ test_needs_autoincrement=True)
name = Column(String(20))
parent_id = Column(Integer, ForeignKey('parent.id'))
- parent = relationship('Parent', back_populates='children', lazy='joined')
+ parent = relationship(
+ 'Parent', back_populates='children', lazy='joined')
@classmethod
def insert_data(cls):
s = Session()
- # here we have Parent->subqueryload->Child->joinedload->parent->noload->children.
+ # here we have
+ # Parent->subqueryload->Child->joinedload->parent->noload->children.
# the actual subqueryload has to emit *after* we've started populating
# Parent->subqueryload->child.
parent = s.query(Parent).options([subqueryload('children')]).first()
def test_twolevel_subquery_w_polymorphic(self):
Foo, Bar = self.classes('Foo', 'Bar')
- I = with_polymorphic(Foo, "*", aliased=True)
- attr1 = Foo.foo.of_type(I)
- attr2 = I.foo
+ r = with_polymorphic(Foo, "*", aliased=True)
+ attr1 = Foo.foo.of_type(r)
+ attr2 = r.foo
s = Session()
q = s.query(Foo).filter(Foo.id == 2).options(
from sqlalchemy.testing import fixtures
from sqlalchemy import Integer, String, ForeignKey, func
from sqlalchemy.orm import mapper, relationship, backref, \
- create_session, unitofwork, attributes,\
- Session, class_mapper, sync, exc as orm_exc
+ create_session, unitofwork, attributes,\
+ Session, class_mapper, sync, exc as orm_exc
class AssertsUOW(object):
uow.register_object(d, isdelete=True)
return uow
+
class SyncTest(fixtures.MappedTest,
- testing.AssertsExecutionResults, AssertsUOW):
+ testing.AssertsExecutionResults, AssertsUOW):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', Integer, primary_key=True),
- Column('foo', Integer)
- )
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer))
Table('t2', metadata,
- Column('id', Integer, ForeignKey('t1.id'), primary_key=True),
- Column('t1id', Integer, ForeignKey('t1.id')),
- )
+ Column('id', Integer, ForeignKey('t1.id'), primary_key=True),
+ Column('t1id', Integer, ForeignKey('t1.id')))
@classmethod
def setup_classes(cls):
class A(cls.Basic):
pass
+
class B(cls.Basic):
pass
session = create_session()
uowcommit = self._get_test_uow(session)
a_mapper = class_mapper(A)
- b_mapper= class_mapper(B)
+ b_mapper = class_mapper(B)
self.a1 = a1 = A()
self.b1 = b1 = B()
uowcommit = self._get_test_uow(session)
"Can't execute sync rule for source column 't2.id'; "
r"mapper 'Mapper\|A\|t1' does not map this column.",
sync.populate,
- a1,
- a_mapper,
- b1,
- b_mapper,
- pairs,
- uowcommit, False
+ a1,
+ a_mapper,
+ b1,
+ b_mapper,
+ pairs,
+ uowcommit, False
)
def test_populate_unmapped_dest(self):
pairs = [(a_mapper.c.id, a_mapper.c.id,)]
assert_raises_message(
orm_exc.UnmappedColumnError,
- "Can't execute sync rule for destination "
- r"column 't1.id'; mapper 'Mapper\|B\|t2' does not map this column.",
+ r"Can't execute sync rule for destination "
+ r"column 't1.id'; "
+ r"mapper 'Mapper\|B\|t2' does not map this column.",
sync.populate,
- a1,
- a_mapper,
- b1,
- b_mapper,
- pairs,
- uowcommit, False
+ a1,
+ a_mapper,
+ b1,
+ b_mapper,
+ pairs,
+ uowcommit, False
)
def test_clear(self):
pairs = [(a_mapper.c.id, b_mapper.c.id,)]
dest = {}
sync.populate_dict(a1, a_mapper, dest, pairs)
- eq_(dest, {'id':10})
+ eq_(dest, {'id': 10})
def test_populate_dict_unmapped(self):
uowcommit, a1, b1, a_mapper, b_mapper = self._fixture()
a1._commit_all(a1.dict)
a1.obj().foo = 12
pairs = [(a_mapper.c.id, b_mapper.c.id,),
- (a_mapper.c.foo, b_mapper.c.id)]
+ (a_mapper.c.foo, b_mapper.c.id)]
eq_(
sync.source_modified(uowcommit, a1, a_mapper, pairs),
True
a1.obj().foo = 10
a1._commit_all(a1.dict)
pairs = [(a_mapper.c.id, b_mapper.c.id,),
- (a_mapper.c.foo, b_mapper.c.id)]
+ (a_mapper.c.foo, b_mapper.c.id)]
eq_(
sync.source_modified(uowcommit, a1, a_mapper, pairs),
False
from test.orm._fixtures import FixtureTest
from sqlalchemy import inspect
+
class SessionTransactionTest(FixtureTest):
run_inserts = None
__backend__ = True
trans.commit()
assert len(sess.query(User).all()) == 1
- except:
+ except Exception:
conn.close()
raise
trans2 = sess.begin(subtransactions=True)
try:
raise Exception("test")
- except:
+ except Exception:
trans2.rollback(_capture_exception=True)
assert_raises_message(
sa_exc.InvalidRequestError,
class UnitOfWorkTest(object):
pass
+
class HistoryTest(_fixtures.FixtureTest):
run_inserts = None
def setup_classes(cls):
class User(cls.Comparable):
pass
+
class Address(cls.Comparable):
pass
def test_backref(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
am = mapper(Address, addresses)
m = mapper(User, users, properties=dict(
- addresses = relationship(am, backref='user', lazy='joined')))
+ addresses=relationship(am, backref='user', lazy='joined')))
session = create_session(autocommit=False)
assert u.addresses[0].user == u
session.close()
+
class UnicodeTest(fixtures.MappedTest):
__requires__ = ('unicode_connections',)
sa.Unicode(50, collation="utf8_unicode_ci"), "mysql")
Table('uni_t1', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('txt', uni_type, unique=True))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('txt', uni_type, unique=True))
Table('uni_t2', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('txt', uni_type, ForeignKey('uni_t1')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('txt', uni_type, ForeignKey('uni_t1')))
@classmethod
def setup_classes(cls):
class Test(cls.Basic):
pass
+
class Test2(cls.Basic):
pass
def test_relationship(self):
Test, uni_t2, uni_t1, Test2 = (self.classes.Test,
- self.tables.uni_t2,
- self.tables.uni_t1,
- self.classes.Test2)
+ self.tables.uni_t2,
+ self.tables.uni_t1,
+ self.classes.Test2)
mapper(Test, uni_t1, properties={
't2s': relationship(Test2)})
t1 = session.query(Test).filter_by(id=t1.id).one()
assert len(t1.t2s) == 2
+
class UnicodeSchemaTest(fixtures.MappedTest):
__requires__ = ('unicode_connections', 'unicode_ddl',)
@classmethod
def define_tables(cls, metadata):
t1 = Table('unitable1', metadata,
- Column(u('méil'), Integer, primary_key=True, key='a', test_needs_autoincrement=True),
- Column(ue('\u6e2c\u8a66'), Integer, key='b'),
- Column('type', String(20)),
- test_needs_fk=True,
- test_needs_autoincrement=True)
+ Column(u('méil'), Integer, primary_key=True,
+ key='a', test_needs_autoincrement=True),
+ Column(ue('\u6e2c\u8a66'), Integer, key='b'),
+ Column('type', String(20)),
+ test_needs_fk=True,
+ test_needs_autoincrement=True)
t2 = Table(u('Unitéble2'), metadata,
- Column(u('méil'), Integer, primary_key=True, key="cc", test_needs_autoincrement=True),
- Column(ue('\u6e2c\u8a66'), Integer,
- ForeignKey('unitable1.a'), key="d"),
- Column(ue('\u6e2c\u8a66_2'), Integer, key="e"),
- test_needs_fk=True,
- test_needs_autoincrement=True)
+ Column(u('méil'), Integer, primary_key=True,
+ key="cc", test_needs_autoincrement=True),
+ Column(ue('\u6e2c\u8a66'), Integer,
+ ForeignKey('unitable1.a'), key="d"),
+ Column(ue('\u6e2c\u8a66_2'), Integer, key="e"),
+ test_needs_fk=True,
+ test_needs_autoincrement=True)
cls.tables['t1'] = t1
cls.tables['t2'] = t2
def teardown_class(cls):
super(UnicodeSchemaTest, cls).teardown_class()
- @testing.fails_on('mssql+pyodbc',
- 'pyodbc returns a non unicode encoding of the results description.')
+ @testing.fails_on(
+ 'mssql+pyodbc',
+ 'pyodbc returns a non unicode encoding of the results description.')
def test_mapping(self):
t2, t1 = self.tables.t2, self.tables.t1
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
mapper(A, t1, properties={
- 't2s':relationship(B)})
+ 't2s': relationship(B)})
mapper(B, t2)
a1 = A()
assert new_a1.t2s[0].d == b1.d
session.expunge_all()
- @testing.fails_on('mssql+pyodbc',
- 'pyodbc returns a non unicode encoding of the results description.')
+ @testing.fails_on(
+ 'mssql+pyodbc',
+ 'pyodbc returns a non unicode encoding of the results description.')
def test_inheritance_mapping(self):
t2, t1 = self.tables.t2, self.tables.t1
class A(fixtures.ComparableEntity):
pass
+
class B(A):
pass
eq_([A(b=5), B(e=7)], session.query(A).all())
+
class BinaryHistTest(fixtures.MappedTest, testing.AssertsExecutionResults):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', sa.Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', sa.LargeBinary),
- )
+ Column('id', sa.Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', sa.LargeBinary))
@classmethod
def setup_classes(cls):
sa.orm.attributes.get_history(f1, "data"),
((), [data], ())
)
+
def go():
s.flush()
self.assert_sql_count(testing.db, go, 0)
+
class PKTest(fixtures.MappedTest):
@classmethod
Column('data', String(30)))
Table('multipk3', metadata,
Column('pri_code', String(30), key='primary', primary_key=True),
- Column('sec_code', String(30), key='secondary', primary_key=True),
- Column('date_assigned', sa.Date, key='assigned', primary_key=True),
+ Column('sec_code', String(30), key='secondary',
+ primary_key=True),
+ Column('date_assigned', sa.Date,
+ key='assigned', primary_key=True),
Column('data', String(30)))
@classmethod
mapper(Entry, multipk3)
- e = Entry(primary= 'pk1', secondary='pk2',
- assigned=datetime.date.today(), data='some more data')
+ e = Entry(primary='pk1', secondary='pk2',
+ assigned=datetime.date.today(), data='some more data')
session = create_session()
session.add(e)
def setup_classes(cls):
class Person(cls.Basic):
pass
+
class PersonSite(cls.Basic):
pass
def test_basic(self):
peoplesites, PersonSite, Person, people = (self.tables.peoplesites,
- self.classes.PersonSite,
- self.classes.Person,
- self.tables.people)
+ self.classes.PersonSite,
+ self.classes.Person,
+ self.tables.people)
m1 = mapper(PersonSite, peoplesites)
m2 = mapper(Person, people, properties={
- 'sites' : relationship(PersonSite)})
+ 'sites': relationship(PersonSite)})
sa.orm.configure_mappers()
eq_(list(m2.get_property('sites').synchronize_pairs),
session.flush()
p_count = select([func.count('*')]).where(
- people.c.person=='im the key').scalar()
+ people.c.person == 'im the key').scalar()
eq_(p_count, 1)
- eq_(select([func.count('*')]).where(peoplesites.c.person=='im the key').scalar(), 1)
+ eq_(select([func.count('*')]).where(peoplesites.c.person ==
+ 'im the key').scalar(), 1)
class ClauseAttributesTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('users_t', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(30)),
- Column('counter', Integer, default=1))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ Column('counter', Integer, default=1))
Table('boolean_t', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('value', Boolean),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('value', Boolean))
@classmethod
def setup_classes(cls):
assert_raises_message(
TypeError,
"Boolean value of this clause is not defined",
- bool, None == sa.false()
+ bool, None == sa.false() # noqa
)
s = create_session()
hb = HasBoolean(value=None)
@classmethod
def define_tables(cls, metadata):
Table('mytable', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(30)),
test_needs_fk=True)
Table('myothertable', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('parent_id', Integer),
Column('data', String(30)),
sa.ForeignKeyConstraint(['parent_id'],
def setup_classes(cls):
class MyClass(cls.Basic):
pass
+
class MyOtherClass(cls.Basic):
pass
def test_basic(self):
- myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
- self.classes.MyClass,
- self.classes.MyOtherClass,
- self.tables.mytable)
+ myothertable, MyClass, MyOtherClass, mytable = (
+ self.tables.myothertable,
+ self.classes.MyClass,
+ self.classes.MyOtherClass,
+ self.tables.mytable)
mapper(MyOtherClass, myothertable)
mapper(MyClass, mytable, properties={
- 'children':relationship(MyOtherClass,
- passive_deletes=True,
- cascade="all")})
+ 'children': relationship(MyOtherClass,
+ passive_deletes=True,
+ cascade="all")})
session = create_session()
mc = MyClass()
mc.children.append(MyOtherClass())
eq_(select([func.count('*')]).select_from(mytable).scalar(), 0)
eq_(select([func.count('*')]).select_from(myothertable).scalar(), 0)
- @testing.emits_warning(r".*'passive_deletes' is normally configured on one-to-many")
+ @testing.emits_warning(
+ r".*'passive_deletes' is normally configured on one-to-many")
def test_backwards_pd(self):
"""Test that passive_deletes=True disables a delete from an m2o.
"""
- myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
- self.classes.MyClass,
- self.classes.MyOtherClass,
- self.tables.mytable)
+ myothertable, MyClass, MyOtherClass, mytable = (
+ self.tables.myothertable,
+ self.classes.MyClass,
+ self.classes.MyOtherClass,
+ self.tables.mytable)
mapper(MyOtherClass, myothertable, properties={
- 'myclass':relationship(MyClass, cascade="all, delete", passive_deletes=True)
+ 'myclass': relationship(MyClass, cascade="all, delete",
+ passive_deletes=True)
})
mapper(MyClass, mytable)
eq_(select([func.count('*')]).select_from(myothertable).scalar(), 0)
def test_aaa_m2o_emits_warning(self):
- myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
- self.classes.MyClass,
- self.classes.MyOtherClass,
- self.tables.mytable)
+ myothertable, MyClass, MyOtherClass, mytable = (
+ self.tables.myothertable,
+ self.classes.MyClass,
+ self.classes.MyOtherClass,
+ self.tables.mytable)
mapper(MyOtherClass, myothertable, properties={
- 'myclass':relationship(MyClass, cascade="all, delete", passive_deletes=True)
+ 'myclass': relationship(MyClass, cascade="all, delete",
+ passive_deletes=True)
})
mapper(MyClass, mytable)
assert_raises(sa.exc.SAWarning, sa.orm.configure_mappers)
+
class BatchDeleteIgnoresRowcountTest(fixtures.DeclarativeMappedTest):
__requires__ = ('foreign_keys',)
+
@classmethod
def setup_classes(cls):
class A(cls.DeclarativeBasic):
# no issue with multi-row count here
session.flush()
+
class ExtraPassiveDeletesTest(fixtures.MappedTest):
__requires__ = ('foreign_keys',)
@classmethod
def define_tables(cls, metadata):
Table('mytable', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(30)),
test_needs_fk=True)
Table('myothertable', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('parent_id', Integer),
Column('data', String(30)),
# no CASCADE, the same as ON DELETE RESTRICT
def setup_classes(cls):
class MyClass(cls.Basic):
pass
+
class MyOtherClass(cls.Basic):
pass
def test_assertions(self):
- myothertable, MyOtherClass = self.tables.myothertable, self.classes.MyOtherClass
+ myothertable, MyOtherClass = (self.tables.myothertable,
+ self.classes.MyOtherClass)
mytable, MyClass = self.tables.mytable, self.classes.MyClass
mapper(MyClass, mytable, properties={
'foo': relationship(MyOtherClass,
- passive_deletes='all',
- cascade="all")
- })
+ passive_deletes='all',
+ cascade="all")
+ })
mapper(MyOtherClass, myothertable)
assert_raises_message(
sa.exc.ArgumentError,
- "On MyClass.foo, can't set passive_deletes='all' in conjunction with 'delete' "
+ "On MyClass.foo, can't set passive_deletes='all' in conjunction "
+ "with 'delete' "
"or 'delete-orphan' cascade",
sa.orm.configure_mappers
)
def test_extra_passive(self):
myothertable, MyClass, MyOtherClass, mytable = (
- self.tables.myothertable,
- self.classes.MyClass,
- self.classes.MyOtherClass,
- self.tables.mytable)
+ self.tables.myothertable,
+ self.classes.MyClass,
+ self.classes.MyOtherClass,
+ self.tables.mytable)
mapper(MyOtherClass, myothertable)
mapper(MyClass, mytable, properties={
'children': relationship(MyOtherClass,
- passive_deletes='all',
- cascade="save-update")})
+ passive_deletes='all',
+ cascade="save-update")})
session = create_session()
mc = MyClass()
assert_raises(sa.exc.DBAPIError, session.flush)
def test_extra_passive_2(self):
- myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
- self.classes.MyClass,
- self.classes.MyOtherClass,
- self.tables.mytable)
+ myothertable, MyClass, MyOtherClass, mytable = (
+ self.tables.myothertable,
+ self.classes.MyClass,
+ self.classes.MyOtherClass,
+ self.tables.mytable)
mapper(MyOtherClass, myothertable)
mapper(MyClass, mytable, properties={
'children': relationship(MyOtherClass,
- passive_deletes='all',
- cascade="save-update")})
+ passive_deletes='all',
+ cascade="save-update")})
session = create_session()
mc = MyClass()
assert_raises(sa.exc.DBAPIError, session.flush)
def test_dont_emit(self):
- myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
- self.classes.MyClass,
- self.classes.MyOtherClass,
- self.tables.mytable)
+ myothertable, MyClass, MyOtherClass, mytable = (
+ self.tables.myothertable,
+ self.classes.MyClass,
+ self.classes.MyOtherClass,
+ self.tables.mytable)
mapper(MyOtherClass, myothertable)
mapper(MyClass, mytable, properties={
'children': relationship(MyOtherClass,
- passive_deletes='all',
- cascade="save-update")})
+ passive_deletes='all',
+ cascade="save-update")})
session = Session()
mc = MyClass()
session.add(mc)
# no load for "children" should occur
self.assert_sql_count(testing.db, session.flush, 1)
+
class ColumnCollisionTest(fixtures.MappedTest):
"""Ensure the mapper doesn't break bind param naming rules on flush."""
@classmethod
def define_tables(cls, metadata):
Table('book', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('book_id', String(50)),
- Column('title', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('book_id', String(50)),
+ Column('title', String(50)))
def test_naming(self):
book = self.tables.book
)
-
class DefaultTest(fixtures.MappedTest):
"""Exercise mappings on columns with DefaultGenerators.
@classmethod
def define_tables(cls, metadata):
- use_string_defaults = testing.against('postgresql', 'oracle', 'sqlite', 'mssql')
+ use_string_defaults = testing.against(
+ 'postgresql', 'oracle', 'sqlite', 'mssql')
if use_string_defaults:
hohotype = String(30)
cls.other['althohoval'] = althohoval
dt = Table('default_t', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('hoho', hohotype, server_default=str(hohoval)),
- Column('counter', Integer, default=sa.func.char_length("1234567", type_=Integer)),
- Column('foober', String(30), default="im foober", onupdate="im the update"),
- mysql_engine='MyISAM')
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('hoho', hohotype, server_default=str(hohoval)),
+ Column('counter', Integer, default=sa.func.char_length(
+ "1234567", type_=Integer)),
+ Column('foober', String(30), default="im foober",
+ onupdate="im the update"),
+ mysql_engine='MyISAM')
st = Table('secondary_table', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('data', String(50)),
- mysql_engine='MyISAM')
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)),
+ mysql_engine='MyISAM')
if testing.against('postgresql', 'oracle'):
dt.append_column(
def setup_classes(cls):
class Hoho(cls.Comparable):
pass
+
class Secondary(cls.Comparable):
pass
@testing.fails_on('firebird', 'Data type unknown on the parameter')
def test_insert(self):
althohoval, hohoval, default_t, Hoho = (self.other.althohoval,
- self.other.hohoval,
- self.tables.default_t,
- self.classes.Hoho)
+ self.other.hohoval,
+ self.tables.default_t,
+ self.classes.Hoho)
mapper(Hoho, default_t)
eq_(h3.hoho, althohoval)
self.assert_(h2.hoho == h4.hoho == h5.hoho == hohoval)
self.assert_(h3.counter == h2.counter == 12)
- self.assert_(h1.counter == h4.counter == h5.counter == 7)
+ self.assert_(h1.counter == h4.counter == h5.counter == 7)
self.assert_(h2.foober == h3.foober == h4.foober == 'im foober')
eq_(h5.foober, 'im the new foober')
@testing.fails_on("oracle+cx_oracle", "seems like a cx_oracle bug")
def test_eager_defaults(self):
hohoval, default_t, Hoho = (self.other.hohoval,
- self.tables.default_t,
- self.classes.Hoho)
+ self.tables.default_t,
+ self.classes.Hoho)
Secondary = self.classes.Secondary
mapper(Hoho, default_t, eager_defaults=True, properties={
- "sec": relationship(Secondary),
- "syn": sa.orm.synonym(default_t.c.counter)
- })
-
+ "sec": relationship(Secondary),
+ "syn": sa.orm.synonym(default_t.c.counter)
+ })
mapper(Secondary, self.tables.secondary_table)
h1 = Hoho()
eq_(h2.hoho, hohoval)
eq_(h2.counter, 5)
-
def test_insert_nopostfetch(self):
default_t, Hoho = self.tables.default_t, self.classes.Hoho
def test_used_in_relationship(self):
"""A server-side default can be used as the target of a foreign key"""
- Hoho, hohoval, default_t, secondary_table, Secondary = (self.classes.Hoho,
- self.other.hohoval,
- self.tables.default_t,
- self.tables.secondary_table,
- self.classes.Secondary)
-
+ Hoho, hohoval, default_t, secondary_table, Secondary = (
+ self.classes.Hoho,
+ self.other.hohoval,
+ self.tables.default_t,
+ self.tables.secondary_table,
+ self.classes.Secondary)
mapper(Hoho, default_t, properties={
- 'secondaries':relationship(Secondary, order_by=secondary_table.c.id)})
+ 'secondaries': relationship(Secondary,
+ order_by=secondary_table.c.id)})
mapper(Secondary, secondary_table)
h1 = Hoho()
eq_(session.query(Hoho).get(h1.id),
Hoho(hoho=hohoval,
secondaries=[
- Secondary(data='s1')]))
+ Secondary(data='s1')]))
h1 = session.query(Hoho).get(h1.id)
h1.secondaries.append(Secondary(data='s2'))
eq_(session.query(Hoho).get(h1.id),
Hoho(hoho=hohoval,
secondaries=[
- Secondary(data='s1'),
- Secondary(data='s2')]))
+ Secondary(data='s1'),
+ Secondary(data='s2')]))
+
class ColumnPropertyTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('data', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('a', String(50)),
- Column('b', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('a', String(50)),
+ Column('b', String(50)))
Table('subdata', metadata,
- Column('id', Integer, ForeignKey('data.id'), primary_key=True),
- Column('c', String(50)),
- )
+ Column('id', Integer, ForeignKey('data.id'), primary_key=True),
+ Column('c', String(50)))
@classmethod
def setup_mappers(cls):
Data, data = self.classes.Data, self.tables.data
mapper(Data, data, properties={
- 'aplusb':column_property(data.c.a + literal_column("' '") + data.c.b)
+ 'aplusb': column_property(
+ data.c.a + literal_column("' '") + data.c.b)
})
self._test(True)
Data, data = self.classes.Data, self.tables.data
mapper(Data, data, properties={
- 'aplusb':column_property(data.c.a + literal_column("' '") + data.c.b,
- expire_on_flush=False)
+ 'aplusb': column_property(
+ data.c.a + literal_column("' '") + data.c.b,
+ expire_on_flush=False)
})
self._test(False)
Data, data = self.classes.Data, self.tables.data
m = mapper(Data, data)
- m.add_property('aplusb', column_property(data.c.a + literal_column("' '") + data.c.b))
+ m.add_property('aplusb', column_property(
+ data.c.a + literal_column("' '") + data.c.b))
self._test(True)
def test_with_inheritance(self):
subdata, data, Data = (self.tables.subdata,
- self.tables.data,
- self.classes.Data)
+ self.tables.data,
+ self.classes.Data)
class SubData(Data):
pass
mapper(Data, data, properties={
- 'aplusb':column_property(data.c.a + literal_column("' '") + data.c.b)
+ 'aplusb': column_property(
+ data.c.a + literal_column("' '") + data.c.b)
})
mapper(SubData, subdata, inherits=Data)
else:
eq_(d1.aplusb, "hello there")
-
d1.b = 'foobar'
d1.aplusb = 'im setting this explicitly'
sess.flush()
eq_(d1.aplusb, "im setting this explicitly")
+
class OneToManyTest(_fixtures.FixtureTest):
run_inserts = None
"""Basic save of one to many."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='select')
+ addresses=relationship(mapper(Address, addresses), lazy='select')
))
- u = User(name= 'one2manytester')
+ u = User(name='one2manytester')
a = Address(email_address='one2many@test.org')
u.addresses.append(a)
"""Modifying the child items of an object."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='select')))
+ addresses=relationship(mapper(Address, addresses), lazy='select')))
u1 = User(name='user1')
u1.addresses = []
("UPDATE addresses SET user_id=:user_id "
"WHERE addresses.id = :addresses_id",
[
- {'user_id': None, 'addresses_id': a1.id},
- {'user_id': u1.id, 'addresses_id': a3.id}
- ]),
+ {'user_id': None, 'addresses_id': a1.id},
+ {'user_id': u1.id, 'addresses_id': a3.id}
+ ]),
- ])
+ ])
def test_child_move(self):
"""Moving a child from one parent to another, with a delete.
"""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='select')))
+ addresses=relationship(mapper(Address, addresses), lazy='select')))
u1 = User(name='user1')
u2 = User(name='user2')
def test_child_move_2(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='select')))
+ addresses=relationship(mapper(Address, addresses), lazy='select')))
u1 = User(name='user1')
u2 = User(name='user2')
def test_o2m_delete_parent(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
m = mapper(User, users, properties=dict(
- address = relationship(mapper(Address, addresses),
- lazy='select',
- uselist=False)))
+ address=relationship(mapper(Address, addresses),
+ lazy='select',
+ uselist=False)))
u = User(name='one2onetester')
a = Address(email_address='myonlyaddress@foo.com')
assert a.id is not None
assert a.user_id is None
assert sa.orm.attributes.instance_state(a).key in session.identity_map
- assert sa.orm.attributes.instance_state(u).key not in session.identity_map
+ assert sa.orm.attributes.instance_state(
+ u).key not in session.identity_map
def test_one_to_one(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
m = mapper(User, users, properties=dict(
- address = relationship(mapper(Address, addresses),
- lazy='select',
- uselist=False)))
+ address=relationship(mapper(Address, addresses),
+ lazy='select',
+ uselist=False)))
u = User(name='one2onetester')
u.address = Address(email_address='myonlyaddress@foo.com')
def test_bidirectional(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
m1 = mapper(User, users)
m2 = mapper(Address, addresses, properties=dict(
- user = relationship(m1, lazy='joined', backref='addresses')))
-
+ user=relationship(m1, lazy='joined', backref='addresses')))
u = User(name='test')
a = Address(email_address='testaddress', user=u)
def test_double_relationship(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
m2 = mapper(Address, addresses)
m = mapper(User, users, properties={
- 'boston_addresses' : relationship(m2, primaryjoin=
- sa.and_(users.c.id==addresses.c.user_id,
- addresses.c.email_address.like('%boston%'))),
- 'newyork_addresses' : relationship(m2, primaryjoin=
- sa.and_(users.c.id==addresses.c.user_id,
- addresses.c.email_address.like('%newyork%')))})
+ 'boston_addresses': relationship(
+ m2,
+ primaryjoin=sa.and_(
+ users.c.id == addresses.c.user_id,
+ addresses.c.email_address.like('%boston%'))),
+ 'newyork_addresses': relationship(
+ m2,
+ primaryjoin=sa.and_(
+ users.c.id == addresses.c.user_id,
+ addresses.c.email_address.like('%newyork%')))})
u = User(name='u1')
a = Address(email_address='foo@boston.com')
session.add(u)
session.flush()
+
class SaveTest(_fixtures.FixtureTest):
run_inserts = None
class SUser(fixtures.BasicEntity):
def _get_name(self):
return "User:" + self.name
+
def _set_name(self, name):
self.name = name + ":User"
syn_name = property(_get_name, _set_name)
"""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- mapper(User, users, properties = {
+ mapper(User, users, properties={
'addresses': relationship(mapper(Address, addresses))})
u = User(name='u1')
"""a user object that also has the users mailing address."""
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
m1 = mapper(User, users)
# define a mapper for AddressUser that inherits the User.mapper, and
# joins on the id column
mapper(AddressUser, addresses, inherits=m1, properties={
- 'address_id': addresses.c.id
- })
+ 'address_id': addresses.c.id
+ })
au = AddressUser(name='u', email_address='u@e')
orders, Order = self.tables.orders, self.classes.Order
-
mapper(Order, orders, properties={
'description': sa.orm.deferred(orders.c.description)})
# assert that a set operation doesn't trigger a load operation
o = session.query(Order).filter(Order.description == 'foo').one()
+
def go():
o.description = 'hoho'
self.sql_count_(0, go)
"""
addresses, users, User = (self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.users,
+ self.classes.User)
usersaddresses = sa.join(users, addresses,
users.c.id == addresses.c.user_id)
m = mapper(User, usersaddresses,
- properties=dict(
- email = addresses.c.email_address,
- foo_id = [users.c.id, addresses.c.user_id]))
+ properties=dict(
+ email=addresses.c.email_address,
+ foo_id=[users.c.id, addresses.c.user_id]))
u = User(name='multitester', email='multi@test.org')
session = create_session()
u = session.query(User).get(id)
assert u.name == 'multitester'
- user_rows = users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
+ user_rows = users.select(users.c.id.in_(
+ [u.foo_id])).execute().fetchall()
eq_(list(user_rows[0].values()), [u.foo_id, 'multitester'])
- address_rows = addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
+ address_rows = addresses.select(
+ addresses.c.id.in_([u.id])).execute().fetchall()
eq_(list(address_rows[0].values()), [u.id, u.foo_id, 'multi@test.org'])
u.email = 'lala@hey.com'
u.name = 'imnew'
session.flush()
- user_rows = users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
+ user_rows = users.select(users.c.id.in_(
+ [u.foo_id])).execute().fetchall()
eq_(list(user_rows[0].values()), [u.foo_id, 'imnew'])
- address_rows = addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
+ address_rows = addresses.select(
+ addresses.c.id.in_([u.id])).execute().fetchall()
eq_(list(address_rows[0].values()), [u.id, u.foo_id, 'lala@hey.com'])
session.expunge_all()
"""The history lazy-fetches data when it wasn't otherwise loaded."""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, cascade="all, delete-orphan")})
+ 'addresses': relationship(Address, cascade="all, delete-orphan")})
mapper(Address, addresses)
u = User(name='u1')
users, User = self.tables.users, self.classes.User
-
names = []
+
class Events(object):
def before_insert(self, mapper, connection, instance):
self.current_instance = instance
names.append(instance.name)
+
def after_insert(self, mapper, connection, instance):
assert instance is self.current_instance
def test_m2o_one_to_one(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
# TODO: put assertion in here !!!
m = mapper(Address, addresses, properties=dict(
- user = relationship(mapper(User, users), lazy='select', uselist=False)))
+ user=relationship(mapper(User, users), lazy='select',
+ uselist=False)))
session = create_session()
data = [
- {'name': 'thesub' , 'email_address': 'bar@foo.com'},
- {'name': 'assdkfj' , 'email_address': 'thesdf@asdf.com'},
- {'name': 'n4knd' , 'email_address': 'asf3@bar.org'},
- {'name': 'v88f4' , 'email_address': 'adsd5@llala.net'},
- {'name': 'asdf8d' , 'email_address': 'theater@foo.com'}
+ {'name': 'thesub', 'email_address': 'bar@foo.com'},
+ {'name': 'assdkfj', 'email_address': 'thesdf@asdf.com'},
+ {'name': 'n4knd', 'email_address': 'asf3@bar.org'},
+ {'name': 'v88f4', 'email_address': 'adsd5@llala.net'},
+ {'name': 'asdf8d', 'email_address': 'theater@foo.com'}
]
objects = []
for elem in data:
objects[3].user = User()
objects[3].user.name = 'imnewlyadded'
self.assert_sql_execution(testing.db,
- session.flush,
- CompiledSQL("INSERT INTO users (name) VALUES (:name)",
- {'name': 'imnewlyadded'} ),
-
- AllOf(
- CompiledSQL("UPDATE addresses SET email_address=:email_address "
- "WHERE addresses.id = :addresses_id",
- lambda ctx: {'email_address': 'imnew@foo.bar',
- 'addresses_id': objects[2].id}),
- CompiledSQL("UPDATE addresses SET user_id=:user_id "
+ session.flush,
+ CompiledSQL("INSERT INTO users (name) "
+ "VALUES (:name)",
+ {'name': 'imnewlyadded'}),
+
+ AllOf(
+ CompiledSQL(
+ "UPDATE addresses "
+ "SET email_address=:email_address "
+ "WHERE addresses.id = :addresses_id",
+ lambda ctx: {
+ 'email_address': 'imnew@foo.bar',
+ 'addresses_id': objects[2].id}),
+ CompiledSQL(
+ "UPDATE addresses "
+ "SET user_id=:user_id "
"WHERE addresses.id = :addresses_id",
- lambda ctx: {'user_id': objects[3].user.id,
- 'addresses_id': objects[3].id})
- )
- )
-
- l = sa.select([users, addresses],
- sa.and_(users.c.id==addresses.c.user_id,
- addresses.c.id==a.id)).execute()
- eq_(list(l.first().values()),
+ lambda ctx: {
+ 'user_id': objects[3].user.id,
+ 'addresses_id': objects[3].id}))
+ )
+
+ result = sa.select([users, addresses],
+ sa.and_(users.c.id == addresses.c.user_id,
+ addresses.c.id == a.id)).execute()
+ eq_(list(result.first().values()),
[a.user.id, 'asdf8d', a.id, a.user_id, 'theater@foo.com'])
def test_many_to_one_1(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
m = mapper(Address, addresses, properties=dict(
- user = relationship(mapper(User, users), lazy='select')))
+ user=relationship(mapper(User, users), lazy='select')))
a1 = Address(email_address='emailaddress1')
u1 = User(name='user1')
def test_many_to_one_2(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
m = mapper(Address, addresses, properties=dict(
- user = relationship(mapper(User, users), lazy='select')))
+ user=relationship(mapper(User, users), lazy='select')))
a1 = Address(email_address='emailaddress1')
a2 = Address(email_address='emailaddress2')
def test_many_to_one_3(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
m = mapper(Address, addresses, properties=dict(
- user = relationship(mapper(User, users), lazy='select')))
+ user=relationship(mapper(User, users), lazy='select')))
a1 = Address(email_address='emailaddress1')
u1 = User(name='user1')
def test_bidirectional_no_load(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy='noload')})
+ 'addresses': relationship(Address, backref='user', lazy='noload')})
mapper(Address, addresses)
# try it on unsaved objects
run_inserts = None
def test_many_to_many(self):
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
+ keywords, items, item_keywords, Keyword, Item = (
+ self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
mapper(Keyword, keywords)
m = mapper(Item, items, properties=dict(
- keywords=relationship(Keyword,
+ keywords=relationship(Keyword,
item_keywords,
lazy='joined',
order_by=keywords.c.name)))
data = [Item,
- {'description': 'mm_item1',
- 'keywords' : (Keyword, [{'name': 'big'},
- {'name': 'green'},
- {'name': 'purple'},
- {'name': 'round'}])},
- {'description': 'mm_item2',
- 'keywords' : (Keyword, [{'name':'blue'},
- {'name':'imnew'},
- {'name':'round'},
- {'name':'small'}])},
- {'description': 'mm_item3',
- 'keywords' : (Keyword, [])},
- {'description': 'mm_item4',
- 'keywords' : (Keyword, [{'name':'big'},
- {'name':'blue'},])},
- {'description': 'mm_item5',
- 'keywords' : (Keyword, [{'name':'big'},
- {'name':'exacting'},
- {'name':'green'}])},
- {'description': 'mm_item6',
- 'keywords' : (Keyword, [{'name':'red'},
- {'name':'round'},
- {'name':'small'}])}]
+ {'description': 'mm_item1',
+ 'keywords': (Keyword, [{'name': 'big'},
+ {'name': 'green'},
+ {'name': 'purple'},
+ {'name': 'round'}])},
+ {'description': 'mm_item2',
+ 'keywords': (Keyword, [{'name': 'blue'},
+ {'name': 'imnew'},
+ {'name': 'round'},
+ {'name': 'small'}])},
+ {'description': 'mm_item3',
+ 'keywords': (Keyword, [])},
+ {'description': 'mm_item4',
+ 'keywords': (Keyword, [{'name': 'big'},
+ {'name': 'blue'}, ])},
+ {'description': 'mm_item5',
+ 'keywords': (Keyword, [{'name': 'big'},
+ {'name': 'exacting'},
+ {'name': 'green'}])},
+ {'description': 'mm_item6',
+ 'keywords': (Keyword, [{'name': 'red'},
+ {'name': 'round'},
+ {'name': 'small'}])}]
session = create_session()
session.add_all(objects)
session.flush()
- l = (session.query(Item).
- filter(Item.description.in_([e['description']
- for e in data[1:]])).
- order_by(Item.description).all())
- self.assert_result(l, *data)
+ result = (session.query(Item).
+ filter(Item.description.in_([e['description']
+ for e in data[1:]])).
+ order_by(Item.description).all())
+ self.assert_result(result, *data)
objects[4].description = 'item4updated'
k = Keyword()
session.flush,
AllOf(
CompiledSQL("UPDATE items SET description=:description "
- "WHERE items.id = :items_id",
- {'description': 'item4updated',
- 'items_id': objects[4].id},
- ),
+ "WHERE items.id = :items_id",
+ {'description': 'item4updated',
+ 'items_id': objects[4].id},
+ ),
CompiledSQL("INSERT INTO keywords (name) "
- "VALUES (:name)",
- {'name': 'yellow'},
- )
+ "VALUES (:name)",
+ {'name': 'yellow'},
+ )
),
CompiledSQL("INSERT INTO item_keywords (item_id, keyword_id) "
- "VALUES (:item_id, :keyword_id)",
- lambda ctx: [{'item_id': objects[5].id,
- 'keyword_id': k.id}])
- )
+ "VALUES (:item_id, :keyword_id)",
+ lambda ctx: [{'item_id': objects[5].id,
+ 'keyword_id': k.id}])
+ )
objects[2].keywords.append(k)
dkid = objects[5].keywords[1].id
testing.db,
session.flush,
CompiledSQL("DELETE FROM item_keywords "
- "WHERE item_keywords.item_id = :item_id AND "
- "item_keywords.keyword_id = :keyword_id",
- [{'item_id': objects[5].id, 'keyword_id': dkid}]),
+ "WHERE item_keywords.item_id = :item_id AND "
+ "item_keywords.keyword_id = :keyword_id",
+ [{'item_id': objects[5].id, 'keyword_id': dkid}]),
CompiledSQL("INSERT INTO item_keywords (item_id, keyword_id) "
- "VALUES (:item_id, :keyword_id)",
- lambda ctx: [{'item_id': objects[2].id, 'keyword_id': k.id}]
- ))
+ "VALUES (:item_id, :keyword_id)",
+ lambda ctx: [
+ {'item_id': objects[2].id, 'keyword_id': k.id}]
+ ))
session.delete(objects[3])
session.flush()
"""
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
+ keywords, items, item_keywords, Keyword, Item = (
+ self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
- keywords = relationship(Keyword, item_keywords, lazy='joined'),
- ))
+ keywords=relationship(Keyword, item_keywords, lazy='joined'),
+ ))
i = Item(description='i1')
k1 = Keyword(name='k1')
def test_scalar(self):
"""sa.dependency won't delete an m2m relationship referencing None."""
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
-
+ keywords, items, item_keywords, Keyword, Item = (
+ self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
- keyword=relationship(Keyword, secondary=item_keywords, uselist=False)))
+ keyword=relationship(Keyword, secondary=item_keywords,
+ uselist=False)))
i = Item(description='x')
session = create_session()
def test_many_to_many_update(self):
"""Assorted history operations on a many to many"""
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
+ keywords, items, item_keywords, Keyword, Item = (
+ self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
keywords=relationship(Keyword,
- secondary=item_keywords,
- lazy='joined',
- order_by=keywords.c.name)))
+ secondary=item_keywords,
+ lazy='joined',
+ order_by=keywords.c.name)))
k1 = Keyword(name='keyword 1')
k2 = Keyword(name='keyword 2')
def test_association(self):
"""Basic test of an association object"""
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
-
+ keywords, items, item_keywords, Keyword, Item = (
+ self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
class IKAssociation(fixtures.ComparableEntity):
pass
# affected this, but was fixed again
mapper(IKAssociation, item_keywords,
- primary_key=[item_keywords.c.item_id, item_keywords.c.keyword_id],
+ primary_key=[item_keywords.c.item_id,
+ item_keywords.c.keyword_id],
properties=dict(
- keyword=relationship(mapper(Keyword, keywords, non_primary=True),
- lazy='joined',
- uselist=False,
- order_by=keywords.c.name # note here is a valid place where order_by can be used
- ))) # on a scalar relationship(); to determine eager ordering of
- # the parent object within its collection.
+ keyword=relationship(mapper(Keyword, keywords,
+ non_primary=True),
+ lazy='joined',
+ uselist=False,
+ # note here is a valid place where
+ # order_by can be used on a scalar
+ # relationship(); to determine eager
+ # ordering of the parent object within
+ # its collection.
+ order_by=keywords.c.name)))
mapper(Item, items, properties=dict(
keywords=relationship(IKAssociation, lazy='joined')))
def test_m2o_nonmatch(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users)
mapper(Address, addresses, properties=dict(
- user = relationship(User, lazy='select', uselist=False)))
+ user=relationship(User, lazy='select', uselist=False)))
session = create_session()
testing.db,
session.flush,
CompiledSQL("INSERT INTO users (name) VALUES (:name)",
- {'name': 'u1'}),
+ {'name': 'u1'}),
CompiledSQL("INSERT INTO users (name) VALUES (:name)",
- {'name': 'u2'}),
+ {'name': 'u2'}),
CompiledSQL("INSERT INTO addresses (user_id, email_address) "
- "VALUES (:user_id, :email_address)",
- {'user_id': 1, 'email_address': 'a1'}),
+ "VALUES (:user_id, :email_address)",
+ {'user_id': 1, 'email_address': 'a1'}),
CompiledSQL("INSERT INTO addresses (user_id, email_address) "
- "VALUES (:user_id, :email_address)",
- {'user_id': 2, 'email_address': 'a2'}),
+ "VALUES (:user_id, :email_address)",
+ {'user_id': 2, 'email_address': 'a2'}),
)
+
class SaveTest3(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
def setup_classes(cls):
class Keyword(cls.Basic):
pass
+
class Item(cls.Basic):
pass
def test_manytomany_xtracol_delete(self):
- """A many-to-many on a table that has an extra column can properly delete rows from the table without referencing the extra column"""
+ """A many-to-many on a table that has an extra column can properly
+ delete rows from the table without referencing the extra column"""
keywords, items, assoc, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.assoc,
- self.classes.Keyword,
- self.classes.Item)
-
+ self.tables.items,
+ self.tables.assoc,
+ self.classes.Keyword,
+ self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
- keywords = relationship(Keyword, secondary=assoc, lazy='joined'),))
+ keywords=relationship(Keyword, secondary=assoc, lazy='joined'),))
i = Item()
k1 = Keyword()
if clear:
sess.expunge_all()
eq_(
- sess.query(T).filter(T.value == True).order_by(T.id).all(),
+ sess.query(T).filter(T.value == True) # noqa
+ .order_by(T.id).all(),
[T(value=True, name="t1"), T(value=True, name="t3")])
if clear:
sess.expunge_all()
eq_(
- sess.query(T).filter(T.value == False).order_by(T.id).all(),
+ sess.query(T).filter(T.value == False) # noqa
+ .order_by(T.id).all(),
[T(value=False, name="t2")])
t2 = sess.query(T).get(t2.id)
t2.value = True
sess.flush()
eq_(
- sess.query(T).filter(T.value == True).order_by(T.id).all(),
+ sess.query(T).filter(T.value == True) # noqa
+ .order_by(T.id).all(),
[
T(value=True, name="t1"),
T(value=True, name="t2"), T(value=True, name="t3")])
t2.value = False
sess.flush()
eq_(
- sess.query(T).filter(T.value == True).order_by(T.id).all(),
+ sess.query(T).filter(T.value == True) # noqa
+ .order_by(T.id).all(),
[T(value=True, name="t1"), T(value=True, name="t3")])
def define_tables(cls, metadata):
# parent
Table('t5', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(30), nullable=False))
+ Column('id', Integer, primary_key=True),
+ Column('data', String(30), nullable=False))
# onetomany
Table('t6', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(30), nullable=False),
- Column('t5id', Integer, ForeignKey('t5.id'),nullable=False))
+ Column('id', Integer, primary_key=True),
+ Column('data', String(30), nullable=False),
+ Column('t5id', Integer, ForeignKey('t5.id'), nullable=False))
# associated
Table('t7', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(30), nullable=False))
+ Column('id', Integer, primary_key=True),
+ Column('data', String(30), nullable=False))
- #manytomany
+ # manytomany
Table('t5t7', metadata,
- Column('t5id', Integer, ForeignKey('t5.id'),nullable=False),
- Column('t7id', Integer, ForeignKey('t7.id'),nullable=False))
+ Column('t5id', Integer, ForeignKey('t5.id'), nullable=False),
+ Column('t7id', Integer, ForeignKey('t7.id'), nullable=False))
@classmethod
def setup_classes(cls):
def test_onetomany(self):
t6, T6, t5, T5 = (self.tables.t6,
- self.classes.T6,
- self.tables.t5,
- self.classes.T5)
+ self.classes.T6,
+ self.tables.t5,
+ self.classes.T5)
mapper(T5, t5, properties={
- 't6s':relationship(T6, cascade="all, delete-orphan")
+ 't6s': relationship(T6, cascade="all, delete-orphan")
})
mapper(T6, t6)
o6 = T5(data='some other t5', id=o5.id, t6s=[
T6(data='third t6', id=3),
T6(data='fourth t6', id=4),
- ])
+ ])
sess.delete(o5)
sess.add(o6)
sess.flush()
self.classes.T7)
mapper(T5, t5, properties={
- 't7s':relationship(T7, secondary=t5t7, cascade="all")
+ 't7s': relationship(T7, secondary=t5t7, cascade="all")
})
mapper(T7, t7)
sess.flush()
assert list(sess.execute(t5.select(), mapper=T5)) == [(1, 'some t5')]
- assert testing.rowset(sess.execute(t5t7.select(), mapper=T5)) == set([(1,1), (1, 2)])
- assert list(sess.execute(t7.select(), mapper=T5)) == [(1, 'some t7'), (2, 'some other t7')]
+ assert testing.rowset(sess.execute(
+ t5t7.select(), mapper=T5)) == set([(1, 1), (1, 2)])
+ assert list(sess.execute(t7.select(), mapper=T5)) == [
+ (1, 'some t7'), (2, 'some other t7')]
o6 = T5(data='some other t5', id=1, t7s=[
T7(data='third t7', id=3),
T7(data='fourth t7', id=4),
- ])
+ ])
sess.delete(o5)
assert o5 in sess.deleted
sess.add(o6)
sess.flush()
- assert list(sess.execute(t5.select(), mapper=T5)) == [(1, 'some other t5')]
- assert list(sess.execute(t7.select(), mapper=T5)) == [(3, 'third t7'), (4, 'fourth t7')]
+ assert list(sess.execute(t5.select(), mapper=T5)) == [
+ (1, 'some other t5')]
+ assert list(sess.execute(t7.select(), mapper=T5)) == [
+ (3, 'third t7'), (4, 'fourth t7')]
def test_manytoone(self):
t6, T6, t5, T5 = (self.tables.t6,
- self.classes.T6,
- self.tables.t5,
- self.classes.T5)
-
+ self.classes.T6,
+ self.tables.t5,
+ self.classes.T5)
mapper(T6, t6, properties={
- 't5':relationship(T5)
+ 't5': relationship(T5)
})
mapper(T5, t5)
sess.flush()
assert list(sess.execute(t5.select(), mapper=T5)) == [(1, 'some t5')]
- assert list(sess.execute(t6.select(), mapper=T5)) == [(1, 'some t6', 1)]
+ assert list(sess.execute(t6.select(), mapper=T5)) == [
+ (1, 'some t6', 1)]
o6 = T6(data='some other t6', id=1, t5=T5(data='some other t5', id=2))
sess.delete(o5)
sess.add(o6)
sess.flush()
- assert list(sess.execute(t5.select(), mapper=T5)) == [(2, 'some other t5')]
- assert list(sess.execute(t6.select(), mapper=T5)) == [(1, 'some other t6', 2)]
+ assert list(sess.execute(t5.select(), mapper=T5)) == [
+ (2, 'some other t5')]
+ assert list(sess.execute(t6.select(), mapper=T5)) == [
+ (1, 'some other t6', 2)]
+
class InheritingRowSwitchTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('parent', metadata,
- Column('pid', Integer, primary_key=True),
- Column('pdata', String(30))
- )
+ Column('pid', Integer, primary_key=True),
+ Column('pdata', String(30)))
Table('child', metadata,
- Column('cid', Integer, primary_key=True),
- Column('pid', Integer, ForeignKey('parent.pid')),
- Column('cdata', String(30))
- )
+ Column('cid', Integer, primary_key=True),
+ Column('pid', Integer, ForeignKey('parent.pid')),
+ Column('cdata', String(30)))
@classmethod
def setup_classes(cls):
def test_row_switch_no_child_table(self):
P, C, parent, child = (self.classes.P,
- self.classes.C,
- self.tables.parent,
- self.tables.child)
+ self.classes.C,
+ self.tables.parent,
+ self.tables.child)
mapper(P, parent)
mapper(C, child, inherits=P)
sess.add(c2)
sess.delete(c1)
- self.assert_sql_execution(testing.db, sess.flush,
- CompiledSQL("UPDATE parent SET pdata=:pdata WHERE parent.pid = :parent_pid",
- {'pdata':'c2', 'parent_pid':1}
+ self.assert_sql_execution(
+ testing.db, sess.flush,
+ CompiledSQL(
+ "UPDATE parent SET pdata=:pdata "
+ "WHERE parent.pid = :parent_pid",
+ {'pdata': 'c2', 'parent_pid': 1}
),
# this fires as of [ticket:1362], since we synchronzize
# PK/FKs on UPDATES. c2 is new so the history shows up as
# pure added, update occurs. If a future change limits the
- # sync operation during _save_obj().update, this is safe to remove again.
- CompiledSQL("UPDATE child SET pid=:pid WHERE child.cid = :child_cid",
- {'pid':1, 'child_cid':1}
- )
- )
+ # sync operation during _save_obj().update, this is safe to remove
+ # again.
+ CompiledSQL("UPDATE child SET pid=:pid "
+ "WHERE child.cid = :child_cid",
+ {'pid': 1, 'child_cid': 1}))
+
class TransactionTest(fixtures.MappedTest):
__requires__ = ('deferrable_or_no_constraints',)
@classmethod
def define_tables(cls, metadata):
t1 = Table('t1', metadata,
- Column('id', Integer, primary_key=True))
+ Column('id', Integer, primary_key=True))
t2 = Table('t2', metadata,
- Column('id', Integer, primary_key=True),
- Column('t1_id', Integer,
- ForeignKey('t1.id', deferrable=True, initially='deferred')
- ))
+ Column('id', Integer, primary_key=True),
+ Column('t1_id', Integer,
+ ForeignKey('t1.id', deferrable=True,
+ initially='deferred')))
+
@classmethod
def setup_classes(cls):
class T1(cls.Comparable):
@classmethod
def setup_mappers(cls):
T2, T1, t2, t1 = (cls.classes.T2,
- cls.classes.T1,
- cls.tables.t2,
- cls.tables.t1)
+ cls.classes.T1,
+ cls.tables.t2,
+ cls.tables.t1)
orm_mapper(T1, t1)
orm_mapper(T2, t2)
try:
session.flush()
assert False
- except:
+ except Exception:
# Flush needs to rollback also when commit fails
assert session.transaction is None
if testing.against('postgresql'):
t1.bind.engine.dispose()
+
class PartialNullPKTest(fixtures.MappedTest):
# sqlite totally fine with NULLs in pk columns.
# no other DB is like this.
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('col1', String(10), primary_key=True, nullable=True),
- Column('col2', String(10), primary_key=True, nullable=True),
- Column('col3', String(50))
- )
+ Column('col1', String(10), primary_key=True, nullable=True),
+ Column('col2', String(10), primary_key=True, nullable=True),
+ Column('col3', String(50)))
@classmethod
def setup_classes(cls):
s.commit()
t.col1 = "1"
- s.commit()
\ No newline at end of file
+ s.commit()
session.flush()
- #pid = parent.id
+ # pid = parent.id
c1id = c1.id
c2id = c2.id
session.flush()
- #pid = parent.id
+ # pid = parent.id
c1id = c1.id
c2id = c2.id
{'nodes_id': n3.id, 'parent_id': None},
{'nodes_id': n2.id, 'parent_id': None}
]
- )
+ )
),
CompiledSQL(
"DELETE FROM nodes WHERE nodes.id = :id", lambda ctx: {
sess.close()
+
class RowswitchM2OTest(fixtures.MappedTest):
# tests for #3060 and related issues
)
)
+
class TypeWoBoolTest(fixtures.MappedTest, testing.AssertsExecutionResults):
"""test support for custom datatypes that return a non-__bool__ value
when compared via __eq__(), eg. ticket 3469"""
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
from sqlalchemy.testing import AssertsCompiledSQL
+from .inheritance import _poly_fixtures
+
+
class AliasedClassTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
def _fixture(self, cls, properties={}):
table = Table('point', MetaData(),
- Column('id', Integer(), primary_key=True),
- Column('x', Integer),
- Column('y', Integer))
+ Column('id', Integer(), primary_key=True),
+ Column('x', Integer),
+ Column('y', Integer))
mapper(cls, table, properties=properties)
return table
class descriptor(object):
def __init__(self, fn):
self.fn = fn
+
def __get__(self, obj, owner):
if obj is not None:
return self.fn(obj, obj)
else:
return self
+
def method(self):
return 'method'
class Point(object):
center = (0, 0)
+
@descriptor
def thing(self, arg):
return arg.center
run_inserts = None
run_deletes = None
-
@classmethod
def setup_mappers(cls):
cls._setup_stock_mapping()
path[umapper.attrs.addresses][amapper]
[amapper.attrs.email_address],
PathRegistry.coerce((umapper, umapper.attrs.addresses,
- amapper, amapper.attrs.email_address))
+ amapper, amapper.attrs.email_address))
)
def test_entity_boolean(self):
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
path = PathRegistry.coerce((umapper, umapper.attrs.addresses,
- amapper, amapper.attrs.email_address))
+ amapper, amapper.attrs.email_address))
is_(path[0], umapper)
is_(path[2], amapper)
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
path = PathRegistry.coerce((umapper, umapper.attrs.addresses,
- amapper, amapper.attrs.email_address))
+ amapper, amapper.attrs.email_address))
eq_(path[1], umapper.attrs.addresses)
eq_(path[3], amapper.attrs.email_address)
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
path = PathRegistry.coerce((umapper, umapper.attrs.addresses,
- amapper, amapper.attrs.email_address))
+ amapper, amapper.attrs.email_address))
eq_(path[1:3], (umapper.attrs.addresses, amapper))
def test_addition(self):
eq_(
p1 + p2,
PathRegistry.coerce((umapper, umapper.attrs.addresses,
- amapper, amapper.attrs.email_address))
+ amapper, amapper.attrs.email_address))
)
def test_length(self):
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
p3 = PathRegistry.coerce((umapper, umapper.attrs.addresses,
- amapper, amapper.attrs.email_address))
+ amapper, amapper.attrs.email_address))
eq_(len(pneg1), 0)
eq_(len(p0), 1)
p4 = PathRegistry.coerce((u_alias, umapper.attrs.addresses))
p5 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
p6 = PathRegistry.coerce((amapper, amapper.attrs.user, umapper,
- umapper.attrs.addresses))
+ umapper.attrs.addresses))
p7 = PathRegistry.coerce((amapper, amapper.attrs.user, umapper,
- umapper.attrs.addresses,
- amapper, amapper.attrs.email_address))
+ umapper.attrs.addresses,
+ amapper, amapper.attrs.email_address))
is_(p1 == p2, True)
is_(p1 == p3, False)
amapper = inspect(self.classes.Address)
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper,
- amapper.attrs.email_address))
+ amapper.attrs.email_address))
p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
p3 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
eq_(
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
-
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper,
- amapper.attrs.email_address))
+ amapper.attrs.email_address))
p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
p3 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
eq_(
PathRegistry.deserialize([(User, "addresses"),
- (Address, "email_address")]),
+ (Address, "email_address")]),
p1
)
eq_(
p3
)
-from .inheritance import _poly_fixtures
+
class PathRegistryInhTest(_poly_fixtures._Polymorphic):
run_setup_mappers = 'once'
run_inserts = None
emapper = inspect(Engineer)
p1 = PathRegistry.coerce((cmapper, cmapper.attrs.employees,
- pmapper, emapper.attrs.machines))
+ pmapper, emapper.attrs.machines))
# given a mapper and an attribute on a subclass,
# the path converts what you get to be against that subclass
p_alias = inspect(p_alias)
p1 = PathRegistry.coerce((c_alias, cmapper.attrs.employees,
- p_alias, emapper.attrs.machines))
+ p_alias, emapper.attrs.machines))
# plain AliasedClass - the path keeps that AliasedClass directly
# as is in the path
eq_(
p1.path,
(emapper, emapper.attrs.machines)
)
-
def test_collection(self):
users, addresses, Address = (self.tables.users,
- self.tables.addresses,
- self.classes.Address)
+ self.tables.addresses,
+ self.classes.Address)
canary = Mock()
+
class User(fixtures.ComparableEntity):
@validates('addresses')
def validate_address(self, key, ad):
return ad
mapper(User, users, properties={
- 'addresses': relationship(Address)}
+ 'addresses': relationship(Address)}
)
mapper(Address, addresses)
sess = Session()
eq_(
sess.query(User).filter_by(name='edward').one(),
- User(name='edward', addresses=[Address(email_address='foo@bar.com')])
+ User(name='edward', addresses=[
+ Address(email_address='foo@bar.com')])
)
def test_validators_dict(self):
return key, value
u_m = mapper(User, users, properties={
- 'addresses': relationship(Address)
- }
- )
+ 'addresses': relationship(Address)})
mapper(Address, addresses)
eq_(
dict((k, v[0].__name__) for k, v in list(u_m.validators.items())),
{'name': 'validate_name',
- 'addresses': 'validate_address'}
+ 'addresses': 'validate_address'}
)
def test_validator_w_removes(self):
self.tables.addresses,
self.classes.Address)
canary = Mock()
+
class User(fixtures.ComparableEntity):
@validates('name', include_removes=True)
return item
mapper(User, users, properties={
- 'addresses': relationship(Address)
- })
+ 'addresses': relationship(Address)
+ })
mapper(Address, addresses)
u1 = User()
u1.addresses = [a2, a3]
eq_(canary.mock_calls, [
- call('name', 'ed', False),
- call('name', 'mary', False),
- call('name', 'mary', True),
- # append a1
- call('addresses', a1, False),
- # remove a1
- call('addresses', a1, True),
- # set to [a1, a2] - this is two appends
- call('addresses', a1, False), call('addresses', a2, False),
- # set to [a2, a3] - this is a remove of a1,
- # append of a3. the appends are first.
- call('addresses', a3, False),
- call('addresses', a1, True),
- ]
- )
+ call('name', 'ed', False),
+ call('name', 'mary', False),
+ call('name', 'mary', True),
+ # append a1
+ call('addresses', a1, False),
+ # remove a1
+ call('addresses', a1, True),
+ # set to [a1, a2] - this is two appends
+ call('addresses', a1, False), call('addresses', a2, False),
+ # set to [a2, a3] - this is a remove of a1,
+ # append of a3. the appends are first.
+ call('addresses', a3, False),
+ call('addresses', a1, True),
+ ])
def test_validator_multi_warning(self):
users = self.tables.users
def _test_validator_backrefs(self, include_backrefs, include_removes):
users, addresses = (self.tables.users,
- self.tables.addresses)
+ self.tables.addresses)
canary = Mock()
+
class User(fixtures.ComparableEntity):
if include_removes:
@validates('addresses', include_removes=True,
- include_backrefs=include_backrefs)
+ include_backrefs=include_backrefs)
def validate_address(self, key, item, remove):
canary(key, item, remove)
return item
else:
@validates('addresses', include_removes=False,
- include_backrefs=include_backrefs)
+ include_backrefs=include_backrefs)
def validate_address(self, key, item):
canary(key, item)
return item
class Address(fixtures.ComparableEntity):
if include_removes:
@validates('user', include_backrefs=include_backrefs,
- include_removes=True)
+ include_removes=True)
def validate_user(self, key, item, remove):
canary(key, item, remove)
return item
return item
mapper(User, users, properties={
- 'addresses': relationship(Address, backref="user")
- })
+ 'addresses': relationship(Address, backref="user")
+ })
mapper(Address, addresses)
u1 = User()
# backref for a2.user = None
call('user', None, False)
- ]
- )
+ ])
else:
eq_(calls,
[
call('user', User(addresses=[])),
call('addresses', Address(user=None)),
call('user', None)
- ]
- )
+ ])
else:
if include_removes:
eq_(calls,
[
- call('addresses', Address(), False),
- call('addresses', Address(user=None), False),
- call('user', User(addresses=[]), False),
- call('user', User(addresses=[]), True),
- call('addresses', Address(user=None), True)
- ]
-
- )
+ call('addresses', Address(), False),
+ call('addresses', Address(user=None), False),
+ call('user', User(addresses=[]), False),
+ call('user', User(addresses=[]), True),
+ call('addresses', Address(user=None), True)
+ ])
else:
eq_(calls,
[
call('addresses', Address()),
call('addresses', Address(user=None)),
call('user', User(addresses=[]))
- ]
- )
+ ])
import gevent.monkey
-gevent.monkey.patch_all()
+gevent.monkey.patch_all() # noqa
import logging
-logging.basicConfig()
-#logging.getLogger("sqlalchemy.pool").setLevel(logging.INFO)
+logging.basicConfig() # noqa
+# logging.getLogger("sqlalchemy.pool").setLevel(logging.INFO)
from sqlalchemy import event
import random
import sys
engine = create_engine('mysql+pymysql://scott:tiger@localhost/test',
pool_size=50, max_overflow=0)
+
@event.listens_for(engine, "connect")
def conn(*arg):
print "new connection!"
+
def worker():
while True:
conn = engine.connect()
conn.execute("SELECT 1+1")
gevent.sleep(random.random() * 1.01)
- except:
- #traceback.print_exc()
+ except Exception:
+ # traceback.print_exc()
sys.stderr.write('X')
else:
conn.close()
import warnings
-warnings.filterwarnings("ignore", r".*Decimal objects natively")
+warnings.filterwarnings("ignore", r".*Decimal objects natively") # noqa
# speed up cdecimal if available
try:
Base = declarative_base()
+
class Employee(Base):
__tablename__ = 'employee'
__mapper_args__ = {'polymorphic_on': type}
+
class Boss(Employee):
__tablename__ = 'boss'
__mapper_args__ = {'polymorphic_identity': 'boss'}
+
class Grunt(Employee):
__tablename__ = 'grunt'
employer_id = Column(Integer, ForeignKey('boss.id'))
employer = relationship("Boss", backref="employees",
- primaryjoin=Boss.id == employer_id)
+ primaryjoin=Boss.id == employer_id)
__mapper_args__ = {'polymorphic_identity': 'grunt'}
+
if os.path.exists('orm2010.db'):
os.remove('orm2010.db')
# use a file based database so that cursor.execute() has some
sess = Session(engine)
+
def runit(status, factor=1, query_runs=5):
num_bosses = 100 * factor
num_grunts = num_bosses * 100
# handful of bosses
batch_size = 100
batch_num = (num_grunts - len(grunts)) / batch_size
- boss = sess.query(Boss).\
- filter_by(name="Boss %d" % batch_num).\
- first()
+ boss = sess.query(Boss).filter_by(name="Boss %d" % batch_num).first()
for grunt in grunts[0:batch_size]:
grunt.employer = boss
sess.close() # close out the session
+
def run_with_profile(runsnake=False, dump=False):
import cProfile
import pstats
cProfile.runctx('runit(status)', globals(), locals(), filename)
stats = pstats.Stats(filename)
- counts_by_methname = dict((key[2], stats.stats[key][0]) for key in stats.stats)
+ counts_by_methname = dict((key[2],
+ stats.stats[key][0]) for key in stats.stats)
print("SQLA Version: %s" % __version__)
print("Total calls %d" % stats.total_calls)
print("Total cpu seconds: %.2f" % stats.total_tt)
- print('Total execute calls: %d' \
- % counts_by_methname["<method 'execute' of 'sqlite3.Cursor' "
- "objects>"])
- print('Total executemany calls: %d' \
- % counts_by_methname.get("<method 'executemany' of 'sqlite3.Cursor' "
- "objects>", 0))
+ print('Total execute calls: %d'
+ % counts_by_methname["<method 'execute' of 'sqlite3.Cursor' "
+ "objects>"])
+ print('Total executemany calls: %d'
+ % counts_by_methname.get("<method 'executemany' of 'sqlite3.Cursor' "
+ "objects>", 0))
if dump:
stats.sort_stats('time', 'calls')
runit(status, 10)
print("Total time: %d" % (time.time() - now))
+
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--profile', action='store_true',
- help='run shorter test suite w/ cprofilng')
+                        help='run shorter test suite w/ cprofiling')
parser.add_argument('--dump', action='store_true',
- help='dump full call profile (implies --profile)')
+ help='dump full call profile (implies --profile)')
parser.add_argument('--runsnake', action='store_true',
- help='invoke runsnakerun (implies --profile)')
+ help='invoke runsnakerun (implies --profile)')
args = parser.parse_args()
LambdaPredicate,\
requires_tag
+
def no_support(db, reason):
return SpecPredicate(db, description=reason)
+
def exclude(db, op, spec, description=None):
return SpecPredicate(db, op, spec, description=description)
+
class DefaultRequirements(SuiteRequirements):
@property
def deferrable_or_no_constraints(self):
without being in the context of a typed column.
"""
- return skip_if(["firebird", "mssql+mxodbc"],
- "not supported by driver")
+ return skip_if(["firebird", "mssql+mxodbc"], "not supported by driver")
@property
def identity(self):
"""Target database must support GENERATED AS IDENTITY or a facsimile.
Includes GENERATED AS IDENTITY, AUTOINCREMENT, AUTO_INCREMENT, or other
- column DDL feature that fills in a DB-generated identifier at INSERT-time
- without requiring pre-execution of a SEQUENCE or other artifact.
+ column DDL feature that fills in a DB-generated identifier at
+ INSERT-time without requiring pre-execution of a SEQUENCE or other
+ artifact.
"""
return skip_if(["firebird", "oracle", "postgresql", "sybase"],
- "not supported by database"
- )
+ "not supported by database")
@property
def temporary_tables(self):
PKs assuming they were reflected.
this is essentially all the DBs in "identity" plus PostgreSQL, which
- has SERIAL support. FB and Oracle (and sybase?) require the Sequence to
- be explicitly added, including if the table was reflected.
+ has SERIAL support. FB and Oracle (and sybase?) require the Sequence
+ to be explicitly added, including if the table was reflected.
"""
return skip_if(["firebird", "oracle", "sybase"],
- "not supported by database"
- )
+ "not supported by database")
@property
def insert_from_select(self):
"""target database/driver can allow BLOB/BINARY fields to be compared
against a bound parameter value.
"""
- return skip_if(["oracle", "mssql"],
- "not supported by database/driver"
- )
+ return skip_if(["oracle", "mssql"], "not supported by database/driver")
@property
def binary_literals(self):
# adding mssql here since it doesn't support comparisons either,
# have observed generally bad behavior with binary / mssql.
- return skip_if(["oracle", "mssql"],
- "not supported by database/driver"
- )
+ return skip_if(["oracle", "mssql"], "not supported by database/driver")
@property
def independent_cursors(self):
@property
def independent_connections(self):
- """Target must support simultaneous, independent database connections."""
+ """
+ Target must support simultaneous, independent database connections.
+ """
- # This is also true of some configurations of UnixODBC and probably win32
- # ODBC as well.
+ # This is also true of some configurations of UnixODBC and probably
+ # win32 ODBC as well.
return skip_if([
- no_support("sqlite",
- "independent connections disabled "
- "when :memory: connections are used"),
- exclude("mssql", "<", (9, 0, 0),
- "SQL Server 2005+ is required for "
- "independent connections"
- )
- ]
- )
+ no_support("sqlite",
+ "independent connections disabled "
+ "when :memory: connections are used"),
+ exclude("mssql", "<", (9, 0, 0),
+ "SQL Server 2005+ is required for "
+ "independent connections")])
@property
def updateable_autoincrement_pks(self):
"""Target must support UPDATE on autoincrement/integer primary key."""
return skip_if(["mssql", "sybase"],
- "IDENTITY columns can't be updated")
+ "IDENTITY columns can't be updated")
@property
def isolation_level(self):
return only_on(
- ('postgresql', 'sqlite', 'mysql', 'mssql'),
- "DBAPI has no isolation level support"
- ) + fails_on('postgresql+pypostgresql',
- 'pypostgresql bombs on multiple isolation level calls')
+ ('postgresql', 'sqlite', 'mysql', 'mssql'),
+ "DBAPI has no isolation level support") \
+ + fails_on('postgresql+pypostgresql',
+ 'pypostgresql bombs on multiple isolation level calls')
@property
def row_triggers(self):
# huh? TODO: implement triggers for PG tests, remove this
no_support('postgresql',
- 'PG triggers need to be implemented for tests'),
+ 'PG triggers need to be implemented for tests'),
])
@property
correlates to the parent."""
return skip_if("oracle", 'Raises "ORA-01799: a column may not be '
- 'outer-joined to a subquery"')
+ 'outer-joined to a subquery"')
@property
def update_from(self):
"""Target must support UPDATE..FROM syntax"""
return only_on(['postgresql', 'mssql', 'mysql'],
- "Backend does not support UPDATE..FROM")
-
+ "Backend does not support UPDATE..FROM")
@property
def update_where_target_in_subquery(self):
FROM documents GROUP BY documents.user_id
)
"""
- return fails_if('mysql', 'MySQL error 1093 "Cant specify target table '
- 'for update in FROM clause"')
+ return fails_if('mysql',
+ 'MySQL error 1093 "Cant specify target table '
+ 'for update in FROM clause"')
@property
def savepoints(self):
def update_nowait(self):
"""Target database must support SELECT...FOR UPDATE NOWAIT"""
return skip_if(["firebird", "mssql", "mysql", "sqlite", "sybase"],
- "no FOR UPDATE NOWAIT support"
- )
+ "no FOR UPDATE NOWAIT support")
@property
def subqueries(self):
return skip_if([
no_support('firebird', 'no SA implementation'),
no_support('mssql', 'two-phase xact not supported by drivers'),
- no_support('oracle', 'two-phase xact not implemented in SQLA/oracle'),
+ no_support('oracle',
+ 'two-phase xact not implemented in SQLA/oracle'),
no_support('drizzle', 'two-phase xact not supported by database'),
no_support('sqlite', 'two-phase xact not supported by database'),
- no_support('sybase', 'two-phase xact not supported by drivers/SQLA'),
+ no_support('sybase',
+ 'two-phase xact not supported by drivers/SQLA'),
no_support('postgresql+zxjdbc',
- 'FIXME: JDBC driver confuses the transaction state, may '
- 'need separate XA implementation'),
+ 'FIXME: JDBC driver confuses the transaction state, '
+ 'may need separate XA implementation'),
no_support('mysql',
- 'recent MySQL communiity editions have too many issues '
- '(late 2016), disabling for now'),
- ])
+                   'recent MySQL community editions have too many issues '
+ '(late 2016), disabling for now')])
@property
def two_phase_recovery(self):
@property
def empty_strings_varchar(self):
- """target database can persist/return an empty string with a varchar."""
+ """
+ target database can persist/return an empty string with a varchar.
+ """
return fails_if(["oracle"],
'oracle converts empty strings to a blank space')
@property
def unicode_connections(self):
- """Target driver must support some encoding of Unicode across the wire."""
+ """
+ Target driver must support some encoding of Unicode across the wire.
+ """
# TODO: expand to exclude MySQLdb versions w/ broken unicode
return skip_if([
exclude('mysql', '<', (4, 1, 1), 'no unicode connection support'),
"driver doesn't support 'sane' rowcount"
)
-
-
@property
def emulated_lastrowid(self):
""""target dialect retrieves cursor.lastrowid or an equivalent
after an insert() construct executes.
"""
return fails_on_everything_except('mysql',
- 'sqlite+pysqlite', 'sqlite+pysqlcipher',
- 'sybase', 'mssql')
+ 'sqlite+pysqlite',
+ 'sqlite+pysqlcipher',
+ 'sybase',
+ 'mssql')
@property
def implements_get_lastrowid(self):
"""
return skip_if('mssql+pymssql', 'crashes on pymssql') + \
- fails_on_everything_except('mysql',
- 'sqlite+pysqlite', 'sqlite+pysqlcipher')
+ fails_on_everything_except('mysql',
+ 'sqlite+pysqlite',
+ 'sqlite+pysqlcipher')
@property
def sane_multi_rowcount(self):
"""Target driver reflects the name of primary key constraints."""
return fails_on_everything_except('postgresql', 'oracle', 'mssql',
- 'sybase', 'sqlite')
+ 'sybase', 'sqlite')
@property
def array_type(self):
datetime.datetime() with microsecond objects."""
return skip_if(['mssql', 'mysql', 'firebird', '+zxjdbc',
- 'oracle', 'sybase'])
+ 'oracle', 'sybase'])
@property
def datetime_historic(self):
datetime.time() with microsecond objects."""
return skip_if(['mssql', 'mysql', 'firebird', '+zxjdbc',
- 'oracle', 'sybase'])
-
+ 'oracle', 'sybase'])
@property
def precision_numerics_general(self):
to represent very large values."""
return skip_if(
- [("sybase+pyodbc", None, None,
- "Don't know how do get these values through FreeTDS + Sybase"),
- ("firebird", None, None, "Precision must be from 1 to 18"),]
- )
+ [("sybase+pyodbc", None, None,
+ "Don't know how do get these values through FreeTDS + Sybase"),
+ ("firebird", None, None, "Precision must be from 1 to 18")])
@property
def precision_numerics_many_significant_digits(self):
"""
return fails_if(
- [('sqlite', None, None, 'TODO'),
- ("firebird", None, None, "Precision must be from 1 to 18"),
- ("sybase+pysybase", None, None, "TODO"),
- ('mssql+pymssql', None, None, 'FIXME: improve pymssql dec handling')]
- )
+ [('sqlite', None, None, 'TODO'),
+ ("firebird", None, None, "Precision must be from 1 to 18"),
+ ("sybase+pysybase", None, None, "TODO"),
+ ('mssql+pymssql', None, None,
+ 'FIXME: improve pymssql dec handling')]
+ )
@property
def precision_numerics_retains_significant_digits(self):
the .000 maintained."""
return fails_if(
- [
- ('oracle', None, None,
- "this may be a bug due to the difficulty in handling "
- "oracle precision numerics"),
- ("firebird", None, None,
- "database and/or driver truncates decimal places.")
- ]
- )
+ [
+ ('oracle', None, None,
+ "this may be a bug due to the difficulty in handling "
+ "oracle precision numerics"),
+ ("firebird", None, None,
+ "database and/or driver truncates decimal places.")
+ ]
+ )
@property
def precision_generic_float_type(self):
least seven decimal places when using the generic Float type."""
return fails_if([
- ('mysql', None, None,
- 'mysql FLOAT type only returns 4 decimals'),
- ('firebird', None, None,
- "firebird FLOAT type isn't high precision"),
- ])
+ ('mysql', None, None,
+ 'mysql FLOAT type only returns 4 decimals'),
+ ('firebird', None, None,
+ "firebird FLOAT type isn't high precision"),
+ ])
@property
def floats_to_four_decimals(self):
return fails_if([
- ("mysql+oursql", None, None, "Floating point error"),
- ("firebird", None, None,
- "Firebird still has FP inaccuracy even "
- "with only four decimal places"),
- ('mssql+pyodbc', None, None,
- 'mssql+pyodbc has FP inaccuracy even with '
- 'only four decimal places '
- ),
- ('mssql+pymssql', None, None,
- 'mssql+pymssql has FP inaccuracy even with '
- 'only four decimal places '),
- (
- 'postgresql+pg8000', None, None,
- 'postgresql+pg8000 has FP inaccuracy even with '
- 'only four decimal places '),
- (
- 'postgresql+psycopg2cffi', None, None,
- 'postgresql+psycopg2cffi has FP inaccuracy even with '
- 'only four decimal places '),
- ])
+ ("mysql+oursql", None, None, "Floating point error"),
+ ("firebird", None, None,
+ "Firebird still has FP inaccuracy even "
+ "with only four decimal places"),
+ ('mssql+pyodbc', None, None,
+ 'mssql+pyodbc has FP inaccuracy even with '
+ 'only four decimal places '),
+ ('mssql+pymssql', None, None,
+ 'mssql+pymssql has FP inaccuracy even with '
+ 'only four decimal places '),
+ ('postgresql+pg8000', None, None,
+ 'postgresql+pg8000 has FP inaccuracy even with '
+ 'only four decimal places '),
+ ('postgresql+psycopg2cffi', None, None,
+ 'postgresql+psycopg2cffi has FP inaccuracy even with '
+ 'only four decimal places ')])
@property
def fetch_null_from_numeric(self):
try:
config.db.execute("SELECT 'a=>1,a=>2'::hstore;")
return True
- except:
+ except Exception:
return False
return only_if(check_hstore)
try:
config.db.scalar("select '[1,2)'::int4range;")
return True
- except:
+ except Exception:
return False
return only_if(check_range_types)
-
@property
def oracle_test_dblink(self):
return skip_if(
@property
def order_by_label_with_expression(self):
return fails_if([
- ('firebird', None, None, "kinterbasdb doesn't send full type information"),
- ('postgresql', None, None, 'only simple labels allowed'),
- ('sybase', None, None, 'only simple labels allowed'),
- ('mssql', None, None, 'only simple labels allowed')
- ])
-
+ ('firebird', None, None,
+ "kinterbasdb doesn't send full type information"),
+ ('postgresql', None, None, 'only simple labels allowed'),
+ ('sybase', None, None, 'only simple labels allowed'),
+ ('mssql', None, None, 'only simple labels allowed')
+ ])
@property
def skip_mysql_on_windows(self):
"""Catchall for a large variety of MySQL on Windows failures"""
return skip_if(self._has_mysql_on_windows,
- "Not supported on MySQL + Windows"
- )
+ "Not supported on MySQL + Windows")
@property
def mssql_freetds(self):
@property
def selectone(self):
"""target driver must support the literal statement 'select 1'"""
- return skip_if(["oracle", "firebird"], "non-standard SELECT scalar syntax")
+ return skip_if(["oracle", "firebird"],
+ "non-standard SELECT scalar syntax")
@property
def mysql_fsp(self):
@property
def mysql_zero_date(self):
def check(config):
- row = config.db.execute("show variables like 'sql_mode'").first()
- return not row or "NO_ZERO_DATE" not in row[1]
+ row = config.db.execute("show variables like 'sql_mode'").first()
+ return not row or "NO_ZERO_DATE" not in row[1]
return only_if(check)
@property
def mysql_non_strict(self):
def check(config):
- row = config.db.execute("show variables like 'sql_mode'").first()
- return not row or "STRICT" not in row[1]
+ row = config.db.execute("show variables like 'sql_mode'").first()
+ return not row or "STRICT" not in row[1]
return only_if(check)
'x2': 1,
'y': 2})
-
def test_labels_no_collision(self):
t = table('foo', column('id'), column('foo_id'))
def test_val_is_null_coerced(self):
t = self._fixture()
- self.assert_compile(and_(t.c.id == None),
+ self.assert_compile(and_(t.c.id == None), # noqa
"foo.id IS NULL")
def test_val_and_None(self):
from sqlalchemy.testing import engines
from sqlalchemy.testing.assertions import expect_warnings
from sqlalchemy.testing import eq_
-from sqlalchemy.testing.assertsql import AllOf, RegexSQL, CompiledSQL, DialectSQL
+from sqlalchemy.testing.assertsql import (AllOf,
+ RegexSQL,
+ CompiledSQL,
+ DialectSQL)
from sqlalchemy.sql import table, column
),
)
-
-
-
-
-
@testing.requires.check_constraints
@testing.provide_metadata
def test_check_constraint_create(self):
RegexSQL("^CREATE TABLE events"),
AllOf(
CompiledSQL('CREATE UNIQUE INDEX ix_events_name ON events '
- '(name)'),
+ '(name)'),
CompiledSQL('CREATE INDEX ix_events_location ON events '
- '(location)'),
+ '(location)'),
CompiledSQL('CREATE UNIQUE INDEX sport_announcer ON events '
- '(sport, announcer)'),
+ '(sport, announcer)'),
CompiledSQL('CREATE INDEX idx_winners ON events (winner)'),
)
)
)
def test_deferrable_pk(self):
- factory = lambda **kw: PrimaryKeyConstraint('a', **kw)
+ def factory(**kw): return PrimaryKeyConstraint('a', **kw)
self._test_deferrable(factory)
def test_deferrable_table_fk(self):
- factory = lambda **kw: ForeignKeyConstraint(['b'], ['tbl.a'], **kw)
+ def factory(**kw): return ForeignKeyConstraint(['b'], ['tbl.a'], **kw)
self._test_deferrable(factory)
def test_deferrable_column_fk(self):
)
def test_deferrable_unique(self):
- factory = lambda **kw: UniqueConstraint('b', **kw)
+ def factory(**kw): return UniqueConstraint('b', **kw)
self._test_deferrable(factory)
def test_deferrable_table_check(self):
- factory = lambda **kw: CheckConstraint('a < b', **kw)
+ def factory(**kw): return CheckConstraint('a < b', **kw)
self._test_deferrable(factory)
def test_multiple(self):
schema.CreateIndex(constraint),
"CREATE INDEX name ON tbl (a + 5)"
)
-
-
cte = s1.cte(name="cte", recursive=True)
# can't do it here...
- #bar = select([cte]).cte('bar')
+ # bar = select([cte]).cte('bar')
cte = cte.union_all(
select([cte.c.x + 1]).where(cte.c.x < 10)
)
self.assert_compile(
stmt,
'WITH regional_sales AS (SELECT "order"."order" AS "order" '
- 'FROM "order") oracle suffix SELECT "order"."order" FROM "order", '
+ 'FROM "order") oracle suffix '
+ 'SELECT "order"."order" FROM "order", '
'regional_sales WHERE "order"."order" > regional_sales."order"',
dialect='oracle'
)
c = sa.ColumnDefault(fn)
c.arg("context")
-
@testing.fails_on('firebird', 'Data type unknown')
def test_standalone(self):
c = testing.db.engine.contextual_connect()
ctexec = sa.select(
[currenttime.label('now')], bind=testing.db).scalar()
- l = t.select().order_by(t.c.col1).execute()
+ result = t.select().order_by(t.c.col1).execute()
today = datetime.date.today()
- eq_(l.fetchall(), [
+ eq_(result.fetchall(), [
(x, 'imthedefault', f, ts, ts, ctexec, True, False,
12, today, 'py', 'hi', 'BINDfoo')
for x in range(51, 54)])
t.insert().execute({}, {}, {})
ctexec = currenttime.scalar()
- l = t.select().execute()
+ result = t.select().execute()
today = datetime.date.today()
- eq_(l.fetchall(),
+ eq_(result.fetchall(),
[(51, 'imthedefault', f, ts, ts, ctexec, True, False,
12, today, 'py', 'hi', 'BINDfoo'),
(52, 'imthedefault', f, ts, ts, ctexec, True, False,
t.insert().values([{}, {}, {}]).execute()
ctexec = currenttime.scalar()
- l = t.select().execute()
+ result = t.select().execute()
today = datetime.date.today()
- eq_(l.fetchall(),
+ eq_(result.fetchall(),
[(51, 'imthedefault', f, ts, ts, ctexec, True, False,
12, today, 'py', 'hi', 'BINDfoo'),
(52, 'imthedefault', f, ts, ts, ctexec, True, False,
def test_insert_values(self):
t.insert(values={'col3': 50}).execute()
- l = t.select().execute()
- eq_(50, l.first()['col3'])
+ result = t.select().execute()
+ eq_(50, result.first()['col3'])
@testing.fails_on('firebird', 'Data type unknown')
def test_updatemany(self):
{'pkval': 52},
{'pkval': 53})
- l = t.select().execute()
+ result = t.select().execute()
ctexec = currenttime.scalar()
today = datetime.date.today()
- eq_(l.fetchall(),
+ eq_(result.fetchall(),
[(51, 'im the update', f2, ts, ts, ctexec, False, False,
13, today, 'py', 'hi', 'BINDfoo'),
(52, 'im the update', f2, ts, ts, ctexec, True, False,
pk = r.inserted_primary_key[0]
t.update(t.c.col1 == pk).execute(col4=None, col5=None)
ctexec = currenttime.scalar()
- l = t.select(t.c.col1 == pk).execute()
- l = l.first()
- eq_(l,
+ result = t.select(t.c.col1 == pk).execute()
+ result = result.first()
+ eq_(result,
(pk, 'im the update', f2, None, None, ctexec, True, False,
13, datetime.date.today(), 'py', 'hi', 'BINDfoo'))
eq_(11, f2)
r = t.insert().execute()
pk = r.inserted_primary_key[0]
t.update(t.c.col1 == pk, values={'col3': 55}).execute()
- l = t.select(t.c.col1 == pk).execute()
- l = l.first()
- eq_(55, l['col3'])
+ result = t.select(t.c.col1 == pk).execute()
+ result = result.first()
+ eq_(55, result['col3'])
class CTEDefaultTest(fixtures.TablesTest):
try:
try:
self._test_autoincrement(con)
- except:
+ except Exception:
try:
tx.rollback()
- except:
+ except Exception:
pass
raise
else:
result = testing.db.execute(t.insert())
eq_(result.inserted_primary_key, [1])
+
cartitems = sometable = metadata = None
def test_funcfilter_criterion(self):
self.assert_compile(
func.count(1).filter(
- table1.c.name != None
+ table1.c.name != None # noqa
),
"count(:count_1) FILTER (WHERE mytable.name IS NOT NULL)"
)
def test_funcfilter_compound_criterion(self):
self.assert_compile(
func.count(1).filter(
- table1.c.name == None,
+ table1.c.name == None, # noqa
table1.c.myid > 0
),
"count(:count_1) FILTER (WHERE mytable.name IS NULL AND "
def test_funcfilter_label(self):
self.assert_compile(
select([func.count(1).filter(
- table1.c.description != None
+ table1.c.description != None # noqa
).label('foo')]),
"SELECT count(:count_1) FILTER (WHERE mytable.description "
"IS NOT NULL) AS foo FROM mytable"
self.assert_compile(
select([
func.max(table1.c.name).filter(
- literal_column('description') != None
+ literal_column('description') != None # noqa
)
]),
"SELECT max(mytable.name) FILTER (WHERE description "
cloned_traverse, ReplacingCloningVisitor
from sqlalchemy import exc
from sqlalchemy.sql import util as sql_util
-from sqlalchemy.testing import eq_, is_, is_not_, assert_raises, assert_raises_message
+from sqlalchemy.testing import (eq_,
+ is_,
+ is_not_,
+ assert_raises,
+ assert_raises_message)
A = B = t1 = t2 = t3 = table1 = table2 = table3 = table4 = None
t2alias = t2.alias('t2alias')
vis = sql_util.ClauseAdapter(t1alias)
- s = select([literal_column('*')], from_obj=[t1alias, t2alias]).as_scalar()
+ s = select([literal_column('*')],
+ from_obj=[t1alias, t2alias]).as_scalar()
assert t2alias in s._froms
assert t1alias in s._froms
'SELECT * FROM table2 AS t2alias WHERE '
't2alias.col1 = (SELECT * FROM table1 AS '
't1alias)')
- s = select([literal_column('*')], from_obj=[t1alias,
- t2alias]).correlate(t2alias).as_scalar()
+ s = select([literal_column('*')],
+ from_obj=[t1alias, t2alias]).correlate(t2alias).as_scalar()
self.assert_compile(select([literal_column('*')], t2alias.c.col1 == s),
'SELECT * FROM table2 AS t2alias WHERE '
't2alias.col1 = (SELECT * FROM table1 AS '
't2alias.col1 = (SELECT * FROM table1 AS '
't1alias)')
- s = select([literal_column('*')]).where(t1.c.col1 == t2.c.col1).as_scalar()
+ s = select([literal_column('*')]).where(t1.c.col1 == t2.c.col1) \
+ .as_scalar()
self.assert_compile(select([t1.c.col1, s]),
'SELECT table1.col1, (SELECT * FROM table2 '
'WHERE table1.col1 = table2.col1) AS '
'SELECT t1alias.col1, (SELECT * FROM '
'table2 WHERE t1alias.col1 = table2.col1) '
'AS anon_1 FROM table1 AS t1alias')
- s = select([literal_column('*')]).where(t1.c.col1
- == t2.c.col1).correlate(t1).as_scalar()
+ s = select([literal_column('*')]).where(t1.c.col1 == t2.c.col1) \
+ .correlate(t1).as_scalar()
self.assert_compile(select([t1.c.col1, s]),
'SELECT table1.col1, (SELECT * FROM table2 '
'WHERE table1.col1 = table2.col1) AS '
def test_table_to_alias_2(self):
t1alias = t1.alias('t1alias')
vis = sql_util.ClauseAdapter(t1alias)
- self.assert_compile(vis.traverse(select([literal_column('*')], from_obj=[t1])),
- 'SELECT * FROM table1 AS t1alias')
+ self.assert_compile(
+ vis.traverse(select([literal_column('*')], from_obj=[t1])),
+ 'SELECT * FROM table1 AS t1alias')
def test_table_to_alias_3(self):
t1alias = t1.alias('t1alias')
vis = sql_util.ClauseAdapter(t1alias)
- self.assert_compile(select([literal_column('*')], t1.c.col1 == t2.c.col2),
- 'SELECT * FROM table1, table2 WHERE '
- 'table1.col1 = table2.col2')
+ self.assert_compile(
+ select([literal_column('*')], t1.c.col1 == t2.c.col2),
+ 'SELECT * FROM table1, table2 WHERE table1.col1 = table2.col2')
def test_table_to_alias_4(self):
t1alias = t1.alias('t1alias')
vis = sql_util.ClauseAdapter(t1alias)
- self.assert_compile(vis.traverse(select([literal_column('*')], t1.c.col1
- == t2.c.col2)),
+ self.assert_compile(vis.traverse(select([literal_column('*')],
+ t1.c.col1 == t2.c.col2)),
'SELECT * FROM table1 AS t1alias, table2 '
'WHERE t1alias.col1 = table2.col2')
def test_table_to_alias_6(self):
t1alias = t1.alias('t1alias')
vis = sql_util.ClauseAdapter(t1alias)
- self.assert_compile(
- select([t1alias, t2]).where(
- t1alias.c.col1 == vis.traverse(
- select([literal_column('*')], t1.c.col1 == t2.c.col2, from_obj=[t1, t2]).
- correlate(t1)
- )
- ),
+ self.assert_compile(select([t1alias, t2]).where(
+ t1alias.c.col1 == vis.traverse(
+ select([literal_column('*')],
+ t1.c.col1 == t2.c.col2, from_obj=[t1, t2]).correlate(t1)
+ )
+ ),
"SELECT t1alias.col1, t1alias.col2, t1alias.col3, "
"table2.col1, table2.col2, table2.col3 "
"FROM table1 AS t1alias, table2 WHERE t1alias.col1 = "
self.assert_compile(
select([t1alias, t2]).
where(t1alias.c.col1 == vis.traverse(
- select([literal_column('*')], t1.c.col1 == t2.c.col2, from_obj=[t1, t2]).
+ select([literal_column('*')],
+ t1.c.col1 == t2.c.col2, from_obj=[t1, t2]).
correlate(t2))),
"SELECT t1alias.col1, t1alias.col2, t1alias.col3, "
"table2.col1, table2.col2, table2.col3 "
vis = sql_util.ClauseAdapter(t1alias)
t2alias = t2.alias('t2alias')
vis.chain(sql_util.ClauseAdapter(t2alias))
- self.assert_compile(vis.traverse(select([literal_column('*')], t1.c.col1
- == t2.c.col2)),
- 'SELECT * FROM table1 AS t1alias, table2 '
- 'AS t2alias WHERE t1alias.col1 = '
- 't2alias.col2')
+ self.assert_compile(
+ vis.traverse(
+ select([literal_column('*')], t1.c.col1 == t2.c.col2)),
+ 'SELECT * FROM table1 AS t1alias, table2 '
+ 'AS t2alias WHERE t1alias.col1 = '
+ 't2alias.col2')
def test_table_to_alias_15(self):
t1alias = t1.alias('t1alias')
assert_raises_message, fixtures, eq_, expect_warnings
from sqlalchemy.sql import crud
+
class _InsertTestBase(object):
@classmethod
Column('id', Integer, primary_key=True),
Column('x', Integer, default=10),
Column('y', Integer, server_default=text('5')),
- Column('z', Integer, default=lambda: 10)
- )
+ Column('z', Integer, default=lambda: 10))
class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
"SELECT mytable.foo, :bar AS anon_1 FROM mytable"
)
-
def test_insert_mix_select_values_exception(self):
table1 = self.tables.mytable
sel = select([table1.c.myid, table1.c.name]).where(
x = Column('foo', Integer)
assert not hasattr(x, '__clause_element__')
-
table1 = self.table1
compiled = s.compile(dialect=self._length_fixture())
- assert set(compiled._create_result_map()['some_large_named_table__2'][1]).\
+ assert set(
+ compiled._create_result_map()['some_large_named_table__2'][1]).\
issuperset(
[
'some_large_named_table_this_is_the_data_column',
]
)
- assert set(compiled._create_result_map()['some_large_named_table__1'][1]).\
+ assert set(
+ compiled._create_result_map()['some_large_named_table__1'][1]).\
issuperset(
[
'some_large_named_table_this_is_the_primarykey_column',
set(compiled._create_result_map()['this_is_the_data_column'][1]).\
issuperset(['this_is_the_data_column',
s.c.this_is_the_data_column])
- assert \
- set(compiled._create_result_map()['this_is_the_primarykey__1'][1]).\
+ assert set(
+ compiled._create_result_map()['this_is_the_primarykey__1'][1]).\
issuperset(['this_is_the_primarykey_column',
'this_is_the_primarykey__1',
s.c.this_is_the_primarykey_column])
"AS anon_1", dialect=dialect)
compiled = s.compile(dialect=dialect)
- assert set(compiled._create_result_map()['anon_1_this_is_the_data_3'][1]).\
+ assert set(
+ compiled._create_result_map()['anon_1_this_is_the_data_3'][1]).\
issuperset([
'anon_1_this_is_the_data_3',
q.corresponding_column(
table1.c.this_is_the_data_column)
])
- assert set(compiled._create_result_map()['anon_1_this_is_the_prim_1'][1]).\
+ assert set(
+ compiled._create_result_map()['anon_1_this_is_the_prim_1'][1]).\
issuperset([
'anon_1_this_is_the_prim_1',
q.corresponding_column(
def test_bind_param_non_truncated(self):
table1 = self.table1
stmt = table1.insert().values(
- this_is_the_data_column=
- bindparam("this_is_the_long_bindparam_name")
+ this_is_the_data_column=bindparam(
+ "this_is_the_long_bindparam_name")
)
compiled = stmt.compile(dialect=self._length_fixture(length=10))
eq_(
set(compiled._create_result_map()),
set(['tablename_columnn_1', 'tablename_columnn_2'])
)
-
-
"LATERAL generate_series(:generate_series_1, "
"bookcases.bookcase_shelves) AS anon_1 ON true"
)
-
-
-
eq_(getattr(fk2c, k), kw[k])
def test_check_constraint_copy(self):
- r = lambda x: x
+ def r(x): return x
c = CheckConstraint("foo bar",
name='name',
initially=True,
eq_(list(i.columns), [])
assert i.table is t
-
def test_separate_decl_columns(self):
m = MetaData()
t = Table('t', m, Column('x', Integer))
self._loop_test(operator, right)
def _loop_test(self, operator, *arg):
- l = LoopOperate()
+ loop = LoopOperate()
is_(
- operator(l, *arg),
+ operator(loop, *arg),
operator
)
col = Column('x', self.MyType())
self.assert_compile(
- col[8] != None,
+ col[8] != None, # noqa
"(x -> :x_1) IS NOT NULL"
)
col2 = Column('y', Integer())
self.assert_compile(
- col[col2 + 8] != None,
+ col[col2 + 8] != None, # noqa
"(x -> (y + :y_1)) IS NOT NULL",
checkparams={'y_1': 8}
)
def test_operator_precedence_1(self):
self.assert_compile(
- self.table2.select((self.table2.c.field == 5) == None),
+ self.table2.select((self.table2.c.field == 5) == None), # noqa
"SELECT op.field FROM op WHERE (op.field = :field_1) IS NULL")
def test_operator_precedence_2(self):
def test_associativity_22(self):
f = column('f')
- self.assert_compile((f==f) == f, '(f = f) = f')
+ self.assert_compile((f == f) == f, '(f = f) = f')
def test_associativity_23(self):
f = column('f')
- self.assert_compile((f!=f) != f, '(f != f) != f')
+ self.assert_compile((f != f) != f, '(f != f) != f')
class IsDistinctFromTest(fixtures.TestBase, testing.AssertsCompiledSQL):
def test_in_21(self):
self.assert_compile(~self.table1.c.myid.in_(
- select([self.table2.c.otherid])),
- "mytable.myid NOT IN (SELECT myothertable.otherid FROM myothertable)")
+ select([self.table2.c.otherid])),
+ "mytable.myid NOT IN "
+ "(SELECT myothertable.otherid FROM myothertable)")
def test_in_22(self):
self.assert_compile(
)
), "mytable.myid IN ("
"SELECT mytable.myid FROM mytable WHERE mytable.myid = :myid_1 "
- "UNION SELECT mytable.myid FROM mytable WHERE mytable.myid = :myid_2)")
+ "UNION SELECT mytable.myid FROM mytable "
+ "WHERE mytable.myid = :myid_2)")
def test_in_27(self):
# test that putting a select in an IN clause does not
order_by=[self.table2.c.othername],
limit=10, correlate=False)
),
- from_obj=[self.table1.join(self.table2,
- self.table1.c.myid == self.table2.c.otherid)],
+ from_obj=[self.table1.join(
+ self.table2,
+ self.table1.c.myid == self.table2.c.otherid)],
order_by=[self.table1.c.myid]
),
"SELECT mytable.myid, "
"FROM tab1 WHERE tab1.data < :data_1)",
checkparams={'data_1': 10, 'param_1': 5}
)
-
def test_bindparam_detection(self):
dialect = default.DefaultDialect(paramstyle='qmark')
- prep = lambda q: str(sql.text(q).compile(dialect=dialect))
+
+ def prep(q): return str(sql.text(q).compile(dialect=dialect))
def a_eq(got, wanted):
if got != wanted:
is_(bindparam('foo', 'bar').required, False)
is_(bindparam('foo', 'bar', required=True).required, True)
- c = lambda: None
+ def c(): return None
is_(bindparam('foo', callable_=c, required=True).required, True)
is_(bindparam('foo', callable_=c).required, False)
is_(bindparam('foo', callable_=c, required=False).required, False)
found = self._fetchall_sorted(ua.select().execute())
eq_(found, wanted)
+
t1 = t2 = t3 = None
from_obj=[(t1.join(t2).outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
+
metadata = flds = None
from sqlalchemy.sql import compiler
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, eq_
from sqlalchemy import testing
-from sqlalchemy.sql.elements import quoted_name, _truncated_label, _anonymous_label
+from sqlalchemy.sql.elements import (quoted_name,
+ _truncated_label,
+ _anonymous_label)
from sqlalchemy.testing.util import picklers
{'user_id': 9, 'user_name': 'fred'},
)
r = users.select().execute()
- l = []
+ rows = []
for row in r:
- l.append(row)
- eq_(len(l), 3)
+ rows.append(row)
+ eq_(len(rows), 3)
@testing.requires.subqueries
def test_anonymous_rows(self):
users.insert().execute(user_id=8, user_name='ed')
users.insert().execute(user_id=9, user_name='fred')
r = users.select().execute()
- l = []
+ rows = []
for row in r.fetchmany(size=2):
- l.append(row)
- eq_(len(l), 2)
+ rows.append(row)
+ eq_(len(rows), 2)
def test_column_slices(self):
users = self.tables.users
class MyList(object):
- def __init__(self, l):
- self.l = l
+ def __init__(self, data):
+ self.internal_list = data
def __len__(self):
- return len(self.l)
+ return len(self.internal_list)
def __getitem__(self, i):
- return list.__getitem__(self.l, i)
+ return list.__getitem__(self.internal_list, i)
proxy = RowProxy(object(), MyList(['value']), [None], {
'key': (None, None, 0), 0: (None, None, 0)})
r.close()
-
class KeyTargetingTest(fixtures.TablesTest):
run_inserts = 'once'
run_deletes = None
len(result._BufferedRowResultProxy__rowbuffer),
27
)
-
)
-
class ImplicitReturningFlag(fixtures.TestBase):
__backend__ = True
metadata = MetaData(testing.db)
employees_table = Table(
- 'employees', metadata, Column(
- 'employee_id', Integer, Sequence(
- 'employee_id_seq', optional=True), primary_key=True), Column(
- 'name', String(50)), Column(
- 'department', String(1)), )
+ 'employees', metadata,
+ Column(
+ 'employee_id', Integer,
+ Sequence('employee_id_seq', optional=True), primary_key=True),
+ Column('name', String(50)),
+ Column('department', String(1)))
metadata.create_all()
def setup(self):
assert u1.corresponding_column(table1.c.col3) is u1.c.col1
def test_singular_union(self):
- u = union(select([table1.c.col1, table1.c.col2, table1.c.col3]), select(
- [table1.c.col1, table1.c.col2, table1.c.col3]))
+ u = union(select([table1.c.col1, table1.c.col2, table1.c.col3]),
+ select([table1.c.col1, table1.c.col2, table1.c.col3]))
u = union(select([table1.c.col1, table1.c.col2, table1.c.col3]))
assert u.c.col1 is not None
assert u.c.col2 is not None
table1.c.col2,
table1.c.col3,
table1.c.colx,
- null().label('coly')]).union(select([table2.c.col1,
- table2.c.col2,
- table2.c.col3,
- null().label('colx'),
- table2.c.coly])).alias('analias')
+ null().label('coly')]).union(
+ select([table2.c.col1, table2.c.col2, table2.c.col3,
+ null().label('colx'), table2.c.coly])
+ ).alias('analias')
s1 = table1.select(use_labels=True)
s2 = table2.select(use_labels=True)
assert u.corresponding_column(s1.c.table1_col2) is u.c.col2
table1.c.col2,
table1.c.col3,
table1.c.colx,
- null().label('coly')]).union(select([table2.c.col1,
- table2.c.col2,
- table2.c.col3,
- null().label('colx'),
- table2.c.coly])).alias('analias')
+ null().label('coly')]).union(
+ select([table2.c.col1, table2.c.col2, table2.c.col3,
+ null().label('colx'), table2.c.coly])
+ ).alias('analias')
s = select([u])
s1 = table1.select(use_labels=True)
s2 = table2.select(use_labels=True)
table1.c.col2,
table1.c.col3,
table1.c.colx,
- null().label('coly')]).union(select([table2.c.col1,
- table2.c.col2,
- table2.c.col3,
- null().label('colx'),
- table2.c.coly])).alias('analias')
+ null().label('coly')]).union(
+ select([table2.c.col1, table2.c.col2, table2.c.col3,
+ null().label('colx'), table2.c.coly])
+ ).alias('analias')
j1 = table1.join(table2)
assert u.corresponding_column(j1.c.table1_colx) is u.c.colx
assert j1.corresponding_column(u.c.colx) is j1.c.table1_colx
s2 = select([s.label('c')])
self.assert_compile(
s2.select(),
- "SELECT c FROM (SELECT (SELECT (SELECT table1.col1 AS a FROM table1) AS b) AS c)"
+ "SELECT c FROM (SELECT (SELECT ("
+ "SELECT table1.col1 AS a FROM table1) AS b) AS c)"
)
def test_self_referential_select_raises(self):
Column('primary_language', String(50)),
)
managers = Table(
- 'managers', metadata, Column(
- 'person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column(
- 'status', String(30)), Column(
- 'manager_name', String(50)))
+ 'managers', metadata,
+ Column('person_id', Integer, ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('status', String(30)),
+ Column('manager_name', String(50)))
pjoin = \
people.outerjoin(engineers).outerjoin(managers).\
select(use_labels=True).alias('pjoin'
)
- eq_(util.column_set(sql_util.reduce_columns([pjoin.c.people_person_id,
- pjoin.c.engineers_person_id,
- pjoin.c.managers_person_id])),
+ eq_(util.column_set(sql_util.reduce_columns(
+ [pjoin.c.people_person_id, pjoin.c.engineers_person_id,
+ pjoin.c.managers_person_id])),
util.column_set([pjoin.c.people_person_id]))
def test_reduce_aliased_union(self):
select_from(page_table.join(magazine_page_table))
).alias('pjoin')
eq_(util.column_set(sql_util.reduce_columns(
- [pjoin.c.id, pjoin.c.page_id, pjoin.c.magazine_page_id])), util.column_set([pjoin.c.id]))
+ [pjoin.c.id, pjoin.c.page_id, pjoin.c.magazine_page_id])),
+ util.column_set([pjoin.c.id]))
# the first selectable has a CAST, which is a placeholder for
# classified_page.magazine_page_id in the second selectable.
join(classified_page_table))
).alias('pjoin')
eq_(util.column_set(sql_util.reduce_columns(
- [pjoin.c.id, pjoin.c.page_id, pjoin.c.magazine_page_id])), util.column_set([pjoin.c.id]))
+ [pjoin.c.id, pjoin.c.page_id, pjoin.c.magazine_page_id])),
+ util.column_set([pjoin.c.id]))
class DerivedTest(fixtures.TestBase, AssertsExecutionResults):
assert elem == {}
assert b2.left is not bin.left
- assert b3.left is not b2.left is not bin.left
+ assert b3.left is not b2.left and b2.left is not bin.left
assert b4.left is bin.left # since column is immutable
# deannotate copies the element
- assert bin.right is not b2.right is not b3.right is not b4.right
+ assert bin.right is not b2.right and b2.right is not b3.right \
+ and b3.right is not b4.right
def test_annotate_unique_traversal(self):
"""test that items are copied only once during
[Boolean]
)
+
class ForUpdateTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = "default"
'SELECT alias.people_id FROM '
'people AS alias TABLESAMPLE system(1)'
)
-
-from sqlalchemy import Table, Column, String, func, MetaData, select, TypeDecorator, cast
+from sqlalchemy import (Table,
+ Column,
+ String,
+ func,
+ MetaData,
+ select,
+ TypeDecorator,
+ cast)
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import testing
from sqlalchemy.testing import eq_
from sqlalchemy import inspection
from sqlalchemy import exc, types, util, dialects
from sqlalchemy.util import OrderedDict
-for name in dialects.__all__:
+for name in dialects.__all__: # noqa
__import__("sqlalchemy.dialects.%s" % name)
from sqlalchemy.sql import operators, column, table, null
from sqlalchemy.schema import CheckConstraint, AddConstraint
from sqlalchemy.testing import mock
-
-
class AdaptTest(fixtures.TestBase):
def _all_dialect_modules(self):
user_id=4, goofy='fred', goofy2='fred', goofy4=util.u('fred'),
goofy7=util.u('fred'), goofy8=9, goofy9=9)
- l = users.select().order_by(users.c.user_id).execute().fetchall()
+ result = users.select().order_by(users.c.user_id).execute().fetchall()
for assertstr, assertint, assertint2, row in zip(
[
"BIND_INjackBIND_OUT", "BIND_INlalaBIND_OUT",
"BIND_INfredBIND_OUT"],
[1200, 1500, 900],
[1800, 2250, 1350],
- l
+ result
):
for col in list(row)[1:5]:
eq_(col, assertstr)
[('BIND_INd1BIND_OUT', )])
-
class VariantTest(fixtures.TestBase, AssertsCompiledSQL):
def setup(self):
Column("id", Integer, primary_key=True),
Column('someenum', Enum('one', 'two', 'three', native_enum=False)),
Column('someotherenum',
- Enum('one', 'two', 'three',
- create_constraint=False, native_enum=False,
- validate_strings=True)),
+ Enum('one', 'two', 'three',
+ create_constraint=False, native_enum=False,
+ validate_strings=True)),
)
Table(
non_native_enum_table.insert(), {"id": 1, "someenum": None})
eq_(conn.scalar(select([non_native_enum_table.c.someenum])), None)
-
@testing.fails_on(
'mysql',
"The CHECK clause is parsed but ignored by all storage engines.")
"Enum('x', 'y', name='somename', "
"inherit_schema=True, native_enum=False)")
+
binary_table = MyPickleType = metadata = None
'data': LargeBinary, 'data_slice': LargeBinary},
bind=testing.db)
):
- l = stmt.execute().fetchall()
- eq_(stream1, l[0]['data'])
- eq_(stream1[0:100], l[0]['data_slice'])
- eq_(stream2, l[1]['data'])
- eq_(testobj1, l[0]['pickled'])
- eq_(testobj2, l[1]['pickled'])
- eq_(testobj3.moredata, l[0]['mypickle'].moredata)
- eq_(l[0]['mypickle'].stuff, 'this is the right stuff')
+ result = stmt.execute().fetchall()
+ eq_(stream1, result[0]['data'])
+ eq_(stream1[0:100], result[0]['data_slice'])
+ eq_(stream2, result[1]['data'])
+ eq_(testobj1, result[0]['pickled'])
+ eq_(testobj2, result[1]['pickled'])
+ eq_(testobj3.moredata, result[0]['mypickle'].moredata)
+ eq_(result[0]['mypickle'].stuff, 'this is the right stuff')
@testing.requires.binary_comparisons
def test_comparison(self):
bindproc = expr.right.type._cached_literal_processor(non_str_dialect)
eq_(bindproc(expr.right.value), "'five'")
+
class ArrayTest(fixtures.TestBase):
def _myarray_fixture(self):
tab = table('test', column('bvalue', MyTypeDec))
expr = tab.c.bvalue + 6
-
self.assert_compile(
expr,
"test.bvalue || :bvalue_1",
# untyped bind - it gets assigned MyFoobarType
bp = bindparam("foo")
expr = column("foo", MyFoobarType) + bp
- assert bp.type._type_affinity is types.NullType
+ assert bp.type._type_affinity is types.NullType # noqa
assert expr.right.type._type_affinity is MyFoobarType
expr = column("foo", MyFoobarType) + bindparam("foo", type_=Integer)
)
-
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
else:
eq_(val, 46.583)
+
interval_table = metadata = None
boolean_table = self.tables.boolean_table
with testing.db.connect() as conn:
conn.execute(
- "insert into boolean_table (id, unconstrained_value) values (1, 5)"
+ "insert into boolean_table (id, unconstrained_value) "
+ "values (1, 5)"
)
eq_(
True
)
+
class PickleTest(fixtures.TestBase):
def test_eq_comparison(self):
):
assert p1.compare_values(p1.copy_value(obj), obj)
+
meta = None
(9, 'fred'),
(10, 'chuck')
),
- addresses = (
+ addresses=(
('id', 'user_id', 'name', 'email_address'),
(1, 7, 'x', 'jack@bean.com'),
(2, 8, 'x', 'ed@wood.com'),
(4, 8, 'x', 'ed@lala.com'),
(5, 9, 'x', 'fred@fred.com')
),
- dingalings = (
+ dingalings=(
('id', 'address_id', 'data'),
(1, 2, 'ding 1/2'),
(2, 5, 'ding 2/5')
def test_where_empty(self):
table1 = self.tables.mytable
self.assert_compile(
- table1.update().where(
- and_()),
- "UPDATE mytable SET myid=:myid, name=:name, description=:description")
+ table1.update().where(and_()),
+ "UPDATE mytable SET myid=:myid, name=:name, "
+ "description=:description")
self.assert_compile(
table1.update().where(
or_()),
- "UPDATE mytable SET myid=:myid, name=:name, description=:description")
+ "UPDATE mytable SET myid=:myid, name=:name, "
+ "description=:description")
def test_prefix_with(self):
table1 = self.tables.mytable
b3 = bindparam("bar", type_=Integer())
b4 = bindparam("foo", type_=String())
- c1 = lambda: 5 # noqa
- c2 = lambda: 6 # noqa
+ def c1(): return 5
+
+ def c2(): return 6
b5 = bindparam("foo", type_=Integer(), callable_=c1)
b6 = bindparam("foo", type_=Integer(), callable_=c2)