that dictionary operations (assuming simple string keys) can operate upon a large \r
collection without loading the full collection at once.\r
\r
-This is something that may eventually be added as a feature to dynamic_loader() itself.\r
-\r
Similar approaches could be taken towards sets and dictionaries with non-string keys \r
although the hash policy of the members would need to be distilled into a filter() criterion.\r
\r
"""\r
\r
-class MyProxyDict(object):\r
+class ProxyDict(object):\r
def __init__(self, parent, collection_name, childclass, keyname):\r
self.parent = parent\r
self.collection_name = collection_name\r
self.childclass = childclass\r
self.keyname = keyname\r
- \r
+ \r
+ @property\r
def collection(self):\r
return getattr(self.parent, self.collection_name)\r
- collection = property(collection)\r
\r
def keys(self):\r
descriptor = getattr(self.childclass, self.keyname)\r
\r
from sqlalchemy.ext.declarative import declarative_base\r
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey\r
-from sqlalchemy.orm import sessionmaker, dynamic_loader\r
+from sqlalchemy.orm import sessionmaker, relation\r
\r
-Base = declarative_base(engine=create_engine('sqlite://'))\r
+engine = create_engine('sqlite://', echo=True)\r
+Base = declarative_base(engine)\r
\r
-class MyParent(Base):\r
+class Parent(Base):\r
__tablename__ = 'parent'\r
id = Column(Integer, primary_key=True)\r
name = Column(String(50))\r
- _collection = dynamic_loader("MyChild", cascade="all, delete-orphan")\r
+ _collection = relation("Child", lazy="dynamic", cascade="all, delete-orphan")\r
\r
+ @property\r
def child_map(self):\r
- return MyProxyDict(self, '_collection', MyChild, 'key')\r
- child_map = property(child_map)\r
+ return ProxyDict(self, '_collection', Child, 'key')\r
\r
-class MyChild(Base):\r
+class Child(Base):\r
__tablename__ = 'child'\r
id = Column(Integer, primary_key=True)\r
key = Column(String(50))\r
parent_id = Column(Integer, ForeignKey('parent.id'))\r
\r
+ def __repr__(self):\r
+ return "Child(key=%r)" % self.key\r
\r
Base.metadata.create_all()\r
\r
sess = sessionmaker()()\r
\r
-p1 = MyParent(name='p1')\r
+p1 = Parent(name='p1')\r
sess.add(p1)\r
\r
-p1.child_map['k1'] = k1 = MyChild(key='k1')\r
-p1.child_map['k2'] = k2 = MyChild(key='k2')\r
+print "\n---------begin setting nodes, autoflush occurs\n"\r
+p1.child_map['k1'] = Child(key='k1')\r
+p1.child_map['k2'] = Child(key='k2')\r
+\r
+# this will autoflush the current map.\r
+# ['k1', 'k2']\r
+print "\n---------print keys - flushes first\n"\r
+print p1.child_map.keys()\r
\r
-assert p1.child_map.keys() == ['k1', 'k2']\r
+# k1\r
+print "\n---------print 'k1' node\n"\r
+print p1.child_map['k1']\r
\r
-assert p1.child_map['k1'] is k1\r
+print "\n---------update 'k2' node - must find existing, and replace\n"\r
+p1.child_map['k2'] = Child(key='k2')\r
\r
-p1.child_map['k2'] = k2b = MyChild(key='k2')\r
-assert p1.child_map['k2'] is k2b\r
+print "\n---------print 'k2' key - flushes first\n"\r
+# k2\r
+print p1.child_map['k2']\r
\r
-assert sess.query(MyChild).all() == [k1, k2b]\r
+print "\n---------print all child nodes\n"\r
+# [k1, k2b]\r
+print sess.query(Child).all()\r
\r