merged pickleable schema items from trunk r2817
author Mike Bayer <mike_mp@zzzcomputing.com>
Sat, 30 Jun 2007 00:32:11 +0000 (00:32 +0000)
committer Mike Bayer <mike_mp@zzzcomputing.com>
Sat, 30 Jun 2007 00:32:11 +0000 (00:32 +0000)
CHANGES
lib/sqlalchemy/schema.py
lib/sqlalchemy/types.py
test/engine/reflection.py

diff --git a/CHANGES b/CHANGES
index 2bb9414053cfbe11c3c4ab9769e9af564345ade4..b40adbe94b75700e2ff0a3c7fd08a0d286f391ff 100644
--- a/CHANGES
+++ b/CHANGES
       to polymorphic mappers that are using a straight "outerjoin"
       clause
 - sql
+    - MetaData and all SchemaItems are safe to use with pickle.  Slow
+      table reflections can be dumped into a pickled file to be reused later.
+      Just reconnect the engine to the metadata after unpickling. [ticket:619]
     - fixed grouping of compound selects to give correct results. will break
       on sqlite in some cases, but those cases were producing incorrect
       results anyway, sqlite doesn't support grouped compound selects
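
To make the CHANGES entry above concrete, here is a minimal sketch of the reflect-once / pickle / reconnect workflow it describes, using only calls that appear in the test changes below (MetaData(engine=...), autoload=True, MetaData.connect(), metadata.tables). The SQLite URL, the file name and the 'users' table are assumptions for illustration only:

    import pickle
    from sqlalchemy import MetaData, Table, create_engine

    engine = create_engine('sqlite:///app.db')     # hypothetical database URL

    # reflect once -- potentially slow against a large schema
    meta = MetaData(engine=engine)
    users = Table('users', meta, autoload=True)    # 'users' is assumed to exist

    # dump the reflected schema; the engine reference is not part of the pickle
    f = open('schema.pickle', 'wb')
    pickle.dump(meta, f)
    f.close()

    # later, possibly in another process: reload and reconnect
    f = open('schema.pickle', 'rb')
    meta2 = pickle.load(f)
    f.close()

    assert meta2.engine is None                    # comes back disconnected
    meta2.connect(engine)                          # reattach before using the tables
    users2 = meta2.tables['users']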
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index 713064d9cc1eb9daa37a5ef0f3dd800e1f94dad8..f67278d7c686b08671e305e5d029c010ed2e4717 100644
--- a/lib/sqlalchemy/schema.py
+++ b/lib/sqlalchemy/schema.py
@@ -1066,6 +1066,14 @@ class MetaData(SchemaItem):
         if engine or url:
             self.connect(engine or url, **kwargs)
 
+    def __getstate__(self):
+        return {'tables':self.tables, 'casesensitive':self._case_sensitive_setting}
+
+    def __setstate__(self, state):
+        self.tables = state['tables']
+        self._case_sensitive_setting = state['casesensitive']
+        self._engine = None
+        
     def is_bound(self):
         """return True if this MetaData is bound to an Engine."""
         return self._engine is not None
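
The hooks added above mean an unpickled MetaData comes back unbound: __getstate__ keeps only the tables and the case-sensitivity setting, and __setstate__ restores _engine as None. A short sketch of that effect, assuming an in-memory SQLite engine and a throwaway table name:

    import pickle
    from sqlalchemy import MetaData, Table, Column, Integer, create_engine

    engine = create_engine('sqlite://')      # in-memory database, for illustration
    meta = MetaData(engine=engine)
    t = Table('t', meta, Column('id', Integer, primary_key=True))

    copy = pickle.loads(pickle.dumps(meta))
    assert not copy.is_bound()               # __setstate__ left the engine as None
    assert 't' in copy.tables                # schema itself survived the round trip
    copy.connect(engine)                     # rebind before emitting any SQL
    assert copy.is_bound()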
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index f13a4114dc1fe04b9be3875bb0edd1aef1b94704..b7f9e6e9926b81c109a076adb7753d744da41afb 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -52,7 +52,12 @@ class TypeEngine(AbstractType):
             return self._impl_dict.setdefault(dialect, dialect.type_descriptor(self))
         except KeyError:
             return self._impl_dict.setdefault(dialect, dialect.type_descriptor(self))
-
+    
+    def __getstate__(self):
+        d = self.__dict__.copy()
+        d['_impl_dict'] = {}
+        return d
+        
     def get_col_spec(self):
         raise NotImplementedError()
 
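
TypeEngine.__getstate__ above empties _impl_dict, the per-dialect cache of type implementations, so cached dialect objects never enter the pickle and the cache is simply rebuilt on demand afterwards. An illustrative sketch of that pattern with made-up names (CachedThing and impl_for are not SQLAlchemy API):

    import pickle

    class CachedThing(object):
        """Illustrative only -- not SQLAlchemy code."""

        def __init__(self):
            self._impl_dict = {}            # lazily populated cache

        def impl_for(self, key, factory):
            # same shape as TypeEngine.dialect_impl(): look up, else build and memoize
            try:
                return self._impl_dict[key]
            except KeyError:
                return self._impl_dict.setdefault(key, factory())

        def __getstate__(self):
            d = self.__dict__.copy()
            d['_impl_dict'] = {}            # drop cached impls; rebuilt on demand later
            return d

    thing = CachedThing()
    thing.impl_for('sqlite', dict)          # populate the cache with something
    copy = pickle.loads(pickle.dumps(thing))
    assert copy._impl_dict == {}            # cache starts empty after unpickling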
diff --git a/test/engine/reflection.py b/test/engine/reflection.py
index 66d5a5f04c53ae8227d8ef9a0fb738f16605ed86..1b6c73e28b2326deb54fb1aaad38ba667c23007c 100644
--- a/test/engine/reflection.py
+++ b/test/engine/reflection.py
@@ -1,6 +1,6 @@
 from testbase import PersistTest
 import testbase
-
+import pickle
 import sqlalchemy.ansisql as ansisql
 
 from sqlalchemy import *
@@ -109,7 +109,7 @@ class ReflectionTest(PersistTest):
         finally:
             addresses.drop()
             users.drop()
-            
+    
     def testoverridecolumns(self):
         """test that you can override columns which contain foreign keys to other reflected tables"""
         meta = BoundMetaData(testbase.db)
@@ -336,50 +336,80 @@ class ReflectionTest(PersistTest):
         finally:
             meta.drop_all()
 
-            
-    def testtometadata(self):
+    def test_to_metadata(self):
         meta = MetaData()
-        meta2 = MetaData()
         
         table = Table('mytable', meta,
             Column('myid', Integer, primary_key=True),
-            Column('name', String, nullable=False),
+            Column('name', String(40), nullable=False),
             Column('description', String(30), CheckConstraint("description='hi'")),
-            UniqueConstraint('name')
+            UniqueConstraint('name'),
+            mysql_engine='InnoDB'
         )
         
         table2 = Table('othertable', meta,
             Column('id', Integer, primary_key=True),
-            Column('myid', Integer, ForeignKey('mytable.myid'))
+            Column('myid', Integer, ForeignKey('mytable.myid')),
+            mysql_engine='InnoDB'
             )
-            
-        
-        table_c = table.tometadata(meta2)
-        table2_c = table2.tometadata(meta2)
-
-        assert table is not table_c
-        assert table_c.c.myid.primary_key
-        assert not table_c.c.name.nullable 
-        assert table_c.c.description.nullable 
-        assert table.primary_key is not table_c.primary_key
-        assert [x.name for x in table.primary_key] == [x.name for x in table_c.primary_key]
-        assert list(table2_c.c.myid.foreign_keys)[0].column is table_c.c.myid
-        assert list(table2_c.c.myid.foreign_keys)[0].column is not table.c.myid
-        for c in table_c.c.description.constraints:
-            if isinstance(c, CheckConstraint):
-                break
-        else:
-            assert False
-        assert c.sqltext=="description='hi'"
         
-        for c in table_c.constraints:
-            if isinstance(c, UniqueConstraint):
-                break
-        else:
-            assert False
-        assert c.columns.contains_column(table_c.c.name)
-        assert not c.columns.contains_column(table.c.name)
+        def test_to_metadata():
+            meta2 = MetaData()
+            table_c = table.tometadata(meta2)
+            table2_c = table2.tometadata(meta2)
+            return (table_c, table2_c)
+            
+        def test_pickle():
+            meta.connect(testbase.db)
+            meta2 = pickle.loads(pickle.dumps(meta))
+            assert meta2.engine is None
+            return (meta2.tables['mytable'], meta2.tables['othertable'])
+
+        def test_pickle_via_reflect():
+            # this is the most common use case, pickling the results of a
+            # database reflection
+            meta2 = MetaData(engine=testbase.db)
+            t1 = Table('mytable', meta2, autoload=True)
+            t2 = Table('othertable', meta2, autoload=True)
+            meta3 = pickle.loads(pickle.dumps(meta2))
+            assert meta3.engine is None
+            assert meta3.tables['mytable'] is not t1
+            return (meta3.tables['mytable'], meta3.tables['othertable'])
+            
+        meta.create_all(testbase.db)    
+        try:
+            for test, has_constraints in ((test_to_metadata, True), (test_pickle, True), (test_pickle_via_reflect, False)):
+                table_c, table2_c = test()
+                assert table is not table_c
+                assert table_c.c.myid.primary_key
+                assert isinstance(table_c.c.myid.type, Integer)
+                assert isinstance(table_c.c.name.type, String)
+                assert not table_c.c.name.nullable 
+                assert table_c.c.description.nullable 
+                assert table.primary_key is not table_c.primary_key
+                assert [x.name for x in table.primary_key] == [x.name for x in table_c.primary_key]
+                assert list(table2_c.c.myid.foreign_keys)[0].column is table_c.c.myid
+                assert list(table2_c.c.myid.foreign_keys)[0].column is not table.c.myid
+                
+                # constraints dont get reflected for any dialect right now
+                if has_constraints:
+                    for c in table_c.c.description.constraints:
+                        if isinstance(c, CheckConstraint):
+                            break
+                    else:
+                        assert False
+                    assert c.sqltext=="description='hi'"
         
+                    for c in table_c.constraints:
+                        if isinstance(c, UniqueConstraint):
+                            break
+                    else:
+                        assert False
+                    assert c.columns.contains_column(table_c.c.name)
+                    assert not c.columns.contains_column(table.c.name)
+        finally:
+            meta.drop_all(testbase.db)
+            
     # mysql throws its own exception for no such table, resulting in 
     # a sqlalchemy.SQLError instead of sqlalchemy.NoSuchTableError.
     # this could probably be fixed at some point.