git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git / commitdiff

- [bug] Postgresql dialect memoizes that an ENUM of a
  particular name was processed
  during a create/drop sequence.  This allows
  a create/drop sequence to work without any
  calls to "checkfirst", and also means with
  "checkfirst" turned on it only needs to
  check for the ENUM once.  [ticket:2311]

author     Mike Bayer <mike_mp@zzzcomputing.com>
           Sat, 29 Oct 2011 21:38:56 +0000 (17:38 -0400)
committer  Mike Bayer <mike_mp@zzzcomputing.com>
           Sat, 29 Oct 2011 21:38:56 +0000 (17:38 -0400)

CHANGES
lib/sqlalchemy/dialects/postgresql/base.py
lib/sqlalchemy/engine/ddl.py
lib/sqlalchemy/events.py
lib/sqlalchemy/types.py
test/dialect/test_postgresql.py
test/lib/engines.py
test/sql/test_types.py
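
For illustration only (this sketch is not part of the commit; the engine URL, table
and column names are assumptions), the scenario the memoization addresses: a single
named ENUM shared by two tables can now be created and dropped without any
"checkfirst" queries, mirroring the new test_generate_multiple test below.

from sqlalchemy import MetaData, Table, Column, Integer, Enum, create_engine

# assumed engine URL; any Postgresql database applies
engine = create_engine("postgresql://scott:tiger@localhost/test")

metadata = MetaData()

# a single named ENUM type shared by two tables
status_type = Enum("one", "two", "three", name="myenum")

t1 = Table("t1", metadata,
           Column("id", Integer, primary_key=True),
           Column("status", status_type))
t2 = Table("t2", metadata,
           Column("id", Integer, primary_key=True),
           Column("status", status_type))

# CREATE TYPE myenum is emitted exactly once, with no "checkfirst"
# queries against pg_type; likewise DROP TYPE during drop_all()
metadata.create_all(engine, checkfirst=False)
metadata.drop_all(engine, checkfirst=False)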

diff --git a/CHANGES b/CHANGES
index a2a3f2eb7870510f32ccfa662e5901617cbd6522..0ae1d192d1a64d45b9e256fea849295029194578 100644 (file)
--- a/CHANGES
+++ b/CHANGES
@@ -81,6 +81,16 @@ CHANGES
       on dialect, but only works on Postgresql so far.
     Courtesy Manlio Perillo, [ticket:1679]
 
+- postgresql
+   - [bug] Postgresql dialect memoizes that an ENUM of a 
+     particular name was processed
+     during a create/drop sequence.  This allows
+     a create/drop sequence to work without any
+     calls to "checkfirst", and also means with
+     "checkfirst" turned on it only needs to 
+     check for the ENUM once.  [ticket:2311]
+
+
 0.7.3
 =====
 - general
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 3ae60f69624885e845c2e9c3040a2012e4d489fc..e3beeab79c5bbbf338f4c0ed35211322a150ee02 100644 (file)
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -449,15 +449,40 @@ class ENUM(sqltypes.Enum):
             bind.dialect.has_type(bind, self.name, schema=self.schema):
             bind.execute(DropEnumType(self))
 
-    def _on_table_create(self, event, target, bind, **kw):
-        self.create(bind=bind, checkfirst=True)
+    def _check_for_name_in_memos(self, checkfirst, kw):
+        """Look in the 'ddl runner' for 'memos', then
+        note our name in that collection.
+        
+        This to ensure a particular named enum is operated
+        upon only once within any kind of create/drop
+        sequence without relying upon "checkfirst".
 
-    def _on_metadata_create(self, event, target, bind, **kw):
-        if self.metadata is not None:
-            self.create(bind=bind, checkfirst=True)
+        """
+
+        if '_ddl_runner' in kw:
+            ddl_runner = kw['_ddl_runner']
+            if '_pg_enums' in ddl_runner.memo:
+                pg_enums = ddl_runner.memo['_pg_enums']
+            else:
+                pg_enums = ddl_runner.memo['_pg_enums'] = set()
+            present = self.name in pg_enums
+            pg_enums.add(self.name)
+            return present
+        else:
+            return False
+
+    def _on_table_create(self, target, bind, checkfirst, **kw):
+        if not self._check_for_name_in_memos(checkfirst, kw):
+            self.create(bind=bind, checkfirst=checkfirst)
+
+    def _on_metadata_create(self, target, bind, checkfirst, **kw):
+        if self.metadata is not None and \
+            not self._check_for_name_in_memos(checkfirst, kw):
+            self.create(bind=bind, checkfirst=checkfirst)
 
-    def _on_metadata_drop(self, event, target, bind, **kw):
-        self.drop(bind=bind, checkfirst=True)
+    def _on_metadata_drop(self, target, bind, checkfirst, **kw):
+        if not self._check_for_name_in_memos(checkfirst, kw):
+            self.drop(bind=bind, checkfirst=checkfirst)
 
 colspecs = {
     sqltypes.Interval:INTERVAL,
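
The memoization added in _check_for_name_in_memos above boils down to a name set held
on the DDL runner; a standalone sketch of that idiom follows (DDLRunnerStub is an
illustrative stand-in for the SchemaGenerator/SchemaDropper passed as _ddl_runner).

# standalone illustration of the memo pattern used above
class DDLRunnerStub(object):
    def __init__(self):
        self.memo = {}

def name_already_processed(ddl_runner, name):
    pg_enums = ddl_runner.memo.setdefault('_pg_enums', set())
    present = name in pg_enums
    pg_enums.add(name)
    return present

runner = DDLRunnerStub()
assert not name_already_processed(runner, "myenum")  # first use: emit CREATE TYPE
assert name_already_processed(runner, "myenum")      # later uses: skip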
diff --git a/lib/sqlalchemy/engine/ddl.py b/lib/sqlalchemy/engine/ddl.py
index 79958baae4065054c6d080adcefe41446fb47862..d6fdaee2ee7e45c8f7d0890d51c0748fdbddd278 100644 (file)
--- a/lib/sqlalchemy/engine/ddl.py
+++ b/lib/sqlalchemy/engine/ddl.py
@@ -21,6 +21,7 @@ class SchemaGenerator(DDLBase):
         self.tables = tables and set(tables) or None
         self.preparer = dialect.identifier_preparer
         self.dialect = dialect
+        self.memo = {}
 
     def _can_create_table(self, table):
         self.dialect.validate_identifier(table.name)
@@ -56,7 +57,8 @@ class SchemaGenerator(DDLBase):
 
         metadata.dispatch.before_create(metadata, self.connection,
                                     tables=collection,
-                                    checkfirst=self.checkfirst)
+                                    checkfirst=self.checkfirst,
+                                            _ddl_runner=self)
 
         for seq in seq_coll:
             self.traverse_single(seq, create_ok=True)
@@ -66,14 +68,16 @@ class SchemaGenerator(DDLBase):
 
         metadata.dispatch.after_create(metadata, self.connection,
                                     tables=collection,
-                                    checkfirst=self.checkfirst)
+                                    checkfirst=self.checkfirst,
+                                            _ddl_runner=self)
 
     def visit_table(self, table, create_ok=False):
         if not create_ok and not self._can_create_table(table):
             return
 
         table.dispatch.before_create(table, self.connection,
-                                        checkfirst=self.checkfirst)
+                                        checkfirst=self.checkfirst,
+                                            _ddl_runner=self)
 
         for column in table.columns:
             if column.default is not None:
@@ -86,7 +90,8 @@ class SchemaGenerator(DDLBase):
                 self.traverse_single(index)
 
         table.dispatch.after_create(table, self.connection,
-                                        checkfirst=self.checkfirst)
+                                        checkfirst=self.checkfirst,
+                                            _ddl_runner=self)
 
     def visit_sequence(self, sequence, create_ok=False):
         if not create_ok and not self._can_create_sequence(sequence):
@@ -104,6 +109,7 @@ class SchemaDropper(DDLBase):
         self.tables = tables
         self.preparer = dialect.identifier_preparer
         self.dialect = dialect
+        self.memo = {}
 
     def visit_metadata(self, metadata):
         if self.tables:
@@ -117,7 +123,8 @@ class SchemaDropper(DDLBase):
 
         metadata.dispatch.before_drop(metadata, self.connection,
                                             tables=collection,
-                                            checkfirst=self.checkfirst)
+                                            checkfirst=self.checkfirst,
+                                            _ddl_runner=self)
 
         for table in collection:
             self.traverse_single(table, drop_ok=True)
@@ -127,7 +134,8 @@ class SchemaDropper(DDLBase):
 
         metadata.dispatch.after_drop(metadata, self.connection,
                                             tables=collection,
-                                            checkfirst=self.checkfirst)
+                                            checkfirst=self.checkfirst,
+                                            _ddl_runner=self)
 
     def _can_drop_table(self, table):
         self.dialect.validate_identifier(table.name)
@@ -155,7 +163,8 @@ class SchemaDropper(DDLBase):
             return
 
         table.dispatch.before_drop(table, self.connection,
-                                    checkfirst=self.checkfirst)
+                                    checkfirst=self.checkfirst,
+                                            _ddl_runner=self)
 
         for column in table.columns:
             if column.default is not None:
@@ -164,7 +173,8 @@ class SchemaDropper(DDLBase):
         self.connection.execute(schema.DropTable(table))
 
         table.dispatch.after_drop(table, self.connection,
-                                        checkfirst=self.checkfirst)
+                                        checkfirst=self.checkfirst,
+                                            _ddl_runner=self)
 
     def visit_sequence(self, sequence, drop_ok=False):
         if not drop_ok and not self._can_drop_sequence(sequence):
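
With _ddl_runner now threaded through every dispatch above, the create path for the
shared-ENUM case runs roughly as follows (a commented sketch of the call sequence,
not literal code from the commit).

# rough call sequence for metadata.create_all(engine, checkfirst=False)
# where t1 and t2 both carry the same named ENUM:
#
#   SchemaGenerator.visit_metadata(metadata)
#     metadata.dispatch.before_create(..., checkfirst=False, _ddl_runner=self)
#     visit_table(t1)
#       t1.dispatch.before_create(..., checkfirst=False, _ddl_runner=self)
#         ENUM._on_table_create(): "myenum" not yet in _ddl_runner.memo['_pg_enums']
#           -> CREATE TYPE myenum AS ENUM (...) emitted once
#     visit_table(t2)
#       t2.dispatch.before_create(..., checkfirst=False, _ddl_runner=self)
#         ENUM._on_table_create(): "myenum" already memoized -> CREATE TYPE skipped
#     metadata.dispatch.after_create(..., checkfirst=False, _ddl_runner=self)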
diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py
index f7de851cc59639b023174d4bb4a3a0e981f1f9d4..eee2a7557f4fd0cb49a7374fa33f2ae562bf9b50 100644 (file)
--- a/lib/sqlalchemy/events.py
+++ b/lib/sqlalchemy/events.py
@@ -78,10 +78,11 @@ class DDLEvents(event.Events):
         :param connection: the :class:`.Connection` where the
          CREATE statement or statements will be emitted.
         :param \**kw: additional keyword arguments relevant
-         to the event.  Currently this includes the ``tables``
-         argument in the case of a :class:`.MetaData` object,
-         which is the list of :class:`.Table` objects for which
-         CREATE will be emitted.
+         to the event.  The contents of this dictionary
+         may vary across releases, and include the
+         list of tables being generated for a metadata-level
+         event, the checkfirst flag, and other 
+         elements used by internal events.
 
         """
 
@@ -93,10 +94,11 @@ class DDLEvents(event.Events):
         :param connection: the :class:`.Connection` where the
          CREATE statement or statements have been emitted.
         :param \**kw: additional keyword arguments relevant
-         to the event.  Currently this includes the ``tables``
-         argument in the case of a :class:`.MetaData` object,
-         which is the list of :class:`.Table` objects for which
-         CREATE has been emitted.
+         to the event.  The contents of this dictionary
+         may vary across releases, and include the
+         list of tables being generated for a metadata-level
+         event, the checkfirst flag, and other 
+         elements used by internal events.
 
         """
 
@@ -108,10 +110,11 @@ class DDLEvents(event.Events):
         :param connection: the :class:`.Connection` where the
          DROP statement or statements will be emitted.
         :param \**kw: additional keyword arguments relevant
-         to the event.  Currently this includes the ``tables``
-         argument in the case of a :class:`.MetaData` object,
-         which is the list of :class:`.Table` objects for which
-         DROP will be emitted.
+         to the event.  The contents of this dictionary
+         may vary across releases, and include the
+         list of tables being generated for a metadata-level
+         event, the checkfirst flag, and other 
+         elements used by internal events.
 
         """
 
@@ -123,10 +126,11 @@ class DDLEvents(event.Events):
         :param connection: the :class:`.Connection` where the
          DROP statement or statements have been emitted.
         :param \**kw: additional keyword arguments relevant
-         to the event.  Currently this includes the ``tables``
-         argument in the case of a :class:`.MetaData` object,
-         which is the list of :class:`.Table` objects for which
-         DROP has been emitted.
+         to the event.  The contents of this dictionary
+         may vary across releases, and include the
+         list of tables being generated for a metadata-level
+         event, the checkfirst flag, and other 
+         elements used by internal events.
 
         """
 
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index af98dd8139f951b8712de3dc9a80749300610011..ae15a5d169035c95738256ab8ea8367c4cfe9278 100644 (file)
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -30,7 +30,7 @@ from sqlalchemy.util import pickle
 from sqlalchemy.util.compat import decimal
 from sqlalchemy.sql.visitors import Visitable
 from sqlalchemy import util
-from sqlalchemy import processors, events
+from sqlalchemy import processors, events, event
 import collections
 default = util.importlater("sqlalchemy.engine", "default")
 
@@ -1686,26 +1686,45 @@ class SchemaType(events.SchemaEventTarget):
         self.schema = kw.pop('schema', None)
         self.metadata = kw.pop('metadata', None)
         if self.metadata:
-            self.metadata.append_ddl_listener('before-create',
-                    util.portable_instancemethod(self._on_metadata_create))
-            self.metadata.append_ddl_listener('after-drop',
-                    util.portable_instancemethod(self._on_metadata_drop))
+            event.listen(
+                self.metadata,
+                "before_create",
+                util.portable_instancemethod(self._on_metadata_create)
+            )
+            event.listen(
+                self.metadata,
+                "after_drop",
+                util.portable_instancemethod(self._on_metadata_drop)
+            )
 
     def _set_parent(self, column):
         column._on_table_attach(util.portable_instancemethod(self._set_table))
 
     def _set_table(self, column, table):
-        table.append_ddl_listener('before-create',
-                                  util.portable_instancemethod(
-                                        self._on_table_create))
-        table.append_ddl_listener('after-drop',
-                                  util.portable_instancemethod(
-                                        self._on_table_drop))
+        event.listen(
+            table,
+            "before_create",
+              util.portable_instancemethod(
+                    self._on_table_create)
+        )
+        event.listen(
+            table,
+            "after_drop",
+            util.portable_instancemethod(self._on_table_drop)
+        )
         if self.metadata is None:
-            table.metadata.append_ddl_listener('before-create',
-                    util.portable_instancemethod(self._on_metadata_create))
-            table.metadata.append_ddl_listener('after-drop',
-                    util.portable_instancemethod(self._on_metadata_drop))
+            # TODO: what's the difference between self.metadata
+            # and table.metadata here ?
+            event.listen(
+                table.metadata,
+                "before_create",
+                util.portable_instancemethod(self._on_metadata_create)
+            )
+            event.listen(
+                table.metadata,
+                "after_drop",
+                util.portable_instancemethod(self._on_metadata_drop)
+            )
 
     @property
     def bind(self):
@@ -1729,25 +1748,25 @@ class SchemaType(events.SchemaEventTarget):
         if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
             t.drop(bind=bind, checkfirst=checkfirst)
 
-    def _on_table_create(self, event, target, bind, **kw):
+    def _on_table_create(self, target, bind, **kw):
         t = self.dialect_impl(bind.dialect)
         if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
-            t._on_table_create(event, target, bind, **kw)
+            t._on_table_create(target, bind, **kw)
 
-    def _on_table_drop(self, event, target, bind, **kw):
+    def _on_table_drop(self, target, bind, **kw):
         t = self.dialect_impl(bind.dialect)
         if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
-            t._on_table_drop(event, target, bind, **kw)
+            t._on_table_drop(target, bind, **kw)
 
-    def _on_metadata_create(self, event, target, bind, **kw):
+    def _on_metadata_create(self, target, bind, **kw):
         t = self.dialect_impl(bind.dialect)
         if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
-            t._on_metadata_create(event, target, bind, **kw)
+            t._on_metadata_create(target, bind, **kw)
 
-    def _on_metadata_drop(self, event, target, bind, **kw):
+    def _on_metadata_drop(self, target, bind, **kw):
         t = self.dialect_impl(bind.dialect)
         if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
-            t._on_metadata_drop(event, target, bind, **kw)
+            t._on_metadata_drop(target, bind, **kw)
 
 class Enum(String, SchemaType):
     """Generic Enum Type.
diff --git a/test/dialect/test_postgresql.py b/test/dialect/test_postgresql.py
index 487139102dfc93a6af63cf247fb78a886987abd0..b4b7e818cd765b4f0315aee301520b7d13521f96 100644 (file)
--- a/test/dialect/test_postgresql.py
+++ b/test/dialect/test_postgresql.py
@@ -450,6 +450,30 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
         finally:
             metadata.drop_all(testing.db)
 
+    @testing.provide_metadata
+    def test_generate_multiple(self):
+        """Test that the same enum twice only generates once
+        for the create_all() call, without using checkfirst.
+        
+        A 'memo' collection held by the DDL runner
+        now handles this.
+        
+        """
+        metadata = self.metadata
+
+        e1 = Enum('one', 'two', 'three', 
+                            name="myenum")
+        t1 = Table('e1', metadata,
+            Column('c1', e1)
+        )
+
+        t2 = Table('e2', metadata,
+            Column('c1', e1)
+        )
+
+        metadata.create_all(checkfirst=False)
+        metadata.drop_all(checkfirst=False)
+
     def test_non_native_dialect(self):
         engine = engines.testing_engine()
         engine.connect()
diff --git a/test/lib/engines.py b/test/lib/engines.py
index 649c3e31afa72b96a22cfb0ba05baf0ef4e065ed..c8a44dc44eb577c268b09b2df92d1d8855ca09e6 100644 (file)
--- a/test/lib/engines.py
+++ b/test/lib/engines.py
@@ -247,12 +247,18 @@ def mock_engine(dialect_name=None):
     def assert_sql(stmts):
         recv = [re.sub(r'[\n\t]', '', str(s)) for s in buffer]
         assert  recv == stmts, recv
-
+    def print_sql():
+        d = engine.dialect
+        return "\n".join(
+            str(s.compile(dialect=d)) 
+            for s in engine.mock
+        )
     engine = create_engine(dialect_name + '://',
                            strategy='mock', executor=executor)
     assert not hasattr(engine, 'mock')
     engine.mock = buffer
     engine.assert_sql = assert_sql
+    engine.print_sql = print_sql
     return engine
 
 class DBAPIProxyCursor(object):
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index 50cb0ba06684dcdbde9be5ec7eec17a572794b58..987e97f9bcf7d659d5315ddbf628abe5f4d6a925 100644 (file)
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -956,6 +956,20 @@ class EnumTest(fixtures.TestBase):
             {'id':4, 'someenum':'four'}
         )
 
+    def test_mock_engine_no_prob(self):
+        """ensure no 'checkfirst' queries are run when enums
+        are created with checkfirst=False"""
+
+        e = engines.mock_engine()
+        t = Table('t1', MetaData(),
+            Column('x', Enum("x", "y", name="pge"))
+        )
+        t.create(e, checkfirst=False)
+        # basically looking for the start of 
+        # the constraint, or the ENUM def itself,
+        # depending on backend.
+        assert "('x'," in e.print_sql()
+
 class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
     __excluded_on__ = (
         ('mysql', '<', (4, 1, 1)),  # screwy varbinary types