git.ipfire.org Git - thirdparty/sqlalchemy/alembic.git/commitdiff
- Fixed bug in batch mode where a table that had pre-existing indexes
authorMike Bayer <mike_mp@zzzcomputing.com>
Fri, 16 Oct 2015 16:06:01 +0000 (12:06 -0400)
committerMike Bayer <mike_mp@zzzcomputing.com>
Fri, 16 Oct 2015 16:06:01 +0000 (12:06 -0400)
would create the same index on the new table with the same name,
which on SQLite produces a naming conflict as index names are in a
global namespace on that backend.   Batch mode now defers the production
of both existing and new indexes until after the entire table transfer
operation is complete, which also means those indexes no longer take
effect during the INSERT from SELECT section as well; the indexes
are applied in a single step afterwards.
fixes #333

alembic/operations/batch.py
docs/build/changelog.rst
tests/test_batch.py

index 7135e3714a00304f760872232ac930f7403ed0ad..769cd92e9c0fdbc5ddf152316db830a11f0d2ce3 100644 (file)
@@ -128,6 +128,7 @@ class ApplyBatchImpl(object):
         self.named_constraints = {}
         self.unnamed_constraints = []
         self.indexes = {}
+        self.new_indexes = {}
         for const in self.table.constraints:
             if _is_type_bound(const):
                 continue
@@ -167,11 +168,18 @@ class ApplyBatchImpl(object):
                 self._setup_referent(m, const)
             new_table.append_constraint(const_copy)
 
-        for index in self.indexes.values():
-            Index(index.name,
-                  unique=index.unique,
-                  *[new_table.c[col] for col in index.columns.keys()],
-                  **index.kwargs)
+    def _gather_indexes_from_both_tables(self):
+        idx = []
+        idx.extend(self.indexes.values())
+        for index in self.new_indexes.values():
+            idx.append(
+                Index(
+                    index.name,
+                    unique=index.unique,
+                    *[self.new_table.c[col] for col in index.columns.keys()],
+                    **index.kwargs)
+            )
+        return idx
 
     def _setup_referent(self, metadata, constraint):
         spec = constraint.elements[0]._get_colspec()
@@ -227,6 +235,12 @@ class ApplyBatchImpl(object):
                 self.table.name,
                 schema=self.table.schema
             )
+            self.new_table.name = self.table.name
+            try:
+                for idx in self._gather_indexes_from_both_tables():
+                    op_impl.create_index(idx)
+            finally:
+                self.new_table.name = "_alembic_batch_temp"
 
     def alter_column(self, table_name, column_name,
                      nullable=None,
@@ -283,7 +297,7 @@ class ApplyBatchImpl(object):
             raise ValueError("No such constraint: '%s'" % const.name)
 
     def create_index(self, idx):
-        self.indexes[idx.name] = idx
+        self.new_indexes[idx.name] = idx
 
     def drop_index(self, idx):
         try:
index fdc011135638a712d877fcd20b2d35e93cbe513f..ce917516a93cdf539890cbeb33b23de9080c2dfc 100644 (file)
@@ -6,6 +6,19 @@ Changelog
 .. changelog::
     :version: 0.8.3
 
+    .. change::
+      :tags: bug, batch
+      :tickets: 333
+
+      Fixed bug in batch mode where a table that had pre-existing indexes
+      would create the same index on the new table with the same name,
+      which on SQLite produces a naming conflict as index names are in a
+      global namespace on that backend.   Batch mode now defers the production
+      of both existing and new indexes until after the entire table transfer
+      operation is complete, which also means those indexes no longer take
+      effect during the INSERT from SELECT section as well; the indexes
+      are applied in a single step afterwards.
+
     .. change::
       :tags: bug, tests
       :pullreq: bitbucket:47
index 0f0aada3436cacca0d74c28c1c1120766a9ecff2..c846845cdd9a79c6d6eddb381c4754497689c0d4 100644 (file)
@@ -180,8 +180,12 @@ class BatchApplyTest(TestBase):
         create_stmt = re.sub(r'[\n\t]', '', create_stmt)
 
         idx_stmt = ""
-        for idx in impl.new_table.indexes:
+        for idx in impl.indexes.values():
             idx_stmt += str(CreateIndex(idx).compile(dialect=context.dialect))
+        for idx in impl.new_indexes.values():
+            impl.new_table.name = impl.table.name
+            idx_stmt += str(CreateIndex(idx).compile(dialect=context.dialect))
+            impl.new_table.name = '_alembic_batch_temp'
         idx_stmt = re.sub(r'[\n\t]', '', idx_stmt)
 
         if ddl_contains:
@@ -192,8 +196,6 @@ class BatchApplyTest(TestBase):
         expected = [
             create_stmt,
         ]
-        if impl.new_table.indexes:
-            expected.append(idx_stmt)
 
         if schema:
             args = {"schema": "%s." % schema}
@@ -225,6 +227,8 @@ class BatchApplyTest(TestBase):
             'ALTER TABLE %(schema)s_alembic_batch_temp '
             'RENAME TO %(schema)stname' % args
         ])
+        if idx_stmt:
+            expected.append(idx_stmt)
         context.assert_(*expected)
         return impl.new_table
 
@@ -731,11 +735,11 @@ class CopyFromTest(TestBase):
             'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
             'data VARCHAR(50), '
             'x INTEGER, PRIMARY KEY (id))',
-            'CREATE UNIQUE INDEX ix_data ON _alembic_batch_temp (data)',
             'INSERT INTO _alembic_batch_temp (id, data, x) '
             'SELECT foo.id, foo.data, foo.x FROM foo',
             'DROP TABLE foo',
-            'ALTER TABLE _alembic_batch_temp RENAME TO foo'
+            'ALTER TABLE _alembic_batch_temp RENAME TO foo',
+            'CREATE UNIQUE INDEX ix_data ON foo (data)',
         )
 
         context.clear_assertions()
@@ -787,11 +791,11 @@ class CopyFromTest(TestBase):
         context.assert_(
             'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
             'data INTEGER, x INTEGER, PRIMARY KEY (id))',
-            'CREATE UNIQUE INDEX ix_data ON _alembic_batch_temp (data)',
             'INSERT INTO _alembic_batch_temp (id, data, x) SELECT foo.id, '
             'CAST(foo.data AS INTEGER) AS anon_1, foo.x FROM foo',
             'DROP TABLE foo',
-            'ALTER TABLE _alembic_batch_temp RENAME TO foo'
+            'ALTER TABLE _alembic_batch_temp RENAME TO foo',
+            'CREATE UNIQUE INDEX ix_data ON foo (data)',
         )
 
         context.clear_assertions()
@@ -860,6 +864,17 @@ class BatchRoundTripTest(TestBase):
         )
         return nopk
 
+    def _table_w_index_fixture(self):
+        t = Table(
+            't_w_ix', self.metadata,
+            Column('id', Integer, primary_key=True),
+            Column('thing', Integer),
+            Column('data', String(20)),
+        )
+        Index('ix_thing', t.c.thing)
+        t.create(self.conn)
+        return t
+
     def tearDown(self):
         self.metadata.drop_all(self.conn)
         self.conn.close()
@@ -871,6 +886,22 @@ class BatchRoundTripTest(TestBase):
             data
         )
 
+    def test_ix_existing(self):
+        self._table_w_index_fixture()
+
+        with self.op.batch_alter_table("t_w_ix") as batch_op:
+            batch_op.alter_column('data', type_=String(30))
+            batch_op.create_index("ix_data", ["data"])
+
+        insp = Inspector.from_engine(config.db)
+        eq_(
+            sorted(insp.get_indexes('t_w_ix'), key=lambda idx: idx['name']),
+            [
+                {'unique': 0, 'name': 'ix_data', 'column_names': ['data']},
+                {'unique': 0, 'name': 'ix_thing', 'column_names': ['thing']}
+            ]
+        )
+
     def test_fk_points_to_me_auto(self):
         self._test_fk_points_to_me("auto")