summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJakub Libosvar <libosvar@redhat.com>2014-10-31 15:49:40 +0100
committerJakub Libosvar <libosvar@redhat.com>2014-11-24 09:37:22 +0000
commit467a4496cfcd74b473730d2baa667fd633e4d0b3 (patch)
tree1e45f9daa6f7fb23e28b06d0275ee841427bfba9
parent8ea580e39d51ecdf796b473e6220bd3d76654511 (diff)
downloadneutron-467a4496cfcd74b473730d2baa667fd633e4d0b3.tar.gz
Drop and recreate FK if adding new PK to routerl3bindings
Since MySQL 5.6, FK columns cannot be altered. As a workaround, we can drop the FK before the ALTER COLUMN and then recreate it. Change-Id: Icd1b3075cf29a6b0c477a4ddea2e6ebee91baef8 Closes-Bug: 1384555 (cherry picked from commit b5f1cc48b877caaebc944154d56a8bb4060aa9b0)
-rw-r--r--neutron/db/migration/__init__.py34
-rw-r--r--neutron/db/migration/alembic_migrations/versions/31d7f831a591_add_constraint_for_routerid.py115
2 files changed, 58 insertions, 91 deletions
diff --git a/neutron/db/migration/__init__.py b/neutron/db/migration/__init__.py
index 55c1443f1f..86cce385af 100644
--- a/neutron/db/migration/__init__.py
+++ b/neutron/db/migration/__init__.py
@@ -12,11 +12,13 @@
# License for the specific language governing permissions and limitations
# under the License.
+import contextlib
import functools
from alembic import context
from alembic import op
import sqlalchemy as sa
+from sqlalchemy.engine import reflection
def skip_if_offline(func):
@@ -122,3 +124,35 @@ def create_table_if_not_exist_psql(table_name, values):
"WHERE NOT table_exist(%(name)r);" %
{'name': table_name,
'columns': values})
+
+
+def remove_foreign_keys(table, foreign_keys):
+ for fk in foreign_keys:
+ op.drop_constraint(
+ name=fk['name'],
+ table_name=table,
+ type_='foreignkey'
+ )
+
+
+def create_foreign_keys(table, foreign_keys):
+ for fk in foreign_keys:
+ op.create_foreign_key(
+ name=fk['name'],
+ source=table,
+ referent=fk['referred_table'],
+ local_cols=fk['constrained_columns'],
+ remote_cols=fk['referred_columns'],
+ ondelete='CASCADE'
+ )
+
+
+@contextlib.contextmanager
+def remove_fks_from_table(table):
+ try:
+ inspector = reflection.Inspector.from_engine(op.get_bind())
+ foreign_keys = inspector.get_foreign_keys(table)
+ remove_foreign_keys(table, foreign_keys)
+ yield
+ finally:
+ create_foreign_keys(table, foreign_keys)
diff --git a/neutron/db/migration/alembic_migrations/versions/31d7f831a591_add_constraint_for_routerid.py b/neutron/db/migration/alembic_migrations/versions/31d7f831a591_add_constraint_for_routerid.py
index 0ba5f58be7..232156748e 100644
--- a/neutron/db/migration/alembic_migrations/versions/31d7f831a591_add_constraint_for_routerid.py
+++ b/neutron/db/migration/alembic_migrations/versions/31d7f831a591_add_constraint_for_routerid.py
@@ -27,22 +27,12 @@ down_revision = '37f322991f59'
from alembic import op
import sqlalchemy as sa
-from sqlalchemy.engine import reflection
+
+from neutron.db import migration
TABLE_NAME = 'routerl3agentbindings'
PK_NAME = 'pk_routerl3agentbindings'
-fk_names = {'postgresql':
- {'router_id':
- 'routerl3agentbindings_router_id_fkey',
- 'l3_agent_id':
- 'routerl3agentbindings_l3_agent_id_fkey'},
- 'mysql':
- {'router_id':
- 'routerl3agentbindings_ibfk_2',
- 'l3_agent_id':
- 'routerl3agentbindings_ibfk_1'}}
-
def upgrade():
# In order to sanitize the data during migration,
@@ -66,60 +56,32 @@ def upgrade():
op.drop_column(TABLE_NAME, 'id')
- # DB2 doesn't support nullable column in primary key
- if context.bind.dialect.name == 'ibm_db_sa':
- op.alter_column(
- table_name=TABLE_NAME,
- column_name='router_id',
- nullable=False
- )
- op.alter_column(
+ with migration.remove_fks_from_table(TABLE_NAME):
+ # DB2 doesn't support nullable column in primary key
+ if context.bind.dialect.name == 'ibm_db_sa':
+ op.alter_column(
+ table_name=TABLE_NAME,
+ column_name='router_id',
+ nullable=False
+ )
+ op.alter_column(
+ table_name=TABLE_NAME,
+ column_name='l3_agent_id',
+ nullable=False
+ )
+
+ op.create_primary_key(
+ name=PK_NAME,
table_name=TABLE_NAME,
- column_name='l3_agent_id',
- nullable=False
+ cols=['router_id', 'l3_agent_id']
)
- op.create_primary_key(
- name=PK_NAME,
- table_name=TABLE_NAME,
- cols=['router_id', 'l3_agent_id']
- )
-
def downgrade():
context = op.get_context()
dialect = context.bind.dialect.name
- # Drop the existed foreign key constraints
- # In order to perform primary key changes
- db2fks = {}
- if dialect == 'ibm_db_sa':
- # NOTE(mriedem): In DB2 the foreign key names are randomly generated
- # if you didn't originally explicitly name them, so the name is like
- # SQLxxxxx where the suffix is a random integer. Therefore we go
- # through and just drop all of the foreign keys and save them so we
- # can re-create them later after the primary key is dropped.
- inspector = reflection.Inspector.from_engine(op.get_bind().engine)
- db2fks = inspector.get_foreign_keys(TABLE_NAME)
- for fk in db2fks:
- op.drop_constraint(
- name=fk.get('name'),
- table_name=TABLE_NAME,
- type_='foreignkey'
- )
- else:
- op.drop_constraint(
- name=fk_names[dialect]['l3_agent_id'],
- table_name=TABLE_NAME,
- type_='foreignkey'
- )
- op.drop_constraint(
- name=fk_names[dialect]['router_id'],
- table_name=TABLE_NAME,
- type_='foreignkey'
- )
-
op.drop_constraint(
name=PK_NAME,
table_name=TABLE_NAME,
@@ -139,38 +101,9 @@ def downgrade():
nullable=False
)
- op.create_primary_key(
- name=PK_NAME,
- table_name=TABLE_NAME,
- cols=['id']
- )
-
- # Restore the foreign key constraints
- if dialect == 'ibm_db_sa':
- for fk in db2fks:
- op.create_foreign_key(
- name=fk.get('name'),
- source=TABLE_NAME,
- referent=fk.get('referred_table'),
- local_cols=fk.get('constrained_columns'),
- remote_cols=fk.get('referred_columns'),
- ondelete='CASCADE'
- )
- else:
- op.create_foreign_key(
- name=fk_names[dialect]['router_id'],
- source=TABLE_NAME,
- referent='routers',
- local_cols=['router_id'],
- remote_cols=['id'],
- ondelete='CASCADE'
- )
-
- op.create_foreign_key(
- name=fk_names[dialect]['l3_agent_id'],
- source=TABLE_NAME,
- referent='agents',
- local_cols=['l3_agent_id'],
- remote_cols=['id'],
- ondelete='CASCADE'
+ with migration.remove_fks_from_table(TABLE_NAME):
+ op.create_primary_key(
+ name=PK_NAME,
+ table_name=TABLE_NAME,
+ cols=['id']
)