Fix HA IP DB migration

Commit 6ddc59aedc fixed the HA IP address schema and updated the DB
migration. That fix missed the case where the same IP address was used
as a valid HA IP entry in different networks (the DB migration would
fail because multiple entries had the same AAP and network ID). This
patch defers updating the primary key during the migration until after
the data has been migrated.

Change-Id: I8f071fcc20f4afb61a0f0333dd8e599154c45387
Thomas Bachman
2021-10-06 03:03:00 +00:00
parent b4daddc046
commit fb458713ac
3 changed files with 37 additions and 53 deletions
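
A minimal sketch of the deferred primary-key ordering this patch adopts
(revision boilerplate omitted; table and column names follow the
migration below, the rest is assumed):

    from alembic import op
    import sqlalchemy as sa
    from sqlalchemy.engine import reflection

    def upgrade():
        bind = op.get_bind()
        # 1. Add the new column so the data migration can populate it.
        op.add_column('apic_ml2_ha_ipaddress_to_port_owner',
                      sa.Column('network_id', sa.String(length=36),
                                nullable=False))
        # 2. Migrate data: remove duplicate entries and fill in network_id
        #    for every existing row (the do_ha_ip_* helpers below).
        # 3. Only now swap the primary key; (ha_ip_address, network_id) is
        #    unique at this point, so the same IP in different networks no
        #    longer collides.
        inspector = reflection.Inspector.from_engine(bind)
        pk = inspector.get_pk_constraint(
            'apic_ml2_ha_ipaddress_to_port_owner')
        op.drop_constraint(pk['name'],
                           table_name='apic_ml2_ha_ipaddress_to_port_owner',
                           type_='primary')
        op.create_primary_key(
            constraint_name='apic_ml2_ha_ipaddress_to_port_owner_pk',
            table_name='apic_ml2_ha_ipaddress_to_port_owner',
            columns=['ha_ip_address', 'network_id'])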

@@ -31,18 +31,25 @@ def upgrade():
bind = op.get_bind()
insp = sa.engine.reflection.Inspector.from_engine(bind)
if 'apic_ml2_ha_ipaddress_to_port_owner' in insp.get_table_names():
op.add_column('apic_ml2_ha_ipaddress_to_port_owner',
sa.Column('network_id',
sa.String(length=36),
nullable=False))
try:
from gbpservice.neutron.plugins.ml2plus.drivers.apic_aim import (
data_migrations)
session = sa.orm.Session(bind=bind, autocommit=True)
data_migrations.do_ha_ip_duplicate_entries_removal(session)
with session.begin(subtransactions=True):
data_migrations.do_ha_ip_duplicate_entries_removal(session)
data_migrations.do_ha_ip_network_id_insertion(session)
except ImportError:
util.warn("AIM schema present, but failed to import AIM libraries"
" - HA IP duplicate entries removal not completed.")
except Exception as e:
util.warn("Caught exception while HA IP duplicates removal: %s"
% e)
util.warn("Caught exception while migrating data in %s: %s"
% ('apic_ml2_ha_ipaddress_to_port_owner', e))
inspector = reflection.Inspector.from_engine(op.get_bind())
pk_constraint = inspector.get_pk_constraint(
@@ -51,28 +58,11 @@ def upgrade():
pk_constraint['name'],
table_name='apic_ml2_ha_ipaddress_to_port_owner',
type_='primary')
op.add_column('apic_ml2_ha_ipaddress_to_port_owner',
sa.Column('network_id',
sa.String(length=36),
nullable=False))
op.create_primary_key(
constraint_name='apic_ml2_ha_ipaddress_to_port_owner_pk',
table_name='apic_ml2_ha_ipaddress_to_port_owner',
columns=['ha_ip_address', 'network_id'])
try:
from gbpservice.neutron.plugins.ml2plus.drivers.apic_aim import (
data_migrations)
session = sa.orm.Session(bind=bind, autocommit=True)
data_migrations.do_ha_ip_network_id_insertion(session)
except ImportError:
util.warn("AIM schema present, but failed to import AIM libraries"
" - HA IP network id not inserted.")
except Exception as e:
util.warn("Caught exception inserting HA IP network id: %s"
% e)
def downgrade():
pass

@@ -42,7 +42,6 @@ from sqlalchemy.orm import lazyload
from gbpservice.neutron.extensions import cisco_apic as ext
from gbpservice.neutron.plugins.ml2plus.drivers.apic_aim import apic_mapper
from gbpservice.neutron.plugins.ml2plus.drivers.apic_aim import db
# The following definitions have been taken from commit:
@@ -86,10 +85,14 @@ NetworkMapping = sa.Table(
sa.Column('vrf_tenant_name', sa.String(64), nullable=True))
# This is needed in order to make queries against the table
# prior to the primary key change in the schema (the ORM is defined
# with the new primary keys, so it can't be used).
HAIPAddressToPortAssociation = sa.Table(
'apic_ml2_ha_ipaddress_to_port_owner', sa.MetaData(),
sa.Column('ha_ip_address', sa.String(64), nullable=False),
sa.Column('port_id', sa.String(64), nullable=False))
sa.Column('port_id', sa.String(64), nullable=False),
sa.Column('network_id', sa.String(36), nullable=False))
class DefunctAddressScopeExtensionDb(model_base.BASEV2):
@@ -439,15 +442,17 @@ def do_ha_ip_duplicate_entries_removal(session):
alembic_util.msg(
"Starting duplicate entries removal for HA IP table.")
# Define these for legibility/convenience.
haip_ip = HAIPAddressToPortAssociation.c.ha_ip_address
haip_port_id = HAIPAddressToPortAssociation.c.port_id
with session.begin(subtransactions=True):
haip_ip = HAIPAddressToPortAssociation.c.ha_ip_address
haip_port_id = HAIPAddressToPortAssociation.c.port_id
port_and_haip_dbs = (session.query(models_v2.Port,
HAIPAddressToPortAssociation).join(
HAIPAddressToPortAssociation,
haip_port_id == models_v2.Port.id))
net_to_ha_ip_dict = {}
for port_db, ha_ip, port_id in port_and_haip_dbs:
for port_db, ha_ip, port_id, network_id in port_and_haip_dbs:
ha_ip_dict = net_to_ha_ip_dict.setdefault(
port_db.network_id, {})
ha_ip_dict.setdefault(
@@ -469,26 +474,24 @@ def do_ha_ip_network_id_insertion(session):
alembic_util.msg(
"Starting network id insertion for HA IP table.")
# Define these for legibility/convenience.
haip_ip = HAIPAddressToPortAssociation.c.ha_ip_address
haip_port_id = HAIPAddressToPortAssociation.c.port_id
with session.begin(subtransactions=True):
objs = (session.query(db.HAIPAddressToPortAssociation).
options(lazyload('*')).all())
port_ids = [obj['port_id'] for obj in objs]
ports = (session.query(models_v2.Port).
filter(models_v2.Port.id.in_(port_ids)).all())
for obj in objs:
port_id = obj['port_id']
ipaddress = obj['ha_ip_address']
net_id = None
for port in ports:
if port['id'] == port_id:
net_id = port['network_id']
break
haip_ip = db.HAIPAddressToPortAssociation.ha_ip_address
haip_port_id = db.HAIPAddressToPortAssociation.port_id
session.query(db.HAIPAddressToPortAssociation).filter(
haip_ip == ipaddress).filter(
haip_port_id == port_id).update(
{'network_id': net_id})
haip_ip = HAIPAddressToPortAssociation.c.ha_ip_address
haip_port_id = HAIPAddressToPortAssociation.c.port_id
port_and_haip_dbs = (session.query(models_v2.Port,
HAIPAddressToPortAssociation).join(
HAIPAddressToPortAssociation,
haip_port_id == models_v2.Port.id))
for port_db, ha_ip, port_id, network_id in port_and_haip_dbs:
update_q = HAIPAddressToPortAssociation.update().where(
haip_ip == ha_ip).where(
haip_port_id == port_id).values(
{'network_id': port_db.network_id})
session.execute(update_q)
alembic_util.msg(
"Finished network id insertion for HA IP table.")

@@ -5254,15 +5254,6 @@ class TestMigrations(ApicAimTestCase, db.DbMixin):
self.assertEqual(obj['port_id'], p1['id'])
self.assertEqual(obj['ha_ip_address'], owned_addr[0])
self.assertEqual(obj['network_id'], p1['network_id'])
# Wipe out network_id from HAIP object
haip_ip = db.HAIPAddressToPortAssociation.ha_ip_address
haip_port_id = db.HAIPAddressToPortAssociation.port_id
self.db_session.query(db.HAIPAddressToPortAssociation).filter(
haip_ip == owned_addr[0]).filter(
haip_port_id == p1['id']).update(
{'network_id': ''})
# check that network id value is wiped from the object
self.assertEqual(obj['network_id'], '')
# perform data migration
data_migrations.do_ha_ip_network_id_insertion(self.db_session)
# check that network id value is again added correctly to the obj