Platform upgrade data migration prep script

This commit is to prepare for upgrade data migration.

The data migration includes:
VIM export, postgres export, postgres and rabbitMQ filesystem
export, etcd export, k8s config file export,
branding clean up.

Tests:
PASS: run 'system deploy start' and verify that all data migration steps complete

Task: 48830
Story: 2010676
Change-Id: I7ba923de64a875207ca44076beff74e568aa2d9f
Signed-off-by: junfeng-li <junfeng.li@windriver.com>
This commit is contained in:
junfeng-li 2023-10-13 19:32:50 +00:00
parent 64ece53ffa
commit 95345b845a
2 changed files with 236 additions and 0 deletions

View File

@ -62,6 +62,8 @@ override_dh_install:
${ROOT}/etc/logrotate.d/software
install -m 755 scripts/platform-upgrade-precheck \
${ROOT}/usr/sbin/platform-upgrade-precheck
install -m 755 scripts/data-migration \
${ROOT}/usr/sbin/data-migration
install -m 444 ${METADATA_FILE} \
${ROOT}/etc/software/${METADATA_FILE}
dh_install

View File

@ -0,0 +1,234 @@
#!/usr/bin/python3
# -*- encoding: utf-8 -*-
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2023 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
"""
Run platform upgrade prep data migration as a standalone executable
"""
import logging as LOG
import os
import subprocess
import sys
import upgrade_utils
POSTGRES_PATH = '/var/lib/postgresql'
DISTRIBUTED_CLOUD_ROLE_SYSTEMCONTROLLER = 'systemcontroller'
SERVICE_TYPE_IDENTITY = 'identity'
PLATFORM_PATH = "/opt/platform"
ETCD_PATH = '/opt/etcd'
RABBIT_PATH = '/var/lib/rabbitmq'
KUBERNETES_CONF_PATH = "/etc/kubernetes"
KUBERNETES_ADMIN_CONF_FILE = "admin.conf"


class DataMigration(object):
    """Export platform data in preparation for an upgrade data migration.

    Covers postgres databases, the VIM database, an etcd symlink for the
    new release, the kubernetes admin config, and branding cleanup.
    """

    def __init__(self, rootdir, from_release, to_release):
        """Fetch system attributes from sysinv and create the export dir.

        :param rootdir: root directory the new release deployment is
                        staged under
        :param from_release: release being upgraded from
        :param to_release: release being upgraded to
        :raises: re-raises any sysinv or OS error after logging it
        """
        try:
            token, endpoint = upgrade_utils.get_endpoints_token(
                config=None, service_type="platform")
            _sysinv_client = upgrade_utils.get_sysinv_client(
                token=token, endpoint=endpoint)
            system_attributes = _sysinv_client.isystem.list()[0]
            # Cache the attributes the migration steps need so we only
            # query sysinv once.
            self.distributed_cloud_role = \
                system_attributes.distributed_cloud_role
            self.shared_services = \
                system_attributes.capabilities.get('shared_services', '')
        except Exception:
            LOG.exception("Failed to get host attributes from sysinv")
            raise

        dest_dir = os.path.join(POSTGRES_PATH, "upgrade")
        try:
            os.makedirs(dest_dir, 0o755, exist_ok=True)
        except OSError:
            LOG.exception("Failed to create upgrade export directory %s." %
                          dest_dir)
            raise
        self.dest_dir = dest_dir
        self.from_release = from_release
        self.to_release = to_release
        self.rootdir = rootdir

    def export_postgres(self):
        """Dump postgres schemas and per-database data into self.dest_dir.

        :raises subprocess.CalledProcessError: if any pg dump command fails
        """
        try:
            upgrade_databases, upgrade_database_skip_tables = \
                self._get_upgrade_databases()
            # Dump roles, table spaces and schemas for databases.
            subprocess.check_call(
                [('sudo -u postgres pg_dumpall --clean '
                  '--schema-only > %s/%s' %
                  (self.dest_dir, 'postgres.postgreSql.config'))],
                shell=True, stderr=subprocess.DEVNULL)
            # Dump data for each database, excluding its skip tables.
            for db_elem in upgrade_databases:
                db_cmd = 'sudo -u postgres pg_dump --format=plain --inserts '
                db_cmd += '--disable-triggers --data-only %s ' % db_elem
                for table_elem in upgrade_database_skip_tables[db_elem]:
                    db_cmd += '--exclude-table=%s ' % table_elem
                db_cmd += '> %s/%s.postgreSql.data' % (self.dest_dir, db_elem)
                subprocess.check_call([db_cmd], shell=True,
                                      stderr=subprocess.DEVNULL)
            LOG.info("Exporting postgres databases completed")
        except subprocess.CalledProcessError:
            LOG.exception("Failed to export postgres databases for upgrade.")
            raise

    def _get_upgrade_databases(self):
        """Get the databases to export and the tables to skip per database.

        :returns: tuple (databases, skip_tables) where databases is a tuple
                  of database names and skip_tables maps each database name
                  to the tables excluded from its data dump
        """
        # NOTE(review): the original called self._get_host_attributes(),
        # which is not defined in this module; use the attributes cached by
        # __init__ instead.
        system_role = self.distributed_cloud_role
        shared_services = self.shared_services or []

        upgrade_databases = ('postgres',
                             'template1',
                             'sysinv',
                             'barbican',
                             'fm',
                             )
        upgrade_database_skip_tables = {'postgres': (),
                                        'template1': (),
                                        'sysinv': (),
                                        'barbican': (),
                                        # Alarms are not carried over.
                                        'fm': ('alarm',),
                                        }
        if system_role == DISTRIBUTED_CLOUD_ROLE_SYSTEMCONTROLLER:
            upgrade_databases += ('dcmanager', 'dcorch',)
            upgrade_database_skip_tables.update({
                'dcmanager': (),
                'dcorch': ('service', 'orch_job', 'orch_request',)
            })
        # Only export keystone when identity is not a shared service.
        if SERVICE_TYPE_IDENTITY not in shared_services:
            upgrade_databases += ('keystone',)
            # Tokens are invalidated by the upgrade; don't carry them over.
            upgrade_database_skip_tables.update({'keystone': ('token',)})
        return upgrade_databases, upgrade_database_skip_tables

    def export_vim(self):
        """Export the VIM database into self.dest_dir.

        :raises subprocess.CalledProcessError: if the VIM dump fails
        """
        try:
            vim_cmd = ("nfv-vim-manage db-dump-data -d %s -f %s" %
                       (os.path.join(PLATFORM_PATH, 'nfv/vim',
                                     self.from_release),
                        os.path.join(self.dest_dir, 'vim.data')))
            subprocess.check_call([vim_cmd], shell=True,
                                  stderr=subprocess.DEVNULL)
            LOG.info("Exporting VIM completed")
        except subprocess.CalledProcessError:
            LOG.exception("Failed to export VIM databases for upgrade.")
            raise

    def export_etcd(self):
        """Create a to-release symlink pointing at the from-release etcd dir.

        The actual etcd migration happens later, once both controllers run
        the new release.
        """
        etcd_to_dir = os.path.join(ETCD_PATH, self.to_release)
        etcd_from_dir = os.path.join(ETCD_PATH, self.from_release)
        if not os.path.islink(etcd_to_dir):
            os.symlink(etcd_from_dir, etcd_to_dir)
        LOG.info("Creating symlink %s to %s completed",
                 etcd_to_dir, etcd_from_dir)

    def copy_kubernetes_conf(self):
        """Copy /etc/kubernetes/admin.conf to <rootdir>/user/etc/kubernetes/.

        :raises subprocess.CalledProcessError: if the copy fails
        """
        from_k8s_admin_file = os.path.join(KUBERNETES_CONF_PATH,
                                           KUBERNETES_ADMIN_CONF_FILE)
        # KUBERNETES_CONF_PATH is absolute; passing it to os.path.join as-is
        # would discard the rootdir/user prefix, so strip the leading
        # separator first.
        to_k8s_admin_file = os.path.join(
            self.rootdir, "user", KUBERNETES_CONF_PATH.lstrip(os.sep),
            KUBERNETES_ADMIN_CONF_FILE)
        try:
            subprocess.check_call(
                ["cp", from_k8s_admin_file, to_k8s_admin_file],
                stdout=subprocess.DEVNULL)
            LOG.info("Copied %s to %s completed",
                     from_k8s_admin_file, to_k8s_admin_file)
        except subprocess.CalledProcessError:
            LOG.exception("Failed to copy %s", from_k8s_admin_file)
            raise

    def update_branding(self):
        """Remove branding tar files from the release N+1 directory.

        Branding files are not compatible between releases. Failure is
        logged but deliberately not re-raised (best-effort cleanup).
        """
        branding_files = os.path.join(
            PLATFORM_PATH, "config", self.to_release, "branding", "*.tgz")
        try:
            subprocess.check_call(["rm -f %s" % branding_files], shell=True,
                                  stdout=subprocess.DEVNULL)
            LOG.info("Removed branding files %s completed", branding_files)
        except subprocess.CalledProcessError:
            LOG.exception("Failed to remove branding files %s",
                          branding_files)
def main(rootdir, from_release, to_release):
    """Run all upgrade-prep data migration steps in order.

    :param rootdir: root directory the new release deployment is staged under
    :param from_release: release being upgraded from
    :param to_release: release being upgraded to
    :returns: 0 on success, 1 on any failure
    """
    LOG.basicConfig(filename='/var/log/upgrade-prep-data-migration.log',
                    level=LOG.INFO)
    LOG.info("Running data migration from %s to %s",
             from_release, to_release)
    try:
        # Construct inside the try so a sysinv/filesystem failure in the
        # constructor honors the 0/1 return contract instead of escaping
        # as an unhandled traceback.
        data_migration = DataMigration(rootdir, from_release, to_release)
        # export postgres databases
        data_migration.export_postgres()
        # Export VIM database
        data_migration.export_vim()
        # Export the filesystem so controller-1 can access it
        # NOTE(review): export_filesystem is not defined in the portion of
        # this module shown here -- confirm it exists.
        data_migration.export_filesystem()
        # Point N+1 etcd to N for now. We will migrate when both controllers
        # are running N+1, during the swact back to controller-0. This
        # solution will present some problems when we do upgrade etcd, so
        # further development will be required at that time.
        data_migration.export_etcd()
        # Copy /etc/kubernetes/admin.conf to $rootdir/user/etc/kubernetes/
        data_migration.copy_kubernetes_conf()
        # Remove branding tar files from the release N+1 directory as
        # branding files are not compatible between releases.
        data_migration.update_branding()
        LOG.info("Data migration completed successfully.")
    except Exception:
        LOG.exception("Data migration failed.")
        return 1
    return 0
if __name__ == "__main__":
    # Require exactly three positional arguments.
    if len(sys.argv) != 4:
        print("Usage: %s <rootdir> <from_release> <to_release>" % sys.argv[0])
        sys.exit(1)
    _, rootdir, from_release, to_release = sys.argv
    sys.exit(main(rootdir, from_release, to_release))