add catalog export
Change-Id: I66a7b3e8136757979c96984242b2bbd5f390b9a0
This commit is contained in:
parent
1ed067cb57
commit
27db5cbc05
|
@ -9,6 +9,33 @@ CONF = config.CONF
|
|||
config.register_str('template_file', group='catalog')
|
||||
|
||||
|
||||
def parse_templates(template_lines):
    """Parse catalog template lines into a nested dict.

    Each useful line has the form::

        catalog.$region.$service.$key = $value

    Lines without ' = ' or whose key does not start with 'catalog.'
    are ignored.

    :param template_lines: iterable of strings (e.g. an open file)
    :returns: dict of {region: {service: {key: value}}}
    """
    o = {}
    for line in template_lines:
        if ' = ' not in line:
            continue

        # Split only on the first ' = ' so values that themselves
        # contain the separator are preserved intact.
        k, v = line.strip().split(' = ', 1)
        if not k.startswith('catalog.'):
            continue

        parts = k.split('.')
        if len(parts) < 4:
            # Malformed key; skip it rather than raise IndexError.
            continue

        region = parts[1]
        # NOTE(termie): object-store insists on having a dash
        service = parts[2].replace('_', '-')
        key = parts[3]

        region_ref = o.get(region, {})
        service_ref = region_ref.get(service, {})
        service_ref[key] = v

        region_ref[service] = service_ref
        o[region] = region_ref

    return o
||||
class TemplatedCatalog(kvs.Catalog):
|
||||
"""A backend that generates endpoints for the Catalog based on templates.
|
||||
|
||||
|
@ -49,30 +76,7 @@ class TemplatedCatalog(kvs.Catalog):
|
|||
super(TemplatedCatalog, self).__init__()
|
||||
|
||||
def _load_templates(self, template_file):
    """Load catalog templates from template_file into self.templates.

    :param template_file: path to a catalog templates file
    """
    # Use a context manager so the file handle is closed promptly;
    # the previous bare open() leaked the handle until GC.
    with open(template_file) as f:
        self.templates = parse_templates(f)
||||
def get_catalog(self, user_id, tenant_id, metadata=None):
|
||||
d = dict(CONF.iteritems())
|
||||
|
|
|
@ -74,8 +74,25 @@ class ImportLegacy(BaseApp):
|
|||
migration.migrate_all()
|
||||
|
||||
|
||||
class ExportLegacyCatalog(BaseApp):
|
||||
"""Export the service catalog from a legacy database."""
|
||||
|
||||
name = 'export_legacy_catalog'
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
super(ExportLegacyCatalog, self).__init__(*args, **kw)
|
||||
self.add_param('old_db', nargs=1)
|
||||
|
||||
def main(self):
|
||||
from keystone.common.sql import legacy
|
||||
old_db = self.params.old_db[0]
|
||||
migration = legacy.LegacyMigration(old_db)
|
||||
print '\n'.join(migration.dump_catalog())
|
||||
|
||||
|
||||
# Maps each management command name to the handler class implementing it.
CMDS = {'db_sync': DbSync,
        'import_legacy': ImportLegacy,
        'export_legacy_catalog': ExportLegacyCatalog,
        }
|
||||
|
||||
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
import re
|
||||
|
||||
import sqlalchemy
|
||||
|
||||
from keystone.identity.backends import sql as identity_sql
|
||||
|
@ -23,6 +25,12 @@ def export_db(db):
|
|||
return migration_data
|
||||
|
||||
|
||||
def _translate_replacements(s):
|
||||
if '%' not in str(s):
|
||||
return s
|
||||
return re.sub(r'%([\w_]+)%', r'$(\1)s', s)
|
||||
|
||||
|
||||
class LegacyMigration(object):
|
||||
def __init__(self, db_string):
    # db_string is a SQLAlchemy database URL, e.g. 'sqlite:///legacy.db'.
    self.db = sqlalchemy.create_engine(db_string)
|
||||
|
@ -41,9 +49,29 @@ class LegacyMigration(object):
|
|||
self._migrate_user_roles()
|
||||
self._migrate_tokens()
|
||||
|
||||
def dump_catalog(self):
    """Generate the contents of a catalog templates file.

    Builds one 'catalog.<region>.<service_type>.<key> = <value>' line
    per endpoint-template attribute, plus a 'name' line per service.

    :returns: list of template-file lines
    """
    self._export_legacy_db()

    services_by_id = dict((x['id'], x) for x in self._data['services'])
    template = 'catalog.%(region)s.%(service_type)s.%(key)s = %(value)s'

    lines = []
    for row in self._data['endpoint_templates']:
        service = services_by_id[row['service_id']]
        d = {'service_type': service['type'],
             'region': row['region']}

        for column in ('internal_url', 'public_url', 'admin_url', 'enabled'):
            # Legacy columns are snake_case; template keys are camelCase
            # (internal_url -> internalUrl).
            d['key'] = column.replace('_u', 'U')
            d['value'] = _translate_replacements(row[column])
            lines.append(template % d)

        d['key'] = 'name'
        d['value'] = service['desc']
        lines.append(template % d)

    return lines
||||
|
||||
def _export_legacy_db(self):
    # Snapshot the legacy database; dump_catalog() reads the cached
    # result from self._data.
    self._data = export_db(self.db)
|
||||
|
|
|
@ -11,6 +11,7 @@ from keystone.common.sql import legacy
|
|||
from keystone.common.sql import util as sql_util
|
||||
from keystone.identity.backends import sql as identity_sql
|
||||
from keystone.token.backends import sql as token_sql
|
||||
from keystone.catalog.backends import templated as catalog_templated
|
||||
|
||||
|
||||
|
||||
|
@ -18,62 +19,81 @@ CONF = config.CONF
|
|||
|
||||
|
||||
class ImportLegacy(test.TestCase):
|
||||
def setUp(self):
    """Configure the sql backends and build a fresh identity driver."""
    super(ImportLegacy, self).setUp()
    config_files = [test.etcdir('keystone.conf'),
                    test.testsdir('test_overrides.conf'),
                    test.testsdir('backend_sql.conf')]
    CONF(config_files=config_files)
    sql_util.setup_test_database()
    self.identity_api = identity_sql.Identity()
|
||||
|
||||
def setup_old_database(self, sql_dump):
    """Create a fresh sqlite database from a SQL dump file.

    :param sql_dump: name of a dump file under the tests directory
    :returns: filesystem path of the newly created database
    """
    sql_path = test.testsdir(sql_dump)
    db_path = test.testsdir('%s.db' % sql_dump)
    try:
        os.unlink(db_path)
    except OSError:
        # No stale database to remove.
        pass
    # Close the dump file and the connection explicitly; the previous
    # version leaked both until GC.
    dump_file = open(sql_path)
    try:
        script_str = dump_file.read().strip()
    finally:
        dump_file.close()
    conn = sqlite3.connect(db_path)
    try:
        conn.executescript(script_str)
        conn.commit()
    finally:
        conn.close()
    return db_path
|
||||
|
||||
def test_import_d5(self):
    """Migrate a diablo-5 database and verify the admin user."""
    db_path = self.setup_old_database('legacy_d5.sqlite')
    migration = legacy.LegacyMigration('sqlite:///%s' % db_path)
    migration.migrate_all()

    admin_id = '1'
    user_ref = self.identity_api.get_user(admin_id)
    self.assertEquals(user_ref['name'], 'admin')
    self.assertEquals(user_ref['enabled'], True)

    # check password hashing
    user_ref, tenant_ref, metadata_ref = self.identity_api.authenticate(
        user_id=admin_id, password='secrete')
|
||||
|
||||
def test_import_diablo(self):
    """Migrate a diablo database; verify admin user and catalog."""
    db_path = self.setup_old_database('legacy_diablo.sqlite')
    migration = legacy.LegacyMigration('sqlite:///%s' % db_path)
    migration.migrate_all()

    admin_id = '1'
    user_ref = self.identity_api.get_user(admin_id)
    self.assertEquals(user_ref['name'], 'admin')
    self.assertEquals(user_ref['enabled'], True)

    # check password hashing
    user_ref, tenant_ref, metadata_ref = self.identity_api.authenticate(
        user_id=admin_id, password='secrete')

    # check catalog
    self._check_catalog(migration)
|
||||
|
||||
def test_import_essex(self):
    """Migrate an essex database; verify admin user and catalog."""
    db_path = self.setup_old_database('legacy_essex.sqlite')
    migration = legacy.LegacyMigration('sqlite:///%s' % db_path)
    migration.migrate_all()

    admin_id = 'c93b19ea3fa94484824213db8ac0afce'
    user_ref = self.identity_api.get_user(admin_id)
    self.assertEquals(user_ref['name'], 'admin')
    self.assertEquals(user_ref['enabled'], True)

    # check password hashing
    user_ref, tenant_ref, metadata_ref = self.identity_api.authenticate(
        user_id=admin_id, password='secrete')

    # check catalog
    self._check_catalog(migration)
|
||||
|
||||
def _check_catalog(self, migration):
    """Dump the migrated catalog and assert expected entries exist."""
    parsed = catalog_templated.parse_templates(migration.dump_catalog())
    self.assert_('RegionOne' in parsed)
    region = parsed['RegionOne']
    self.assert_('compute' in region)
    self.assert_('adminUrl' in region['compute'])
|
||||
|
|
Loading…
Reference in New Issue