Add info to backup files about the run that occurred; more string cleanups; more mixins
This commit is contained in:
parent f72e9deffb
commit 5496018cf9
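For context, the recurring pattern this commit introduces (see `_backup_key_init` and `_backup_network_init` in the hunks below) is: after an init script runs successfully, move it to a `.bak` file and append a comment header recording when it ran, who ran it, and the environment overrides used. A minimal standalone sketch of that idea, using only the standard library instead of the repo's `sh`/`utils`/`date` helpers (the function name `backup_with_run_info` is hypothetical, not part of the codebase):

```python
import getpass
import shutil
from datetime import datetime


def backup_with_run_info(src_fn, env):
    # Approximation of _backup_key_init/_backup_network_init: rename the
    # script that just ran to "<name>.bak" and record run metadata in it.
    tgt_fn = "%s.bak" % (src_fn)
    shutil.move(src_fn, tgt_fn)
    lines = [
        '',
        '# Ran on %s by %s' % (datetime.now().isoformat(), getpass.getuser()),
        '# With environment:',
    ]
    for k, v in env.items():
        lines.append('# %s => %s' % (k, v))
    with open(tgt_fn, 'a') as fh:
        fh.write("\n".join(lines) + "\n")
    return tgt_fn
```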
@@ -199,7 +199,13 @@ class PkgInstallComponent(ComponentBase, PackageBasedComponentMixin):
return down.GitDownloader(self.distro, uri, target_dir, branch).download()

def _get_param_map(self, config_fn):
return dict()
return {
'COMPONENT_DIR': self.component_dir,
'APP_DIR': self.app_dir,
'CONFIG_DIR': self.cfg_dir,
'TRACE_DIR': self.trace_dir,
'CONFIG_FN': config_fn,
}

def _get_packages(self):
pkg_list = list(self.packages)
@@ -70,17 +70,16 @@ SUB_TO_APP = {
BIN_DIR = 'bin'


class GlanceUninstaller(comp.PythonUninstallComponent):
def __init__(self, *args, **kargs):
comp.PythonUninstallComponent.__init__(self, *args, **kargs)
class GlanceMixin(object):

def known_options(self):
return set(['no-load-images'])

def known_subsystems(self):
return SUB_TO_APP.keys()


class GlanceInstaller(comp.PythonInstallComponent):
def __init__(self, *args, **kargs):
comp.PythonInstallComponent.__init__(self, *args, **kargs)
def _get_config_files(self):
return list(CONFIGS)

def _get_download_locations(self):
places = list()
@@ -90,11 +89,15 @@ class GlanceInstaller(comp.PythonInstallComponent):
})
return places

def known_subsystems(self):
return SUB_TO_APP.keys()

def _get_config_files(self):
return list(CONFIGS)
class GlanceUninstaller(GlanceMixin, comp.PythonUninstallComponent):
def __init__(self, *args, **kargs):
comp.PythonUninstallComponent.__init__(self, *args, **kargs)


class GlanceInstaller(GlanceMixin, comp.PythonInstallComponent):
def __init__(self, *args, **kargs):
comp.PythonInstallComponent.__init__(self, *args, **kargs)

def post_install(self):
comp.PythonInstallComponent.post_install(self)
@@ -107,10 +110,12 @@ class GlanceInstaller(comp.PythonInstallComponent):

def _get_source_config(self, config_fn):
if config_fn == POLICY_JSON:
# FIXME, maybe we shouldn't be sucking this from the checkout??
fn = sh.joinpths(self.app_dir, 'etc', POLICY_JSON)
contents = sh.load_file(fn)
return (fn, contents)
elif config_fn == LOGGING_CONF:
# FIXME, maybe we shouldn't be sucking this from the checkout??
fn = sh.joinpths(self.app_dir, 'etc', LOGGING_SOURCE_FN)
contents = sh.load_file(fn)
return (fn, contents)
@@ -129,25 +134,24 @@ class GlanceInstaller(comp.PythonInstallComponent):
if config.getboolean('default', 'image_cache_enabled'):
cache_dir = config.get('default', "image_cache_datadir")
if cache_dir:
LOG.info("Ensuring image cache data directory %s exists "\
"(and is empty)" % (cache_dir))
LOG.info("Ensuring image cache data directory %r exists (and is empty)" % (cache_dir))
# Destroy then recreate the image cache directory
sh.deldir(cache_dir)
self.tracewriter.dirs_made(*sh.mkdirslist(cache_dir))
if config.get('default', 'default_store') == 'file':
file_dir = config.get('default', 'filesystem_store_datadir')
if file_dir:
LOG.info("Ensuring file system store directory %s exists and is empty." % (file_dir))
LOG.info("Ensuring file system store directory %r exists and is empty." % (file_dir))
# Delete existing images
# and recreate the image directory
sh.deldir(file_dir)
self.tracewriter.dirs_made(*sh.mkdirslist(file_dir))
log_filename = config.get('default', 'log_file')
if log_filename:
LOG.info("Ensuring log file %s exists and is empty." % (log_filename))
LOG.info("Ensuring log file %r exists and is empty." % (log_filename))
log_dir = sh.dirname(log_filename)
if log_dir:
LOG.info("Ensuring log directory %s exists." % (log_dir))
LOG.info("Ensuring log directory %r exists." % (log_dir))
self.tracewriter.dirs_made(*sh.mkdirslist(log_dir))
# Destroy then recreate it (the log file)
sh.unlink(log_filename)
@@ -155,18 +159,22 @@ class GlanceInstaller(comp.PythonInstallComponent):
if config.getboolean('default', 'delayed_delete'):
data_dir = config.get('default', 'scrubber_datadir')
if data_dir:
LOG.info("Ensuring scrubber data dir %s exists and is empty." % (data_dir))
LOG.info("Ensuring scrubber data dir %r exists and is empty." % (data_dir))
# Destroy then recreate the scrubber data directory
sh.deldir(data_dir)
self.tracewriter.dirs_made(*sh.mkdirslist(data_dir))
# Nothing modified so just return the original
return contents

def _get_image_dir(self):
# This might be changed often so make it a function
return sh.joinpths(self.component_dir, 'images')

def _get_param_map(self, config_fn):
# This dict will be used to fill in the configuration
# params with actual values
mp = dict()
mp['DEST'] = self.app_dir
mp = comp.PythonInstallComponent._get_param_map(self, config_fn)
mp['IMG_DIR'] = self._get_image_dir()
mp['SYSLOG'] = self.cfg.getboolean("default", "syslog")
mp['SQL_CONN'] = db.fetch_dbdsn(self.cfg, self.pw_gen, DB_NAME)
mp['SERVICE_HOST'] = self.cfg.get('host', 'ip')
@@ -175,30 +183,24 @@ class GlanceInstaller(comp.PythonInstallComponent):
return mp


class GlanceRuntime(comp.PythonRuntime):
class GlanceRuntime(GlanceMixin, comp.PythonRuntime):
def __init__(self, *args, **kargs):
comp.PythonRuntime.__init__(self, *args, **kargs)
self.bin_dir = sh.joinpths(self.app_dir, BIN_DIR)
self.wait_time = max(self.cfg.getint('default', 'service_wait_seconds'), 1)

def known_subsystems(self):
return SUB_TO_APP.keys()

def _get_apps_to_start(self):
apps = list()
for subsys in self.desired_subsystems:
app = dict()
app['name'] = SUB_TO_APP[subsys]
app['path'] = sh.joinpths(self.bin_dir, app['name'])
apps.append(app)
apps.append({
'name': SUB_TO_APP[subsys],
'path': sh.joinpths(self.bin_dir, SUB_TO_APP[subsys]),
})
return apps

def _get_app_options(self, app):
return APP_OPTIONS.get(app)

def known_options(self):
return set(['no-load-images'])

def post_start(self):
comp.PythonRuntime.post_start(self)
if 'no-load-images' in self.options:
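The mixin refactor above (GlanceMixin here, NovaMixin later in the diff) factors the option and subsystem lists out of each installer/uninstaller/runtime class so they are defined once. A condensed, self-contained sketch of the idea, with a placeholder SUB_TO_APP mapping and a stand-in base class instead of the real comp.PythonInstallComponent:

```python
# Placeholder values; the real SUB_TO_APP contents are not shown in this diff.
SUB_TO_APP = {
    'api': 'glance-api',
    'reg': 'glance-registry',
}


class GlanceMixin(object):
    # Shared by installer, uninstaller, and runtime classes so the
    # option/subsystem lists live in exactly one place.
    def known_options(self):
        return set(['no-load-images'])

    def known_subsystems(self):
        return SUB_TO_APP.keys()


class BaseInstallComponent(object):  # stand-in for comp.PythonInstallComponent
    pass


class GlanceInstaller(GlanceMixin, BaseInstallComponent):
    pass


print(GlanceInstaller().known_options())
```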
@@ -157,7 +157,7 @@ class HorizonInstaller(comp.PythonInstallComponent):
def _get_param_map(self, config_fn):
# This dict will be used to fill in the configuration
# params with actual values
mp = dict()
mp = comp.PythonInstallComponent._get_param_map(self, config_fn)
if config_fn == HORIZON_APACHE_CONF:
(user, group) = self._get_apache_user_group()
mp['GROUP'] = group
@@ -20,6 +20,7 @@ from urlparse import urlunparse

from devstack import cfg
from devstack import component as comp
from devstack import date
from devstack import log as logging
from devstack import shell as sh
from devstack import utils
@@ -46,18 +47,25 @@ MANAGE_DATA_CONF = 'keystone_init.sh'
MANAGE_CMD_ROOT = [sh.joinpths("/", "bin", 'bash')]
MANAGE_ADMIN_USER = 'admin'
MANAGE_DEMO_USER = 'demo'
MANAGE_INVIS_USER = 'invisible_to_admin'
MANGER_SERVICE_TENANT = 'service'

# Sync db command
MANAGE_APP_NAME = 'keystone-manage'
SYNC_DB_CMD = [sh.joinpths('%BINDIR%', MANAGE_APP_NAME), 'db_sync']
SYNC_DB_CMD = [sh.joinpths('%BIN_DIR%', 'keystone-manage'),
'--config-file=%s' % (sh.joinpths('%CONFIG_DIR%', ROOT_CONF)),
'--debug', '-v',
# Available commands:
# db_sync: Sync the database.
# export_legacy_catalog: Export the service catalog from a legacy database.
# import_legacy: Import a legacy database.
# import_nova_auth: Import a dump of nova auth data into keystone.
'db_sync']

# What to start
APP_NAME = 'keystone-all'
APP_OPTIONS = {
APP_NAME: ['--config-file', sh.joinpths('%CONFIG_DIR%', ROOT_CONF),
"--debug", '-d',
'--log-config=' + sh.joinpths('%CONFIG_DIR%', LOGGING_CONF)]
APP_NAME: ['--config-file=%s' % (sh.joinpths('%CONFIG_DIR%', ROOT_CONF)),
"--debug", '-v',
'--log-config=%s' % (sh.joinpths('%CONFIG_DIR%', LOGGING_CONF))],
}
@@ -106,10 +114,9 @@ class KeystoneInstaller(comp.PythonInstallComponent):

def _sync_db(self):
LOG.info("Syncing keystone to database named %s.", DB_NAME)
params = dict()
params['BINDIR'] = self.bin_dir
mp = self._get_param_map(None)
cmds = [{'cmd': SYNC_DB_CMD}]
utils.execute_template(*cmds, cwd=self.bin_dir, params=params)
utils.execute_template(*cmds, cwd=self.bin_dir, params=mp)

def _get_config_files(self):
return list(CONFIGS)
@@ -122,8 +129,9 @@ class KeystoneInstaller(comp.PythonInstallComponent):
def _setup_initer(self):
LOG.info("Configuring keystone initializer template %s.", MANAGE_DATA_CONF)
(_, contents) = utils.load_template(self.component_name, MANAGE_DATA_CONF)
params = self._get_param_map(MANAGE_DATA_CONF)
contents = utils.param_replace(contents, params, True)
mp = self._get_param_map(MANAGE_DATA_CONF)
contents = utils.param_replace(contents, mp, True)
# FIXME, stop placing in checkout dir...
tgt_fn = sh.joinpths(self.bin_dir, MANAGE_DATA_CONF)
sh.write_file(tgt_fn, contents)
sh.chmod(tgt_fn, 0755)
@@ -168,6 +176,7 @@ class KeystoneInstaller(comp.PythonInstallComponent):

def _get_source_config(self, config_fn):
if config_fn == LOGGING_CONF:
# FIXME, maybe we shouldn't be sucking this from the checkout??
fn = sh.joinpths(self.app_dir, 'etc', LOGGING_SOURCE_FN)
contents = sh.load_file(fn)
return (fn, contents)
@@ -179,7 +188,7 @@ class KeystoneInstaller(comp.PythonInstallComponent):
def _get_param_map(self, config_fn):
# These be used to fill in the configuration/cmds +
# params with actual values
mp = dict()
mp = comp.PythonInstallComponent._get_param_map(self, config_fn)
mp['SERVICE_HOST'] = self.cfg.get('host', 'ip')
mp['DEST'] = self.app_dir
mp['BIN_DIR'] = self.bin_dir
@@ -211,10 +220,23 @@ class KeystoneRuntime(comp.PythonRuntime):
env['ENABLED_SERVICES'] = ",".join(self.instances.keys())
env['BIN_DIR'] = self.bin_dir
setup_cmd = MANAGE_CMD_ROOT + [tgt_fn]
LOG.info("Running (%s) command to initialize keystone." % (" ".join(setup_cmd)))
LOG.info("Running %r command to initialize keystone." % (" ".join(setup_cmd)))
sh.execute(*setup_cmd, env_overrides=env, run_as_root=False)
LOG.debug("Removing (%s) file since we successfully initialized keystone." % (tgt_fn))
sh.unlink(tgt_fn)
self._backup_key_init(tgt_fn, env)

def _backup_key_init(self, src_fn, env):
tgt_fn = utils.make_backup_fn(src_fn)
LOG.debug("Moving %r to %r since we successfully initialized keystone.", src_fn, tgt_fn)
sh.move(src_fn, tgt_fn)
add_lines = list()
add_lines.append('')
add_lines.append('# Ran on %s by %s' % (date.rcf8222date(), sh.getuser()))
add_lines.append('# With environment:')
for k, v in env.items():
add_lines.append('# %s => %s' % (k, v))
sh.append_file(tgt_fn, utils.joinlinesep(add_lines))
# FIXME - add a trace?
return tgt_fn

def _get_apps_to_start(self):
apps = list()
@@ -234,11 +256,11 @@ def get_shared_params(config, pw_gen, service_user_name=None):
host_ip = config.get('host', 'ip')

# These match what is in keystone_init.sh
mp['SERVICE_TENANT_NAME'] = 'service'
mp['SERVICE_TENANT_NAME'] = MANGER_SERVICE_TENANT
if service_user_name:
mp['SERVICE_USERNAME'] = str(service_user_name)
mp['ADMIN_USER_NAME'] = 'admin'
mp['DEMO_USER_NAME'] = 'demo'
mp['ADMIN_USER_NAME'] = MANAGE_ADMIN_USER
mp['DEMO_USER_NAME'] = MANAGE_DEMO_USER
mp['ADMIN_TENANT_NAME'] = mp['ADMIN_USER_NAME']
mp['DEMO_TENANT_NAME'] = mp['DEMO_USER_NAME']
@@ -113,6 +113,7 @@ class MelangeInstaller(comp.PythonInstallComponent):

def _get_source_config(self, config_fn):
if config_fn == ROOT_CONF:
# FIXME, maybe we shouldn't be sucking this from the checkout??
fn = sh.joinpths(self.app_dir, 'etc', 'melange', config_fn)
contents = sh.load_file(fn)
return (fn, contents)
@@ -144,9 +145,9 @@ class MelangeRuntime(comp.PythonRuntime):
return APP_OPTIONS.get(app)

def _get_param_map(self, app_name):
pmap = comp.PythonRuntime._get_param_map(self, app_name)
pmap['CFG_FILE'] = sh.joinpths(self.cfg_dir, ROOT_CONF_REAL_NAME)
return pmap
mp = comp.PythonRuntime._get_param_map(self, app_name)
mp['CFG_FILE'] = sh.joinpths(self.cfg_dir, ROOT_CONF_REAL_NAME)
return mp

def known_options(self):
return set(["create-cidr"])
@@ -54,7 +54,7 @@ DB_NAME = 'nova'

# This makes the database be in sync with nova
DB_SYNC_CMD = [
{'cmd': ['%BIN_DIR%/nova-manage', CFG_FILE_OPT, '%CFGFILE%', 'db', 'sync']},
{'cmd': ['%BIN_DIR%/nova-manage', CFG_FILE_OPT, '%CFG_FILE%', 'db', 'sync']},
]

# These are used for nova volumes
@@ -96,15 +96,15 @@ SUBSYSTEMS = [NCPU, NVOL, NAPI,
# What to start
APP_OPTIONS = {
#these are currently the core components/applications
'nova-api': [CFG_FILE_OPT, '%CFGFILE%'],
'nova-compute': [CFG_FILE_OPT, '%CFGFILE%'],
'nova-volume': [CFG_FILE_OPT, '%CFGFILE%'],
'nova-network': [CFG_FILE_OPT, '%CFGFILE%'],
'nova-scheduler': [CFG_FILE_OPT, '%CFGFILE%'],
'nova-cert': [CFG_FILE_OPT, '%CFGFILE%'],
'nova-objectstore': [CFG_FILE_OPT, '%CFGFILE%'],
'nova-consoleauth': [CFG_FILE_OPT, '%CFGFILE%'],
'nova-xvpvncproxy': [CFG_FILE_OPT, '%CFGFILE%'],
'nova-api': [CFG_FILE_OPT, '%CFG_FILE%'],
'nova-compute': [CFG_FILE_OPT, '%CFG_FILE%'],
'nova-volume': [CFG_FILE_OPT, '%CFG_FILE%'],
'nova-network': [CFG_FILE_OPT, '%CFG_FILE%'],
'nova-scheduler': [CFG_FILE_OPT, '%CFG_FILE%'],
'nova-cert': [CFG_FILE_OPT, '%CFG_FILE%'],
'nova-objectstore': [CFG_FILE_OPT, '%CFG_FILE%'],
'nova-consoleauth': [CFG_FILE_OPT, '%CFG_FILE%'],
'nova-xvpvncproxy': [CFG_FILE_OPT, '%CFG_FILE%'],
}

# Sub component names to actual app names (matching previous dict)
@@ -196,15 +196,32 @@ def canon_virt_driver(virt_driver):
return virt_driver


class NovaUninstaller(comp.PythonUninstallComponent):
class NovaMixin(object):

def known_options(self):
return set(['no-vnc', 'quantum', 'melange'])

def known_subsystems(self):
return list(SUBSYSTEMS)

def _get_config_files(self):
return list(CONFIGS)

def _get_download_locations(self):
places = list()
places.append({
'uri': ("git", "nova_repo"),
'branch': ("git", "nova_branch"),
})
return places


class NovaUninstaller(NovaMixin, comp.PythonUninstallComponent):
def __init__(self, *args, **kargs):
comp.PythonUninstallComponent.__init__(self, *args, **kargs)
self.bin_dir = sh.joinpths(self.app_dir, BIN_DIR)
self.virsh = lv.Virsh(self.cfg, self.distro)

def known_subsystems(self):
return SUBSYSTEMS

def pre_uninstall(self):
self._clear_libvirt_domains()
self._clean_it()
@@ -218,7 +235,7 @@ class NovaUninstaller(comp.PythonUninstallComponent):
env['VOLUME_NAME_PREFIX'] = self.cfg.getdefaulted('nova', 'volume_name_prefix', DEF_VOL_PREFIX)
cleaner_fn = sh.joinpths(self.bin_dir, CLEANER_DATA_CONF)
if sh.isfile(cleaner_fn):
LOG.info("Cleaning up your system by running nova cleaner script [%s]." % (cleaner_fn))
LOG.info("Cleaning up your system by running nova cleaner script %r" % (cleaner_fn))
cmd = CLEANER_CMD_ROOT + [cleaner_fn]
sh.execute(*cmd, run_as_root=True, env_overrides=env)
@@ -230,7 +247,7 @@ class NovaUninstaller(comp.PythonUninstallComponent):
self.virsh.clear_domains(libvirt_type, inst_prefix)


class NovaInstaller(comp.PythonInstallComponent):
class NovaInstaller(NovaMixin, comp.PythonInstallComponent):
def __init__(self, *args, **kargs):
comp.PythonInstallComponent.__init__(self, *args, **kargs)
self.bin_dir = sh.joinpths(self.app_dir, BIN_DIR)
@@ -242,26 +259,12 @@ class NovaInstaller(comp.PythonInstallComponent):
if NXVNC in self.desired_subsystems:
self.xvnc_enabled = True

def known_options(self):
return set(['no-vnc', 'quantum', 'melange'])

def known_subsystems(self):
return SUBSYSTEMS

def _get_symlinks(self):
links = comp.PythonInstallComponent._get_symlinks(self)
source_fn = sh.joinpths(self.cfg_dir, API_CONF)
links[source_fn] = sh.joinpths(self._get_link_dir(), API_CONF)
return links

def _get_download_locations(self):
places = list()
places.append({
'uri': ("git", "nova_repo"),
'branch': ("git", "nova_branch"),
})
return places

def warm_configs(self):
warm_pws = list(WARMUP_PWS)
driver_canon = canon_virt_driver(self.cfg.get('nova', 'virt_driver'))
@@ -270,14 +273,12 @@ class NovaInstaller(comp.PythonInstallComponent):
for pw_key, pw_prompt in warm_pws:
self.pw_gen.get_password(pw_key, pw_prompt)

def _get_config_files(self):
return list(CONFIGS)

def _setup_network_initer(self):
LOG.info("Configuring nova network initializer template %s.", NET_INIT_CONF)
(_, contents) = utils.load_template(self.component_name, NET_INIT_CONF)
params = self._get_param_map(NET_INIT_CONF)
contents = utils.param_replace(contents, params, True)
# FIXME, stop placing in checkout dir...
tgt_fn = sh.joinpths(self.bin_dir, NET_INIT_CONF)
sh.write_file(tgt_fn, contents)
sh.chmod(tgt_fn, 0755)
@@ -287,7 +288,7 @@ class NovaInstaller(comp.PythonInstallComponent):
LOG.info("Syncing the database with nova.")
mp = dict()
mp['BIN_DIR'] = self.bin_dir
mp['CFGFILE'] = sh.joinpths(self.cfg_dir, API_CONF)
mp['CFG_FILE'] = sh.joinpths(self.cfg_dir, API_CONF)
utils.execute_template(*DB_SYNC_CMD, params=mp)

def post_install(self):
@@ -303,8 +304,9 @@ class NovaInstaller(comp.PythonInstallComponent):
vol_maker.setup_volumes()

def _setup_cleaner(self):
LOG.info("Configuring cleaner template %s.", CLEANER_DATA_CONF)
LOG.info("Configuring cleaner template %r", CLEANER_DATA_CONF)
(_, contents) = utils.load_template(self.component_name, CLEANER_DATA_CONF)
# FIXME, stop placing in checkout dir...
tgt_fn = sh.joinpths(self.bin_dir, CLEANER_DATA_CONF)
sh.write_file(tgt_fn, contents)
sh.chmod(tgt_fn, 0755)
@@ -316,7 +318,7 @@ class NovaInstaller(comp.PythonInstallComponent):
db.create_db(self.cfg, self.pw_gen, self.distro, DB_NAME)

def _generate_nova_conf(self):
LOG.info("Generating dynamic content for nova configuration (%s)." % (API_CONF))
LOG.info("Generating dynamic content for nova in file %r" % (API_CONF))
conf_gen = NovaConfConfigurator(self)
nova_conf_contents = conf_gen.configure()
conf_fn = self._get_target_config_name(API_CONF)
@@ -330,15 +332,15 @@ class NovaInstaller(comp.PythonInstallComponent):
return comp.PythonInstallComponent._get_source_config(self, PASTE_SOURCE_FN)
if config_fn == LOGGING_CONF:
config_fn = LOGGING_SOURCE_FN
# FIXME, maybe we shouldn't be sucking these from checked out code?
fn = sh.joinpths(self.app_dir, 'etc', "nova", config_fn)
contents = sh.load_file(fn)
return (fn, contents)

def _get_param_map(self, config_fn):
mp = dict()
mp = comp.PythonInstallComponent._get_param_map(self, config_fn)
mp['CFG_FILE'] = sh.joinpths(self.cfg_dir, API_CONF)
if config_fn == NET_INIT_CONF:
mp['NOVA_DIR'] = self.app_dir
mp['CFG_FILE'] = sh.joinpths(self.cfg_dir, API_CONF)
mp['FLOATING_RANGE'] = self.cfg.getdefaulted('nova', 'floating_range', '172.24.4.224/28')
mp['TEST_FLOATING_RANGE'] = self.cfg.getdefaulted('nova', 'test_floating_range', '192.168.253.0/29')
mp['TEST_FLOATING_POOL'] = self.cfg.getdefaulted('nova', 'test_floating_pool', 'test')
@@ -355,13 +357,27 @@ class NovaInstaller(comp.PythonInstallComponent):
return configs_made


class NovaRuntime(comp.PythonRuntime):
class NovaRuntime(NovaMixin, comp.PythonRuntime):
def __init__(self, *args, **kargs):
comp.PythonRuntime.__init__(self, *args, **kargs)
self.bin_dir = sh.joinpths(self.app_dir, BIN_DIR)
self.wait_time = max(self.cfg.getint('default', 'service_wait_seconds'), 1)
self.virsh = lv.Virsh(self.cfg, self.distro)

def _backup_network_init(self, src_fn, env):
tgt_fn = utils.make_backup_fn(src_fn)
LOG.debug("Moving %r to %r since we successfully initialized nova's network.", src_fn, tgt_fn)
sh.move(src_fn, tgt_fn)
add_lines = list()
add_lines.append('')
add_lines.append('# Ran on %s by %s' % (date.rcf8222date(), sh.getuser()))
add_lines.append('# With environment:')
for k, v in env.items():
add_lines.append('# %s => %s' % (k, v))
sh.append_file(tgt_fn, utils.joinlinesep(add_lines))
# FIXME - add a trace?
return tgt_fn

def _setup_network_init(self):
tgt_fn = sh.joinpths(self.bin_dir, NET_INIT_CONF)
if sh.isfile(tgt_fn):
@@ -377,25 +393,18 @@ class NovaRuntime(comp.PythonRuntime):
setup_cmd = NET_INIT_CMD_ROOT + [tgt_fn]
LOG.info("Running (%s) command to initialize nova's network." % (" ".join(setup_cmd)))
sh.execute(*setup_cmd, env_overrides=env, run_as_root=False)
LOG.debug("Removing (%s) file since we successfully initialized nova's network." % (tgt_fn))
sh.unlink(tgt_fn)
self._backup_network_init(tgt_fn, env)

def post_start(self):
self._setup_network_init()

def known_options(self):
return set(['quantum'])

def known_subsystems(self):
return SUBSYSTEMS

def _get_apps_to_start(self):
apps = list()
for subsys in self.desired_subsystems:
app = dict()
app['name'] = SUB_COMPONENT_NAME_MAP[subsys]
app['path'] = sh.joinpths(self.bin_dir, app['name'])
apps.append(app)
apps.append({
'name': SUB_COMPONENT_NAME_MAP[subsys],
'path': sh.joinpths(self.bin_dir, SUB_COMPONENT_NAME_MAP[subsys]),
})
return apps

def pre_start(self):
@@ -418,7 +427,7 @@ class NovaRuntime(comp.PythonRuntime):

def _get_param_map(self, app_name):
params = comp.PythonRuntime._get_param_map(self, app_name)
params['CFGFILE'] = sh.joinpths(self.cfg_dir, API_CONF)
params['CFG_FILE'] = sh.joinpths(self.cfg_dir, API_CONF)
return params

def _get_app_options(self, app):
@@ -163,6 +163,7 @@ class QuantumInstaller(QuantumMixin, comp.PkgInstallComponent):
contents = sh.load_file(srcfn)
return (srcfn, contents)
elif config_fn == AGENT_CONF:
# WHY U SO BURIED....
srcfn = sh.joinpths(self.app_dir, 'etc', 'quantum', 'plugins', 'openvswitch', config_fn)
contents = sh.load_file(srcfn)
return (srcfn, contents)
@@ -45,15 +45,19 @@ ROOT_PATH = os.sep
DRYRUN_MODE = False
DRY_RC = 0
DRY_STDOUT_ERR = ("", "")
BOOL_MP = {
True: 'true',
False: 'false',
}


def set_dryrun(val):
global DRYRUN_MODE
if val:
LOG.debug("Setting dryrun to: %s" % (True))
LOG.debug("Setting dryrun to: %s" % (BOOL_MP.get(True)))
DRYRUN_MODE = True
else:
LOG.debug("Resetting dryrun to: %s" % (False))
LOG.debug("Resetting dryrun to: %s" % (BOOL_MP.get(False)))
DRYRUN_MODE = False
@@ -109,7 +113,7 @@ def execute(*cmd, **kwargs):
LOG.audit('With stdin: %s' % (process_input))

if cwd:
LOG.audit("In working directory: %s" % (cwd))
LOG.audit("In working directory: %r" % (cwd))

stdin_fh = subprocess.PIPE
stdout_fh = subprocess.PIPE
@@ -180,7 +184,7 @@ def execute(*cmd, **kwargs):
else:
# Log it anyway
if rc not in check_exit_code:
LOG.debug("A failure may of just happened when running command \"%s\" [%s] (%s, %s)", \
LOG.debug("A failure may of just happened when running command %r [%s] (%s, %s)", \
str_cmd, rc, stdout.strip(), stderr.strip())
# Log for debugging figuring stuff out
LOG.debug("Received stdout: %s" % (stdout.strip()))
@@ -247,16 +251,18 @@ def _get_suids():
def chown_r(path, uid, gid, run_as_root=True):
with Rooted(run_as_root):
if isdir(path):
LOG.audit("Changing ownership of %s to %s:%s" % (path, uid, gid))
LOG.audit("Changing ownership of %r to %s:%s" % (path, uid, gid))
for root, dirs, files in os.walk(path):
os.chown(root, uid, gid)
LOG.audit("Changing ownership of %s to %s:%s" % (root, uid, gid))
LOG.audit("Changing ownership of %r to %s:%s" % (root, uid, gid))
for d in dirs:
os.chown(joinpths(root, d), uid, gid)
LOG.audit("Changing ownership of %s to %s:%s" % (joinpths(root, d), uid, gid))
dir_pth = joinpths(root, d)
os.chown(dir_pth, uid, gid)
LOG.audit("Changing ownership of %r to %s:%s" % (dir_pth, uid, gid))
for f in files:
os.chown(joinpths(root, f), uid, gid)
LOG.audit("Changing ownership of %s to %s:%s" % (joinpths(root, f), uid, gid))
fn_pth = joinpths(root, f)
os.chown(fn_pth, uid, gid)
LOG.audit("Changing ownership of %r to %s:%s" % (fn_pth, uid, gid))


def _explode_path(path):
@@ -291,7 +297,7 @@ def remove_parents(child_path, paths):
return list()
cleaned_paths = [abspth(p) for p in paths]
cleaned_child_path = abspth(child_path)
LOG.audit("Removing parents of [%s] from input [%s]" % (cleaned_child_path, ",".join(cleaned_paths)))
LOG.audit("Removing parents of %r from input [%s]" % (cleaned_child_path, ",".join(cleaned_paths)))
to_check_paths = [_explode_path(p) for p in cleaned_paths]
check_path = _explode_path(cleaned_child_path)
new_paths = list()
@@ -317,7 +323,7 @@ def _array_begins_with(haystack, needle):


def mkdirslist(path):
LOG.debug("Determining potential paths to create for target path \"%s\"" % (path))
LOG.debug("Determining potential paths to create for target path %r" % (path))
dirs_possible = _explode_form_path(path)
dirs_made = list()
for check_path in dirs_possible:
@@ -329,7 +335,7 @@ def mkdirslist(path):

def append_file(fn, text, flush=True, quiet=False):
if not quiet:
LOG.audit("Appending to file %s (%d bytes) (flush=%s)", fn, len(text), flush)
LOG.audit("Appending to file %r (%d bytes) (flush=%s)", fn, len(text), BOOL_MP.get(flush))
LOG.audit(">> %s" % (text))
if not DRYRUN_MODE:
with open(fn, "a") as f:
@@ -341,7 +347,7 @@ def append_file(fn, text, flush=True, quiet=False):

def write_file(fn, text, flush=True, quiet=False):
if not quiet:
LOG.audit("Writing to file %s (%d bytes) (flush=%s)", fn, len(text), flush)
LOG.audit("Writing to file %r (%d bytes) (flush=%s)", fn, len(text), BOOL_MP.get(flush))
LOG.audit("> %s" % (text))
if not DRYRUN_MODE:
with open(fn, "w") as f:
@@ -354,37 +360,37 @@ def write_file(fn, text, flush=True, quiet=False):
def touch_file(fn, die_if_there=True, quiet=False, file_size=0):
if not isfile(fn):
if not quiet:
LOG.audit("Touching and truncating file %s (truncate size=%s)", fn, file_size)
LOG.audit("Touching and truncating file %r (truncate size=%s)", fn, file_size)
if not DRYRUN_MODE:
with open(fn, "w") as f:
f.truncate(file_size)
else:
if die_if_there:
msg = "Can not touch & truncate file %s since it already exists" % (fn)
msg = "Can not touch & truncate file %r since it already exists" % (fn)
raise excp.FileException(msg)
return fn


def load_file(fn, quiet=False):
if not quiet:
LOG.audit("Loading data from file %s", fn)
LOG.audit("Loading data from file %r", fn)
data = ""
if not DRYRUN_MODE:
with open(fn, "r") as f:
data = f.read()
if not quiet:
LOG.audit("Loaded (%d) bytes from file %s", len(data), fn)
LOG.audit("Loaded (%d) bytes from file %r", len(data), fn)
return data


def mkdir(path, recurse=True):
if not isdir(path):
if recurse:
LOG.audit("Recursively creating directory \"%s\"" % (path))
LOG.audit("Recursively creating directory %r" % (path))
if not DRYRUN_MODE:
os.makedirs(path)
else:
LOG.audit("Creating directory \"%s\"" % (path))
LOG.audit("Creating directory %r" % (path))
if not DRYRUN_MODE:
os.mkdir(path)
@@ -392,7 +398,7 @@ def mkdir(path, recurse=True):
def deldir(path, run_as_root=False):
with Rooted(run_as_root):
if isdir(path):
LOG.audit("Recursively deleting directory tree starting at \"%s\"" % (path))
LOG.audit("Recursively deleting directory tree starting at %r" % (path))
if not DRYRUN_MODE:
shutil.rmtree(path)
@@ -402,10 +408,10 @@ def rmdir(path, quiet=True, run_as_root=False):
return
try:
with Rooted(run_as_root):
LOG.audit("Deleting directory \"%s\" with the cavet that we will fail if it's not empty." % (path))
LOG.audit("Deleting directory %r with the cavet that we will fail if it's not empty." % (path))
if not DRYRUN_MODE:
os.rmdir(path)
LOG.audit("Deleted directory \"%s\"" % (path))
LOG.audit("Deleted directory %r" % (path))
except OSError:
if not quiet:
raise
@@ -415,7 +421,7 @@ def rmdir(path, quiet=True, run_as_root=False):

def symlink(source, link, force=True, run_as_root=True):
with Rooted(run_as_root):
LOG.audit("Creating symlink from %s => %s" % (link, source))
LOG.audit("Creating symlink from %r => %r" % (link, source))
path = dirname(link)
needed_pths = mkdirslist(path)
if not DRYRUN_MODE:
@@ -531,7 +537,7 @@ def umount(dev_name, ignore_errors=True):


def unlink(path, ignore_errors=True, run_as_root=False):
LOG.audit("Unlinking (removing) %s" % (path))
LOG.audit("Unlinking (removing) %r" % (path))
if not DRYRUN_MODE:
try:
with Rooted(run_as_root):
@@ -544,14 +550,14 @@ def unlink(path, ignore_errors=True, run_as_root=False):


def move(src, dst):
LOG.audit("Moving: %s => %s" % (src, dst))
LOG.audit("Moving: %r => %r" % (src, dst))
if not DRYRUN_MODE:
shutil.move(src, dst)
return dst


def chmod(fname, mode):
LOG.audit("Applying chmod: %s to %o" % (fname, mode))
LOG.audit("Applying chmod: %r to %o" % (fname, mode))
if not DRYRUN_MODE:
os.chmod(fname, mode)
return fname
@@ -571,7 +577,7 @@ def replace_in(fn, search, replace, run_as_root=False):

def copy_replace_file(fsrc, fdst, linemap):
files = mkdirslist(dirname(fdst))
LOG.audit("Copying and replacing file: %s => %s" % (fsrc, fdst))
LOG.audit("Copying and replacing file: %r => %r" % (fsrc, fdst))
if not DRYRUN_MODE:
with open(fdst, 'w') as fh:
for line in fileinput.input(fsrc):
@@ -98,6 +98,10 @@ def configure_logging(verbosity_level=1, dry_run=False):
root_logger.setLevel(log_level)


def make_backup_fn(src_fn):
return "%s.bak" % (src_fn)


def load_template(component, template_name):
full_pth = sh.joinpths(settings.STACK_TEMPLATE_DIR, component, template_name)
contents = sh.load_file(full_pth)
@@ -338,7 +342,8 @@ def param_replace(text, replacements, ignore_missing=False):
LOG.debug("Performing parameter replacements (not ignoring missing) on text [%s]" % (text))

possible_params = find_params(text)
LOG.debug("Possible replacements are [%s]" % (", ".join(possible_params)))
LOG.debug("Possible replacements are: %r" % (", ".join(possible_params)))
LOG.debug("Given substitutions are: %s" % (replacements))

def replacer(match):
org_txt = match.group(0)
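The `%CFG_FILE%`-style placeholders renamed throughout this commit are filled in by `utils.param_replace` with the maps returned from the various `_get_param_map` methods. As a rough, standalone illustration of that substitution style (this is an assumption-laden sketch, not devstack's actual `param_replace` implementation; `simple_param_replace` is a hypothetical name):

```python
import re


def simple_param_replace(text, replacements, ignore_missing=False):
    # Replace %NAME% placeholders with values from `replacements`; this only
    # approximates what the project's utils.param_replace does.
    def replacer(match):
        name = match.group(1)
        if name in replacements:
            return str(replacements[name])
        if ignore_missing:
            return match.group(0)
        raise KeyError("No replacement found for parameter %r" % (name))

    return re.sub(r"%([A-Za-z0-9_]+)%", replacer, text)


print(simple_param_replace("%BIN_DIR%/nova-manage", {"BIN_DIR": "/opt/nova/bin"}))
```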