Use CONF for oslo_config's cfg.CONF
In most OpenStack projects, the oslo_config cfg.CONF singleton is assigned to a module-level CONF variable. This patch applies that convention across all components of the project for readability.

Change-Id: I7758a1ce86de8689fb2c5e420e042fa56c275927
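For reference, the convention looks roughly like this in a module (a minimal sketch; the option and names below are hypothetical and not taken from this patch):

    # Minimal sketch of the convention: alias cfg.CONF once at module level
    # and read options through CONF everywhere else (names are illustrative).
    from oslo_config import cfg

    CONF = cfg.CONF

    OPTS = [
        cfg.IntOpt('example_interval', default=60,
                   help='Hypothetical option used only for illustration'),
    ]
    CONF.register_opts(OPTS)

    if __name__ == '__main__':
        CONF([])  # parse an empty argv so option values can be read
        print(CONF.example_interval)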
@@ -32,6 +32,7 @@ from stackalytics.processor import utils
 from stackalytics import version as stackalytics_version
 
 
+CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
 
 
@@ -498,7 +499,7 @@ def profiler_decorator(func):
     @functools.wraps(func)
     def profiler_decorated_function(*args, **kwargs):
         profiler = None
-        profile_filename = cfg.CONF.collect_profiler_stats
+        profile_filename = CONF.collect_profiler_stats
 
         if profile_filename:
             LOG.debug('Profiler is enabled')

@@ -21,6 +21,8 @@ import time
 from stackalytics.dashboard import vault
 
 
+CONF = cfg.CONF
+
 DEFAULTS = {
     'review_nth': 5,
 }
@@ -66,7 +68,7 @@ DEFAULT_STATIC_ACTIVITY_SIZE = 100
 
 def get_default(param_name):
     if 'release' not in DEFAULTS:
-        release = cfg.CONF.default_release
+        release = CONF.default_release
         if not release:
             runtime_storage_inst = vault.get_runtime_storage()
             releases = runtime_storage_inst.get_by_key('releases')
@@ -80,8 +82,8 @@ def get_default(param_name):
             else:
                 release = 'all'
         DEFAULTS['release'] = release.lower()
-        DEFAULTS['metric'] = cfg.CONF.default_metric.lower()
-        DEFAULTS['project_type'] = cfg.CONF.default_project_type.lower()
+        DEFAULTS['metric'] = CONF.default_metric.lower()
+        DEFAULTS['project_type'] = CONF.default_project_type.lower()
 
     if param_name in DEFAULTS:
         return DEFAULTS[param_name]

@@ -28,6 +28,7 @@ from stackalytics.processor import user_processor
 from stackalytics.processor import utils
 
 
+CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
 
 
@@ -79,7 +80,7 @@ def get_vault():
         try:
             vault = {}
             runtime_storage_inst = runtime_storage.get_runtime_storage(
-                cfg.CONF.runtime_storage_uri)
+                CONF.runtime_storage_uri)
             vault['runtime_storage'] = runtime_storage_inst
             vault['memory_storage'] = memory_storage.get_memory_storage(
                 memory_storage.MEMORY_STORAGE_CACHED)
@@ -97,7 +98,7 @@ def get_vault():
             flask.request.stackalytics_updated = True
             vault['vault_update_time'] = time_now
             vault['vault_next_update_time'] = (
-                time_now + cfg.CONF.dashboard_update_interval)
+                time_now + CONF.dashboard_update_interval)
             memory_storage_inst = vault['memory_storage']
             have_updates = memory_storage_inst.update(compact_records(
                 vault['runtime_storage'].get_update(os.getpid())))

@@ -42,8 +42,8 @@ app.register_blueprint(kpi.blueprint)
 
 LOG = logging.getLogger(__name__)
 
-conf = cfg.CONF
-conf.register_opts(config.CONNECTION_OPTS + config.DASHBOARD_OPTS)
+CONF = cfg.CONF
+CONF.register_opts(config.CONNECTION_OPTS + config.DASHBOARD_OPTS)
 
 
 # Handlers ---------
@@ -680,20 +680,20 @@ def too_old(timestamp):
 
 
 def main():
-    logging.register_options(conf)
+    logging.register_options(CONF)
     logging.set_defaults()
 
     conf_file = os.getenv('STACKALYTICS_CONF')
     if conf_file and os.path.isfile(conf_file):
-        conf(default_config_files=[conf_file])
-        app.config['DEBUG'] = cfg.CONF.debug
+        CONF(default_config_files=[conf_file])
+        app.config['DEBUG'] = CONF.debug
         LOG.info('Stackalytics.dashboard is configured via "%s"', conf_file)
     else:
-        conf(project='stackalytics')
+        CONF(project='stackalytics')
 
-    logging.setup(conf, 'stackalytics.dashboard')
+    logging.setup(CONF, 'stackalytics.dashboard')
 
-    app.run(cfg.CONF.listen_host, cfg.CONF.listen_port)
+    app.run(CONF.listen_host, CONF.listen_port)
 
 if __name__ == '__main__':
     main()

@@ -28,6 +28,7 @@ from stackalytics.processor import rcs
 from stackalytics.processor import user_processor
 from stackalytics.processor import utils
 
+CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
 
 GITHUB_URI_PREFIX = r'^github:\/\/'
@@ -50,7 +51,7 @@ def _check_default_data_change(runtime_storage_inst, default_data):
 
 def _retrieve_project_list_from_sources(project_sources):
     for project_source in project_sources:
-        uri = project_source.get('uri') or cfg.CONF.review_uri
+        uri = project_source.get('uri') or CONF.review_uri
         repo_iterator = []
         if re.search(rcs.GERRIT_URI_PREFIX, uri):
             repo_iterator = _retrieve_project_list_from_gerrit(project_source)
@@ -66,11 +67,11 @@ def _retrieve_project_list_from_sources(project_sources):
 def _retrieve_project_list_from_gerrit(project_source):
     LOG.info('Retrieving project list from Gerrit')
     try:
-        uri = project_source.get('uri') or cfg.CONF.review_uri
+        uri = project_source.get('uri') or CONF.review_uri
         gerrit_inst = rcs.Gerrit(uri)
         key_filename = (project_source.get('ssh_key_filename') or
-                        cfg.CONF.ssh_key_filename)
-        username = project_source.get('ssh_username') or cfg.CONF.ssh_username
+                        CONF.ssh_key_filename)
+        username = project_source.get('ssh_username') or CONF.ssh_username
         gerrit_inst.setup(key_filename=key_filename, username=username)
 
         project_list = gerrit_inst.get_project_list()
@@ -83,7 +84,7 @@ def _retrieve_project_list_from_gerrit(project_source):
     LOG.debug('Get list of projects for organization %s', organization)
     git_repos = [f for f in project_list if f.startswith(organization + "/")]
 
-    git_base_uri = project_source.get('git_base_uri') or cfg.CONF.git_base_uri
+    git_base_uri = project_source.get('git_base_uri') or CONF.git_base_uri
 
     for repo in git_repos:
         (org, name) = repo.split('/')

@@ -26,6 +26,7 @@ from stackalytics.processor import config
 from stackalytics.processor import utils
 
 
+CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
 
 OPTS = [
@@ -61,7 +62,7 @@ def read_records_from_fd(fd):
 def store_bucket(memcached_inst, bucket):
     LOG.debug('Store bucket of records into memcached')
     res = memcached_inst.set_multi(bucket,
-                                   min_compress_len=cfg.CONF.min_compress_len)
+                                   min_compress_len=CONF.min_compress_len)
     if res:
         LOG.critical('Failed to set values in memcached: %s', res)
         raise Exception('memcached set_multi operation is failed')
@@ -158,11 +159,11 @@ def _connect_to_memcached(uri):
 def main():
     utils.init_config_and_logging(config.CONNECTION_OPTS + OPTS)
 
-    memcached_inst = _connect_to_memcached(cfg.CONF.runtime_storage_uri)
+    memcached_inst = _connect_to_memcached(CONF.runtime_storage_uri)
 
-    filename = cfg.CONF.file
+    filename = CONF.file
 
-    if cfg.CONF.restore:
+    if CONF.restore:
         if filename:
             fd = open(filename, 'r')
         else:

@@ -36,6 +36,7 @@ from stackalytics.processor import utils
 from stackalytics.processor import vcs
 from stackalytics.processor import zanata
 
+CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
 
 
@@ -107,7 +108,7 @@ def _process_repo(repo, runtime_storage_inst, record_processor_inst,
     runtime_storage_inst.set_by_key(
         'bug_modified_since-%s' % repo['module'], current_date)
 
-    vcs_inst = vcs.get_vcs(repo, cfg.CONF.sources_root)
+    vcs_inst = vcs.get_vcs(repo, CONF.sources_root)
     vcs_inst.fetch()
 
     branches = {repo.get('default_branch', 'master')}
@@ -186,7 +187,7 @@ def _process_mail_list(uri, runtime_storage_inst, record_processor_inst):
 
 def _process_translation_stats(runtime_storage_inst, record_processor_inst):
     translation_iterator = zanata.log(runtime_storage_inst,
-                                      cfg.CONF.translation_team_uri)
+                                      CONF.translation_team_uri)
     translation_iterator_typed = _record_typer(translation_iterator, 'i18n')
     processed_translation_iterator = record_processor_inst.process(
         translation_iterator_typed)
@@ -195,8 +196,8 @@ def _process_translation_stats(runtime_storage_inst, record_processor_inst):
 
 def _process_member_list(uri, runtime_storage_inst, record_processor_inst):
     member_iterator = mps.log(uri, runtime_storage_inst,
-                              cfg.CONF.days_to_update_members,
-                              cfg.CONF.members_look_ahead)
+                              CONF.days_to_update_members,
+                              CONF.members_look_ahead)
     member_iterator_typed = _record_typer(member_iterator, 'member')
     processed_member_iterator = record_processor_inst.process(
         member_iterator_typed)
@@ -214,7 +215,7 @@ def _post_process_records(record_processor_inst, repos):
     LOG.debug('Build release index')
     release_index = {}
     for repo in repos:
-        vcs_inst = vcs.get_vcs(repo, cfg.CONF.sources_root)
+        vcs_inst = vcs.get_vcs(repo, CONF.sources_root)
         release_index.update(vcs_inst.fetch())
 
     LOG.debug('Post-process all records')
@@ -224,10 +225,10 @@ def _post_process_records(record_processor_inst, repos):
 def process(runtime_storage_inst, record_processor_inst):
     repos = utils.load_repos(runtime_storage_inst)
 
-    rcs_inst = rcs.get_rcs(cfg.CONF.review_uri)
-    rcs_inst.setup(key_filename=cfg.CONF.ssh_key_filename,
-                   username=cfg.CONF.ssh_username,
-                   gerrit_retry=cfg.CONF.gerrit_retry)
+    rcs_inst = rcs.get_rcs(CONF.review_uri)
+    rcs_inst.setup(key_filename=CONF.ssh_key_filename,
+                   username=CONF.ssh_username,
+                   gerrit_retry=CONF.gerrit_retry)
 
     for repo in repos:
         _process_repo(repo, runtime_storage_inst, record_processor_inst,
@@ -310,19 +311,19 @@ def main():
                                   config.PROCESSOR_OPTS)
 
     runtime_storage_inst = runtime_storage.get_runtime_storage(
-        cfg.CONF.runtime_storage_uri)
+        CONF.runtime_storage_uri)
 
-    if cfg.CONF.default_data_file:
-        default_data = utils.read_json_from_file(cfg.CONF.default_data_file)
+    if CONF.default_data_file:
+        default_data = utils.read_json_from_file(CONF.default_data_file)
     else:
-        default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
+        default_data = utils.read_json_from_uri(CONF.default_data_uri)
     if not default_data:
         LOG.critical('Unable to load default data')
         return not 0
 
     default_data_processor.process(runtime_storage_inst,
                                    default_data,
-                                   cfg.CONF.driverlog_data_uri)
+                                   CONF.driverlog_data_uri)
 
     process_project_list(runtime_storage_inst)
 
@@ -333,7 +334,7 @@ def main():
 
     process(runtime_storage_inst, record_processor_inst)
 
-    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)
+    apply_corrections(CONF.corrections_uri, runtime_storage_inst)
 
     # long operation should be the last
     update_members(runtime_storage_inst, record_processor_inst)

@@ -31,21 +31,21 @@ import six
 import yaml
 
 
+CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
 
 
 def init_config_and_logging(opts):
-    conf = cfg.CONF
-    conf.register_cli_opts(opts)
-    conf.register_opts(opts)
-    logging.register_options(conf)
+    CONF.register_cli_opts(opts)
+    CONF.register_opts(opts)
+    logging.register_options(CONF)
     logging.set_defaults()
 
-    conf(project='stackalytics')
+    CONF(project='stackalytics')
 
-    logging.setup(conf, 'stackalytics')
+    logging.setup(CONF, 'stackalytics')
     LOG.info('Logging enabled')
-    conf.log_opt_values(LOG, logging.DEBUG)
+    CONF.log_opt_values(LOG, logging.DEBUG)
 
 
 def date_to_timestamp(d):

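As a usage note, processor-style entry points bootstrap configuration and logging through the init_config_and_logging helper shown in the last hunk, as the other hunks do via utils.init_config_and_logging. A minimal sketch of that pattern, assuming stackalytics is installed (the option name here is hypothetical):

    # Minimal usage sketch of init_config_and_logging; the option is
    # illustrative only and not part of this patch.
    from oslo_config import cfg
    from oslo_log import log as logging

    from stackalytics.processor import utils

    CONF = cfg.CONF
    LOG = logging.getLogger(__name__)

    OPTS = [
        cfg.StrOpt('example_data_uri', default='file:///tmp/example.json',
                   help='Hypothetical option used only for illustration'),
    ]


    def main():
        # Registers OPTS, parses CLI/config files and sets up oslo.log,
        # after which options are read through the module-level CONF alias.
        utils.init_config_and_logging(OPTS)
        LOG.info('Reading data from %s', CONF.example_data_uri)


    if __name__ == '__main__':
        main()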