From 28451394e44e691d36270b82b9992bbf7ca16508 Mon Sep 17 00:00:00 2001 From: xiaodongwang Date: Fri, 29 Aug 2014 11:44:22 -0700 Subject: [PATCH] update log analyzor code and fix some api bug. Change-Id: I1247cd89e4dc44559016451e509e55f0541de480 --- bin/progress_update.py | 20 +- compass/actions/update_progress.py | 316 ++++++-- compass/db/api/cluster.py | 195 ++++- compass/db/api/host.py | 97 ++- compass/db/api/utils.py | 7 +- compass/db/models.py | 47 +- compass/log_analyzor/adapter_matcher.py | 479 +---------- compass/log_analyzor/file_matcher.py | 235 ++---- compass/log_analyzor/line_matcher.py | 66 +- compass/log_analyzor/progress_calculator.py | 857 +++++++++++--------- compass/tasks/tasks.py | 27 +- compass/tests/db/api/test_utils.py | 6 +- compass/utils/setting_wrapper.py | 4 +- conf/package_installer/chef-icehouse.conf | 6 +- conf/setting | 2 +- install/compass.sh | 1 + 16 files changed, 1167 insertions(+), 1198 deletions(-) diff --git a/bin/progress_update.py b/bin/progress_update.py index d189e385..d9414cc2 100755 --- a/bin/progress_update.py +++ b/bin/progress_update.py @@ -20,6 +20,7 @@ import lockfile import logging from compass.actions import update_progress +from compass.db.api import database from compass.tasks.client import celery from compass.utils import daemonize from compass.utils import flags @@ -28,11 +29,6 @@ from compass.utils import setting_wrapper as setting from compass.utils import util -flags.add('clusters', - help=( - 'clusters to clean, the format is as ' - 'clusterid:hostname1,hostname2,...;...'), - default='') flags.add_bool('async', help='run in async mode', default=True) @@ -41,27 +37,25 @@ flags.add('run_interval', default=setting.PROGRESS_UPDATE_INTERVAL) -def progress_update(cluster_hosts): +def progress_update(): """entry function.""" if flags.OPTIONS.async: - celery.send_task('compass.tasks.update_progress', (cluster_hosts,)) + celery.send_task('compass.tasks.update_progress') else: try: - 
update_progress.update_progress(cluster_hosts) + update_progress.update_progress() except Exception as error: - logging.error('failed to update progress for cluster_hosts: %s', - cluster_hosts) + logging.error('failed to update progress') logging.exception(error) if __name__ == '__main__': flags.init() logsetting.init() + database.init() logging.info('run progress update') daemonize.daemonize( - functools.partial( - progress_update, - util.get_clusters_from_str(flags.OPTIONS.clusters)), + progress_update, flags.OPTIONS.run_interval, pidfile=lockfile.FileLock('/var/run/progress_update.pid'), stderr=open('/tmp/progress_update_err.log', 'w+'), diff --git a/compass/actions/update_progress.py b/compass/actions/update_progress.py index 46ce4c58..d8108cf7 100644 --- a/compass/actions/update_progress.py +++ b/compass/actions/update_progress.py @@ -19,43 +19,15 @@ import logging from compass.actions import util -from compass.db.api import database -from compass.db import models +from compass.db.api import adapter_holder as adapter_api +from compass.db.api import cluster as cluster_api +from compass.db.api import host as host_api +from compass.db.api import user as user_api from compass.log_analyzor import progress_calculator from compass.utils import setting_wrapper as setting -def _cluster_filter(cluster): - """filter cluster.""" - if not cluster.state: - logging.error('there is no state for cluster %s', - cluster.id) - return False - - if cluster.state.state != 'INSTALLING': - logging.error('the cluster %s state %s is not installing', - cluster.id, cluster.state.state) - return False - - return True - - -def _host_filter(host): - """filter host.""" - if not host.state: - logging.error('there is no state for host %s', - host.id) - return False - - if host.state.state != 'INSTALLING': - logging.error('the host %s state %s is not installing', - host.id, host.state.state) - return False - - return True - - -def update_progress(cluster_hosts): +def update_progress(): """Update 
status and installing progress of the given cluster. :param cluster_hosts: clusters and hosts in each cluster to update. @@ -79,54 +51,234 @@ def update_progress(cluster_hosts): 'failed to acquire lock to calculate installation progress') return - logging.info('update installing progress of cluster_hosts: %s', - cluster_hosts) - os_names = {} - distributed_systems = {} - os_installers = {} - package_installers = {} - with database.session() as session: - clusters = session.query(models.Cluster).all() - for cluster in clusters: - clusterid = cluster.id + logging.info('update installing progress') - adapter = cluster.adapter - os_installer = adapter.adapter_os_installer - if os_installer: - os_installers[clusterid] = os_installer.name - else: - os_installers[clusterid] = None - package_installer = adapter.adapter_package_installer - if package_installer: - package_installers[clusterid] = package_installer.name - else: - package_installers[clusterid] = None + user = user_api.get_user_object(setting.COMPASS_ADMIN_EMAIL) + hosts = host_api.list_hosts(user) + host_mapping = {} + for host in hosts: + if 'id' not in host: + logging.error('id is not in host %s', host) + continue + host_id = host['id'] + if 'os_name' not in host: + logging.error('os_name is not in host %s', host) + continue + if 'os_installer' not in host: + logging.error('os_installer is not in host %s', host) + continue + host_dirname = setting.HOST_INSTALLATION_LOGDIR_NAME + if host_dirname not in host: + logging.error( + '%s is not in host %s', host_dirname, host + ) + continue + host_state = host_api.get_host_state(user, host_id) + if 'state' not in host_state: + logging.error('state is not in host state %s', host_state) + continue + if host_state['state'] == 'INSTALLING': + host_log_histories = host_api.get_host_log_histories( + user, host_id + ) + host_log_history_mapping = {} + for host_log_history in host_log_histories: + if 'filename' not in host_log_history: + logging.error( + 'filename is not in 
host log history %s', + host_log_history + ) + continue + host_log_history_mapping[ + host_log_history['filename'] + ] = host_log_history + host_mapping[host_id] = ( + host, host_state, host_log_history_mapping + ) + else: + logging.info( + 'ignore host state %s since it is not in installing', + host_state + ) + adapters = adapter_api.list_adapters(user) + adapter_mapping = {} + for adapter in adapters: + if 'id' not in adapter: + logging.error( + 'id not in adapter %s', adapter + ) + continue + if 'package_installer' not in adapter: + logging.info( + 'package_installer not in adapter %s', adapter + ) + continue + adapter_id = adapter['id'] + adapter_mapping[adapter_id] = adapter + clusters = cluster_api.list_clusters(user) + cluster_mapping = {} + for cluster in clusters: + if 'id' not in cluster: + logging.error('id not in cluster %s', cluster) + continue + cluster_id = cluster['id'] + if 'adapter_id' not in cluster: + logging.error( + 'adapter_id not in cluster %s', + cluster + ) + continue + cluster_state = cluster_api.get_cluster_state(user, cluster_id) + if 'state' not in cluster_state: + logging.error('state not in cluster state %s', cluster_state) + continue + cluster_mapping[cluster_id] = (cluster, cluster_state) + clusterhosts = cluster_api.list_clusterhosts(user) + clusterhost_mapping = {} + for clusterhost in clusterhosts: + if 'clusterhost_id' not in clusterhost: + logging.error( + 'clusterhost_id not in clusterhost %s', + clusterhost + ) + continue + clusterhost_id = clusterhost['clusterhost_id'] + if 'distributed_system_name' not in clusterhost: + logging.error( + 'distributed_system_name is not in clusterhost %s', + clusterhost + ) + continue + clusterhost_dirname = setting.CLUSTERHOST_INATALLATION_LOGDIR_NAME + if clusterhost_dirname not in clusterhost: + logging.error( + '%s is not in clusterhost %s', + clusterhost_dirname, clusterhost + ) + continue + if 'cluster_id' not in clusterhost: + logging.error( + 'cluster_id not in clusterhost %s', + 
clusterhost + ) + continue + cluster_id = clusterhost['cluster_id'] + if cluster_id not in cluster_mapping: + logging.info( + 'ignore clusterhost %s ' + 'since the cluster_id ' + 'is not in cluster_mapping %s', + clusterhost, cluster_mapping + ) + continue + cluster, _ = cluster_mapping[cluster_id] + adapter_id = cluster['adapter_id'] + if adapter_id not in adapter_mapping: + logging.info( + 'ignore clusterhost %s ' + 'since the adapter_id %s ' + 'is not in adaper_mapping %s', + clusterhost, adapter_id, adapter_mapping + ) + adapter = adapter_mapping[adapter_id] + package_installer = adapter['package_installer'] + clusterhost['package_installer'] = package_installer + clusterhost_state = cluster_api.get_clusterhost_self_state( + user, clusterhost_id + ) + if 'state' not in clusterhost_state: + logging.error( + 'state not in clusterhost_state %s', + clusterhost_state + ) + continue + if clusterhost_state['state'] == 'INSTALLING': + clusterhost_log_histories = ( + cluster_api.get_clusterhost_log_histories( + user, clusterhost_id + ) + ) + clusterhost_log_history_mapping = {} + for clusterhost_log_history in clusterhost_log_histories: + if 'filename' not in clusterhost_log_history: + logging.error( + 'filename not in clusterhost_log_history %s', + clusterhost_log_history + ) + continue + clusterhost_log_history_mapping[ + clusterhost_log_history['filename'] + ] = clusterhost_log_history + clusterhost_mapping[clusterhost_id] = ( + clusterhost, clusterhost_state, + clusterhost_log_history_mapping + ) + else: + logging.info( + 'ignore clusterhost state %s ' + 'since it is not in installing', + clusterhost_state + ) - distributed_system_name = cluster.distributed_system_name - os_name = cluster.os_name - os_names[clusterid] = os_name - distributed_systems[clusterid] = distributed_system_name - - clusterhosts = cluster.clusterhosts - hostids = [clusterhost.host.id for clusterhost in clusterhosts] - cluster_hosts.update({clusterid: hostids}) - - logging.info( - 'update 
progress for ' - 'os_installers %s,' - 'os_names %s,' - 'package_installers %s,' - 'distributed_systems %s,' - 'cluster_hosts %s', - os_installers, - os_names, - package_installers, - distributed_systems, - cluster_hosts - ) - progress_calculator.update_progress( - os_installers, - os_names, - package_installers, - distributed_systems, - cluster_hosts) + progress_calculator.update_host_progress( + host_mapping) + for host_id, (host, host_state, host_log_history_mapping) in ( + host_mapping.items() + ): + host_api.update_host_state( + user, host_id, + percentage=host_state.get('percentage', 0), + message=host_state.get('message', ''), + severity=host_state.get('severity', 'INFO') + ) + for filename, host_log_history in ( + host_log_history_mapping.items() + ): + host_api.add_host_log_history( + user, host_id, filename=filename, + position=host_log_history.get('position', 0), + percentage=host_log_history.get('percentage', 0), + partial_line=host_log_history.get('partial_line', ''), + message=host_log_history.get('message', ''), + severity=host_log_history.get('severity', 'INFO'), + line_matcher_name=host_log_history.get( + 'line_matcher_name', 'start' + ) + ) + progress_calculator.update_clusterhost_progress( + clusterhost_mapping) + for ( + clusterhost_id, + (clusterhost, clusterhost_state, clusterhost_log_history_mapping) + ) in ( + clusterhost_mapping.items() + ): + cluster_api.update_clusterhost_state( + user, clusterhost_id, + percentage=clusterhost_state.get('percentage', 0), + message=clusterhost_state.get('message', ''), + severity=clusterhost_state.get('severity', 'INFO') + ) + for filename, clusterhost_log_history in ( + clusterhost_log_history_mapping.items() + ): + cluster_api.add_clusterhost_log_history( + user, clusterhost_id, filename=filename, + position=clusterhost_log_history.get('position', 0), + percentage=clusterhost_log_history.get('percentage', 0), + partial_line=clusterhost_log_history.get( + 'partial_line', ''), + 
message=clusterhost_log_history.get('message', ''), + severity=clusterhost_log_history.get('severity', 'INFO'), + line_matcher_name=( + clusterhost_log_history.get( + 'line_matcher_name', 'start' + ) + ) + ) + progress_calculator.update_cluster_progress( + cluster_mapping) + for cluster_id, (cluster, cluster_state) in cluster_mapping.items(): + cluster_api.update_cluster_state( + user, cluster_id + ) diff --git a/compass/db/api/cluster.py b/compass/db/api/cluster.py index 078591ef..31a11d81 100644 --- a/compass/db/api/cluster.py +++ b/compass/db/api/cluster.py @@ -82,12 +82,12 @@ RESP_CLUSTERHOST_DEPLOYED_CONFIG_FIELDS = [ 'updated_at' ] RESP_STATE_FIELDS = [ - 'id', 'state', 'percentage', 'message', + 'id', 'state', 'percentage', 'message', 'severity' 'status', 'created_at', 'updated_at' ] RESP_CLUSTERHOST_STATE_FIELDS = [ - 'id', 'state', 'percentage', 'message', + 'id', 'state', 'percentage', 'message', 'severity', 'created_at', 'updated_at' ] RESP_REVIEW_FIELDS = [ @@ -125,11 +125,24 @@ UPDATED_CLUSTERHOST_DEPLOYED_CONFIG_FIELDS = [ 'deployed_package_config' ] UPDATED_CLUSTERHOST_STATE_FIELDS = [ - 'state', 'percentage', 'message' + 'state', 'percentage', 'message', 'severity' ] UPDATED_CLUSTER_STATE_FIELDS = [ 'state' ] +RESP_CLUSTERHOST_LOG_FIELDS = [ + 'clusterhost_id', 'id', 'host_id', 'cluster_id', + 'filename', 'position', 'partial_line', + 'percentage', + 'message', 'severity', 'line_matcher_name' +] +ADDED_CLUSTERHOST_LOG_FIELDS = [ + 'filename' +] +UPDATED_CLUSTERHOST_LOG_FIELDS = [ + 'position', 'partial_line', 'percentage', + 'message', 'severity', 'line_matcher_name' +] @utils.supported_filters(optional_support_keys=SUPPORTED_FIELDS) @@ -465,6 +478,17 @@ def add_clusterhost_internal( reinstall_os_set=kwargs.get('reinstall_os', False), exception_when_not_editable=False ): + if 'name' in host_dict: + hostname = host_dict['name'] + host_by_name = utils.get_db_object( + session, models.Host, False, name=hostname + ) + if host_by_name: + raise 
exception.InvalidParameter( + 'host name %s exists in host %s' % ( + hostname, host_by_name + ) + ) utils.update_db_object( session, host, **host_dict @@ -472,6 +496,17 @@ def add_clusterhost_internal( else: logging.info('host %s is not editable', host.name) else: + if 'name' in host_dict: + hostname = host_dict['name'] + host = utils.get_db_object( + session, models.Host, False, name=hostname + ) + if host: + raise exception.InvalidParameter( + 'host name %s exists in host %s' % ( + hostname, host + ) + ) host = utils.add_db_object( session, models.Host, False, machine_id, os=cluster.os, @@ -1284,6 +1319,26 @@ def get_cluster_host_state( ).state_dict() +@utils.supported_filters([]) +@database.run_in_session() +@user_api.check_user_permission_in_session( + permission.PERMISSION_GET_CLUSTERHOST_STATE +) +@utils.wrap_to_dict(RESP_CLUSTERHOST_STATE_FIELDS) +def get_cluster_host_self_state( + session, getter, cluster_id, host_id, **kwargs +): + """Get clusterhost state info.""" + clusterhost = utils.get_db_object( + session, models.ClusterHost, + cluster_id=cluster_id, host_id=host_id + ) + return utils.get_db_object( + session, models.ClusterHostState, + id=clusterhost.clusterhost_id + ) + + @utils.supported_filters([]) @database.run_in_session() @user_api.check_user_permission_in_session( @@ -1299,6 +1354,21 @@ def get_clusterhost_state( ).state_dict() +@utils.supported_filters([]) +@database.run_in_session() +@user_api.check_user_permission_in_session( + permission.PERMISSION_GET_CLUSTERHOST_STATE +) +@utils.wrap_to_dict(RESP_CLUSTERHOST_STATE_FIELDS) +def get_clusterhost_self_state( + session, getter, clusterhost_id, **kwargs +): + """Get clusterhost state info.""" + return utils.get_db_object( + session, models.ClusterHostState, id=clusterhost_id + ) + + @utils.supported_filters( optional_support_keys=UPDATED_CLUSTERHOST_STATE_FIELDS ) @@ -1355,3 +1425,122 @@ def update_cluster_state( ) utils.update_db_object(session, cluster.state, **kwargs) return 
cluster.state_dict() + + +@utils.supported_filters([]) +@database.run_in_session() +@utils.wrap_to_dict(RESP_CLUSTERHOST_LOG_FIELDS) +def get_cluster_host_log_histories( + session, getter, cluster_id, host_id, **kwargs +): + """Get clusterhost log history.""" + return utils.list_db_objects( + session, models.ClusterHostLogHistory, + cluster_id=cluster_id, host_id=host_id + ) + + +@utils.supported_filters([]) +@database.run_in_session() +@utils.wrap_to_dict(RESP_CLUSTERHOST_LOG_FIELDS) +def get_clusterhost_log_histories(session, getter, clusterhost_id, **kwargs): + """Get clusterhost log history.""" + return utils.list_db_objects( + session, models.ClusterHostLogHistory, clusterhost_id=clusterhost_id + ) + + +@utils.supported_filters([]) +@database.run_in_session() +@utils.wrap_to_dict(RESP_CLUSTERHOST_LOG_FIELDS) +def get_cluster_host_log_history( + session, getter, cluster_id, host_id, filename, **kwargs +): + """Get clusterhost log history.""" + return utils.get_db_object( + session, models.ClusterHostLogHistory, + cluster_id=cluster_id, host_id=host_id, filename=filename + ) + + +@utils.supported_filters([]) +@database.run_in_session() +@utils.wrap_to_dict(RESP_CLUSTERHOST_LOG_FIELDS) +def get_clusterhost_log_history( + session, getter, clusterhost_id, filename, **kwargs +): + """Get host log history.""" + return utils.get_db_object( + session, models.ClusterHostLogHistory, + clusterhost_id=clusterhost_id, filename=filename + ) + + +@utils.supported_filters( + optional_support_keys=UPDATED_CLUSTERHOST_LOG_FIELDS +) +@database.run_in_session() +@utils.wrap_to_dict(RESP_CLUSTERHOST_LOG_FIELDS) +def update_host_log_history( + session, updater, cluster_id, host_id, filename, **kwargs +): + """Update a host log history.""" + cluster_host_log_history = utils.get_db_object( + session, models.ClusterHostLogHistory, + cluster_id=cluster_id, host_id=host_id, filename=filename + ) + return utils.update_db_object(session, cluster_host_log_history, **kwargs) + + 
+@utils.supported_filters( + optional_support_keys=UPDATED_CLUSTERHOST_LOG_FIELDS +) +@database.run_in_session() +@utils.wrap_to_dict(RESP_CLUSTERHOST_LOG_FIELDS) +def update_clusterhost_log_history( + session, updater, clusterhost_id, filename, **kwargs +): + """Update a host log history.""" + clusterhost_log_history = utils.get_db_object( + session, models.ClusterHostLogHistory, + clusterhost_id=clusterhost_id, filename=filename + ) + return utils.update_db_object(session, clusterhost_log_history, **kwargs) + + +@utils.supported_filters( + ADDED_CLUSTERHOST_LOG_FIELDS, + optional_support_keys=UPDATED_CLUSTERHOST_LOG_FIELDS +) +@database.run_in_session() +@utils.wrap_to_dict(RESP_CLUSTERHOST_LOG_FIELDS) +def add_clusterhost_log_history( + session, creator, clusterhost_id, exception_when_existing=False, + filename=None, **kwargs +): + """add a host log history.""" + return utils.add_db_object( + session, models.ClusterHostLogHistory, exception_when_existing, + clusterhost_id, filename, **kwargs + ) + + +@utils.supported_filters( + ADDED_CLUSTERHOST_LOG_FIELDS, + optional_support_keys=UPDATED_CLUSTERHOST_LOG_FIELDS +) +@database.run_in_session() +@utils.wrap_to_dict(RESP_CLUSTERHOST_LOG_FIELDS) +def add_cluster_host_log_history( + session, creator, cluster_id, host_id, exception_when_existing=False, + filename=None, **kwargs +): + """add a host log history.""" + clusterhost = utils.get_db_object( + session, models.ClusterHost, + cluster_id=cluster_id, host_id=host_id + ) + return utils.add_db_object( + session, models.ClusterHostLogHistory, exception_when_existing, + clusterhost.clusterhost_id, filename, **kwargs + ) diff --git a/compass/db/api/host.py b/compass/db/api/host.py index ff4bafb4..ca93411a 100644 --- a/compass/db/api/host.py +++ b/compass/db/api/host.py @@ -83,10 +83,21 @@ IGNORED_NETWORK_FIELDS = [ 'interface' ] RESP_STATE_FIELDS = [ - 'id', 'state', 'percentage', 'message' + 'id', 'state', 'percentage', 'message', 'severity' ] UPDATED_STATE_FIELDS = [ 
- 'state', 'percentage', 'message' + 'state', 'percentage', 'message', 'severity' +] +RESP_LOG_FIELDS = [ + 'id', 'filename', 'position', 'partial_line', 'percentage', + 'message', 'severity', 'line_matcher_name' +] +ADDED_LOG_FIELDS = [ + 'filename' +] +UPDATED_LOG_FIELDS = [ + 'position', 'partial_line', 'percentage', + 'message', 'severity', 'line_matcher_name' ] @@ -255,6 +266,17 @@ def _update_host(session, updater, host_id, **kwargs): session, host, updater, reinstall_os_set=kwargs.get('reinstall_os', False) ) + if 'name' in kwargs: + hostname = kwargs['name'] + host_by_name = utils.get_db_object( + session, models.Host, False, name=hostname + ) + if host_by_name and host_by_name.id != host.id: + raise exception.InvalidParameter( + 'hostname %s is already exists in host %s' % ( + hostname, host_by_name + ) + ) return utils.update_db_object(session, host, **kwargs) @@ -500,7 +522,6 @@ def _add_host_network( host = utils.get_db_object( session, models.Host, id=host_id ) - is_host_editable(session, host, creator) ip_int = long(netaddr.IPAddress(ip)) host_network = utils.get_db_object( session, models.HostNetwork, False, @@ -508,8 +529,11 @@ def _add_host_network( ) if host_network: raise exception.InvalidParameter( - 'ip %s exists in database' % ip + 'ip %s exists in host network %s' % ( + ip, host_network + ) ) + is_host_editable(session, host, creator) return utils.add_db_object( session, models.HostNetwork, exception_when_existing, @@ -597,6 +621,24 @@ def update_host_network( host_id, host_network_id ) ) + if 'ip' in kwargs: + ip = kwargs['ip'] + ip_int = long(netaddr.IPAddress(ip)) + host_network_by_ip = utils.get_db_object( + session, models.HostNetwork, False, + ip_int=ip_int + ) + if host_network_by_ip and host_network_by_ip.id != host_network.id: + raise exception.InvalidParameter( + 'ip %s exist in host network %s' % ( + ip, host_network_by_ip + ) + ) + if host_network: + raise exception.InvalidParameter( + 'ip %s exists in database' % ip + ) + 
is_host_editable(session, host_network.host, updater) return utils.update_db_object(session, host_network, **kwargs) @@ -687,6 +729,53 @@ def update_host_state(session, updater, host_id, **kwargs): return host.state_dict() +@utils.supported_filters([]) +@database.run_in_session() +@utils.wrap_to_dict(RESP_LOG_FIELDS) +def get_host_log_histories(session, getter, host_id, **kwargs): + """Get host log history.""" + return utils.list_db_objects( + session, models.HostLogHistory, id=host_id + ) + + +@utils.supported_filters([]) +@database.run_in_session() +@utils.wrap_to_dict(RESP_LOG_FIELDS) +def get_host_log_history(session, getter, host_id, filename, **kwargs): + """Get host log history.""" + return utils.get_db_object( + session, models.HostLogHistory, id=host_id, filename=filename + ) + + +@utils.supported_filters(optional_support_keys=UPDATED_LOG_FIELDS) +@database.run_in_session() +@utils.wrap_to_dict(RESP_LOG_FIELDS) +def update_host_log_history(session, updater, host_id, filename, **kwargs): + """Update a host log history.""" + host_log_history = utils.get_db_object( + session, models.HostLogHistory, id=host_id, filename=filename + ) + return utils.update_db_object(session, host_log_history, **kwargs) + + +@utils.supported_filters( + ADDED_LOG_FIELDS, + optional_support_keys=UPDATED_LOG_FIELDS) +@database.run_in_session() +@utils.wrap_to_dict(RESP_LOG_FIELDS) +def add_host_log_history( + session, creator, host_id, exception_when_existing=False, + filename=None, **kwargs +): + """add a host log history.""" + return utils.add_db_object( + session, models.HostLogHistory, exception_when_existing, + host_id, filename, **kwargs + ) + + @utils.supported_filters(optional_support_keys=['poweron']) @database.run_in_session() @user_api.check_user_permission_in_session( diff --git a/compass/db/api/utils.py b/compass/db/api/utils.py index c6d524c8..db2345e2 100644 --- a/compass/db/api/utils.py +++ b/compass/db/api/utils.py @@ -265,9 +265,6 @@ def supported_filters( def 
decorator(func): @functools.wraps(func) def wrapper(*args, **filters): - logging.info('support_keys: %s', support_keys) - logging.info('optional_support_keys: %s', optional_support_keys) - logging.info('ignore_support_keys: %s', ignore_support_keys) must_support_keys = set(support_keys) all_support_keys = must_support_keys | set(optional_support_keys) filter_keys = set(filters) @@ -653,6 +650,6 @@ def check_switch_credentials(credentials): ) else: logging.debug( - 'function %s is not defined in %s', - key_check_func_name, this_module + 'function %s is not defined', + key_check_func_name ) diff --git a/compass/db/models.py b/compass/db/models.py index 2abe50a7..8ba76f5a 100644 --- a/compass/db/models.py +++ b/compass/db/models.py @@ -481,6 +481,7 @@ class ClusterHostState(BASE, StateMixin): ) def update(self): + super(ClusterHostState, self).update() host_state = self.clusterhost.host.state if self.state == 'INITIALIZED': if host_state.state in ['UNINITIALIZED']: @@ -494,7 +495,6 @@ class ClusterHostState(BASE, StateMixin): if host_state.state != 'SUCCESSFUL': host_state.state = 'SUCCESSFUL' host_state.update() - super(ClusterHostState, self).update() class ClusterHost(BASE, TimestampMixin, HelperMixin): @@ -689,13 +689,27 @@ class ClusterHost(BASE, TimestampMixin, HelperMixin): def state_dict(self): cluster = self.cluster host = self.host + host_state = host.state_dict() + if not cluster.distributed_system: + return host_state + clusterhost_state = self.state.to_dict() + if clusterhost_state['state'] in ['ERROR', 'SUCCESSFUL']: + return clusterhost_state if ( - not cluster.distributed_system or - host.state.state != 'SUCCESSFUL' + clusterhost_state['state'] in 'INSTALLING' and + clusterhost_state['percentage'] > 0 ): - return host.state_dict() - else: - return self.state.to_dict() + clusterhost_state['percentage'] = min( + 1.0, ( + 0.5 + clusterhost_state['percentage'] / 2 + ) + ) + return clusterhost_state + + host_state['percentage'] = host_state['percentage'] / 
2 + if host_state['state'] == 'SUCCESSFUL': + host_state['state'] = 'INSTALLING' + return host_state def to_dict(self): dict_info = self.host.to_dict() @@ -729,6 +743,7 @@ class HostState(BASE, StateMixin): ) def update(self): + super(HostState, self).update() host = self.host if self.state == 'INSTALLING': host.reinstall_os = False @@ -752,7 +767,6 @@ class HostState(BASE, StateMixin): ]: clusterhost.state = 'INITIALIZED' clusterhost.state.update() - super(HostState, self).update() class Host(BASE, TimestampMixin, HelperMixin): @@ -955,7 +969,7 @@ class ClusterState(BASE, StateMixin): cluster = self.cluster clusterhosts = cluster.clusterhosts self.total_hosts = len(clusterhosts) - if self.state in ['UNINITIALIZED', 'INITIALIZED']: + if self.state in ['UNINITIALIZED', 'INITIALIZED', 'INSTALLING']: self.installing_hosts = 0 self.failed_hosts = 0 self.completed_hosts = 0 @@ -981,16 +995,19 @@ class ClusterState(BASE, StateMixin): elif clusterhost_state == 'SUCCESSFUL': self.completed_hosts += 1 if self.total_hosts: - self.percentage = ( - float(self.completed_hosts) - / - float(self.total_hosts) - ) + if self.completed_hosts == self.total_hosts: + self.percentage = 1.0 + else: + self.percentage = ( + float(self.completed_hosts) + / + float(self.total_hosts) + ) self.message = ( 'total %s, installing %s, completed: %s, error %s' ) % ( - self.total_hosts, self.completed_hosts, - self.installing_hosts, self.failed_hosts + self.total_hosts, self.installing_hosts, + self.completed_hosts, self.failed_hosts ) if self.failed_hosts: self.severity = 'ERROR' diff --git a/compass/log_analyzor/adapter_matcher.py b/compass/log_analyzor/adapter_matcher.py index 02dac125..4d648298 100644 --- a/compass/log_analyzor/adapter_matcher.py +++ b/compass/log_analyzor/adapter_matcher.py @@ -16,71 +16,60 @@ .. 
moduleauthor:: Xiaodong Wang """ -import datetime import logging import re -from compass.db.api import cluster as cluster_api -from compass.db.api import database -from compass.db.api import host as host_api -from compass.db.api import user as user_api - -from compass.db.models import Cluster -from compass.db.models import ClusterHost -from compass.db.models import Host - -from compass.log_analyzor.line_matcher import Progress -from compass.utils import setting_wrapper as setting - class AdapterItemMatcher(object): """Progress matcher for the os installing or package installing.""" def __init__(self, file_matchers): self.file_matchers_ = file_matchers - self.min_progress_ = 0.0 - self.max_progress_ = 1.0 - - def update_progress_range(self, min_progress, max_progress): - """update min_progress and max_progress.""" - self.min_progress_ = min_progress - self.max_progress_ = max_progress - for file_matcher in self.file_matchers_: - file_matcher.update_absolute_progress_range( - self.min_progress_, self.max_progress_) def __str__(self): return '%s[file_matchers: %s, min_progress: %s, max_progress: %s]' % ( - self.__class__.__name__, self.file_matchers_, - self.min_progress_, self.max_progress_) + self.__class__.__name__, self.file_matchers_ + ) - def update_progress(self, fullname, progress): + def update_progress( + self, file_reader_factory, name, state, log_history_mapping + ): """Update progress. - :param fullname: the fullname of the installing host. - :type fullname: str + :param name: the fullname of the installing host. + :type name: str :param progress: Progress instance to update. 
""" for file_matcher in self.file_matchers_: - file_matcher.update_progress(fullname, progress) + filename = file_matcher.filename_ + if filename not in log_history_mapping: + log_history_mapping[filename] = { + 'filename': filename, + 'partial_line': '', + 'position': 0, + 'line_matcher_name': 'start', + 'percentage': 0.0, + 'message': '', + 'severity': 'INFO' + } + log_history = log_history_mapping[filename] + file_matcher.update_progress( + file_reader_factory, name, state, log_history + ) class OSMatcher(object): """Progress matcher for os installer.""" - def __init__(self, os_installer_name, os_pattern, - item_matcher, min_progress, max_progress): - if not (0.0 <= min_progress <= max_progress <= 1.0): - raise IndexError('%s restriction not mat:' - '0.0 <= min_progress(%s) ' - '<= max_progress(%s) <= 1.0' % ( - self.__class__.__name__, - min_progress, max_progress)) - + def __init__( + self, os_installer_name, + os_pattern, item_matcher, + file_reader_factory + ): self.name_ = os_installer_name self.os_regex_ = re.compile(os_pattern) self.matcher_ = item_matcher - self.matcher_.update_progress_range(min_progress, max_progress) + self.file_reader_factory_ = file_reader_factory def __repr__(self): return '%s[name:%s, os_pattern:%s, matcher:%s]' % ( @@ -97,425 +86,41 @@ class OSMatcher(object): self.os_regex_.match(os_name) ]) - def update_progress(self, fullname, progress): + def update_progress(self, name, state, log_history_mapping): """Update progress.""" - logging.debug('selfname: %s', self.name_) - self.matcher_.update_progress(fullname, progress) + self.matcher_.update_progress( + self.file_reader_factory_, name, state, log_history_mapping) class PackageMatcher(object): """Progress matcher for package installer.""" - def __init__(self, package_installer_name, target_system, - item_matcher, min_progress, max_progress): - if not (0.0 <= min_progress <= max_progress <= 1.0): - raise IndexError('%s restriction not mat:' - '0.0 <= min_progress(%s) ' - '<= 
max_progress(%s) <= 1.0' % ( - self.__class__.__name__, - min_progress, max_progress)) - + def __init__( + self, package_installer_name, distributed_system_pattern, + item_matcher, file_reader_factory + ): self.name_ = re.compile(package_installer_name) - self.target_system_ = target_system + self.ds_regex_ = re.compile(distributed_system_pattern) self.matcher_ = item_matcher - self.matcher_.update_progress_range(min_progress, max_progress) + self.file_reader_factory_ = file_reader_factory def __repr__(self): return '%s[name:%s, target_system:%s, matcher:%s]' % ( self.__class__.__name__, self.name_, self.target_system_, self.matcher_) - def match(self, package_installer_name, target_system): + def match(self, package_installer_name, distributed_system_name): """Check if the package matcher is acceptable.""" if package_installer_name is None: return False else: return all([ self.name_.match(package_installer_name), - self.target_system_ == target_system + self.ds_regex_.match(distributed_system_name) ]) - def update_progress(self, fullname, progress): + def update_progress(self, name, state, log_history_mapping): """Update progress.""" - self.matcher_.update_progress(fullname, progress) - - -class AdapterMatcher(object): - """Adapter matcher to update adapter installing progress.""" - - def __init__(self, os_matcher, package_matcher): - self.os_matcher_ = os_matcher - self.package_matcher_ = package_matcher - - def match(self, os_installer_name, os_name, - package_installer_name, target_system): - """Check if the adapter matcher is acceptable. - - :param os_installer_name: the os installer name. - :type os_installer_name: str - :param os_name: the os name. - :type os_name: str - :param package_installer_name: the package installer name. - :type package_installer_name: str - :param target_system: the target system to deploy - :type target_system: str - - :returns: bool - - .. 
note:: - Return True if the AdapterMatcher can process the log files - generated from the os installation and package installation. - """ - return all([ - self.os_matcher_.match(os_installer_name, os_name), - self.package_matcher_.match( - package_installer_name, target_system)]) - - def __str__(self): - return '%s[os_matcher:%s, package_matcher:%s]' % ( - self.__class__.__name__, - self.os_matcher_, self.package_matcher_) - - @classmethod - def _get_host_progress(cls, hostid): - """Get Host Progress from HostState.""" - - session = database.current_session() - host = session.query( - Host - ).filter_by(id=hostid).first() - if not host: - logging.error( - 'there is no host for %s in Host', hostid) - return None, None, None - - if not host.state: - logging.error('there is no related HostState for %s', - hostid) - return host.name, None, None - - return ( - host.name, - host.state.state, - Progress(host.state.percentage, - host.state.message, - host.state.severity)) - - @classmethod - def _get_clusterhost_progress(cls, hostid): - """Get ClusterHost progress from ClusterHostState.""" - - session = database.current_session() - clusterhost = session.query( - ClusterHost - ).filter_by(host_id=hostid).first() - if not clusterhost: - logging.error( - 'there is no clusterhost for %s in ClusterHost', - hostid - ) - return None, None, None - - if not clusterhost.state: - logging.error( - 'there is no related ClusterHostState for %s', - hostid - ) - return clusterhost.name, None, None - - return ( - clusterhost.name, - clusterhost.state.state, - Progress(clusterhost.state.percentage, - clusterhost.state.message, - clusterhost.state.severity)) - - @classmethod - def _update_host_progress(cls, hostid, host_progress, updater): - """Updates host progress to db.""" - - state = 'INSTALLING' - with database.session() as session: - host = session.query( - Host).filter_by(id=hostid).first() - if not host: - logging.error( - 'there is no host for %s in table Host', - hostid - ) - - if 
not host.state: - logging.error( - 'there is no related HostState for %s', - hostid - ) - - if host.state.percentage > host_progress.progress: - logging.error( - 'host %s progress has not been increased' - ' from %s to $s', - hostid, host.state, host_progress - ) - return - if ( - host.state.percentage == host_progress.progress and - host.state.message == host_progress.message - ): - logging.info( - 'host %s update ignored due to same progress' - 'in database', - hostid - ) - return - - if host.state.percentage >= 1.0: - state = 'SUCCESSFUL' - if host.state.severity == 'ERROR': - state = 'ERROR' - - logging.info('update host state by %s', updater) - host_api.update_host_state( - updater, - hostid, - state=state, - percentage=host_progress.progress, - message=host_progress.message + self.matcher_.update_progress( + self.file_reader_factory_, name, state, log_history_mapping ) - - logging.debug( - 'update host %s state %s', - hostid, state) - - @classmethod - def _update_clusterhost_progress( - cls, - clusterid, - hostid, - clusterhost_progress, - updater - ): - - clusterhost_state = 'INSTALLING' - with database.session() as session: - clusterhost = session.query( - ClusterHost).filter_by(host_id=hostid).first() - - if not clusterhost.state: - logging.error( - 'ClusterHost state not found for %s', - hostid) - - if clusterhost.state.percentage > clusterhost_progress.progress: - logging.error( - 'clusterhost %s state has not been increased' - ' from %s to %s', - hostid, clusterhost.state, clusterhost_progress - ) - return - - if ( - clusterhost.state.percentage == - clusterhost_progress.progress and - clusterhost.state.message == clusterhost_progress.message - ): - logging.info( - 'clusterhost %s update ignored due to same progress' - 'in database', - hostid - ) - return - - if clusterhost.state.percentage >= 1.0: - clusterhost_state = 'SUCCESSFUL' - - if clusterhost.state.severity == 'ERROR': - clusterhost_state = 'ERROR' - - logging.info('updatge clusterhost state by 
%s', updater) - cluster_api.update_cluster_host_state( - updater, - clusterid, - hostid, - state=clusterhost_state, - percentage=clusterhost_progress.progress, - message=clusterhost_progress.message - ) - - logging.debug( - 'update clusterhost %s state %s', - hostid, clusterhost_state) - - @classmethod - def _update_cluster_progress(cls, clusterid): - """Update cluster installing progress to database. - - .. note:: - The function should be called in the database session. - """ - session = database.current_session() - cluster = session.query( - Cluster).filter_by(id=clusterid).first() - if not cluster: - logging.error( - 'there is no cluster for %s in Cluster', - clusterid) - return - - if not cluster.state: - logging.error( - 'there is no ClusterState for %s', - clusterid) - - if cluster.state.state != 'INSTALLING': - logging.error('cluster %s is not in INSTALLING state', - clusterid) - return - - cluster_progress = 0.0 - cluster_messages = {} - cluster_severities = set([]) - cluster_installing_hosts = 0 - cluster_completed_hosts = 0 - cluster_failed_hosts = 0 - clusterhosts = cluster.clusterhosts - if not cluster.distributed_system: - hosts = [clusterhost.host for clusterhost in clusterhosts] - for host in hosts: - if host.state: - cluster_progress += host.state.percentage - if host.state.state == 'INSTALLING': - cluster_installing_hosts += 1 - elif host.state.state == 'SUCCESSFUL': - cluster_completed_hosts += 1 - elif host.state.state == 'ERROR': - cluster_failed_hosts += 1 - if host.state.message: - cluster_messages[host.name] = host.state.message - if host.state.severity: - cluster_severities.add(host.state.severity) - else: - for clusterhost in clusterhosts: - if clusterhost.state: - cluster_progress += clusterhost.state.percentage - if clusterhost.state.state == 'INSTALLING': - cluster_installing_hosts += 1 - elif clusterhost.state.state == 'SUCCESSFUL': - cluster_completed_hosts += 1 - elif clusterhost.state.state == 'ERROR': - cluster_failed_hosts += 1 - 
if clusterhost.state.message: - cluster_messages[clusterhost.name] = ( - clusterhost.state.message - ) - if clusterhost.state.severity: - cluster_severities.add(clusterhost.state.severity) - - cluster.state.percentage = ( - float(cluster_completed_hosts) / float(cluster.state.total_hosts) - ) - cluster.state.message = '\n'.join( - [ - '%s: %s' % (hostname, message) - for hostname, message in cluster_messages.items() - ] - ) - for severity in ['ERROR', 'WARNING', 'INFO']: - if severity in cluster_severities: - cluster.state.severity = severity - break - - if cluster.state.percentage >= 1.0: - cluster.state.state = 'SUCCESSFUL' - - if cluster.state.severity == 'ERROR': - cluster.state.state = 'ERROR' - - cluster.state.installing_hosts = cluster_installing_hosts - cluster.state.total_hosts = len(clusterhosts) - cluster.state.failed_hosts = cluster_failed_hosts - cluster.state.completed_hosts = cluster_completed_hosts - - logging.debug( - 'update cluster %s state %s', - clusterid, cluster.state) - - def update_progress(self, clusterid, hostids): - - host_progresses = {} - clusterhost_progresses = {} - updater = user_api.get_user_object( - setting.COMPASS_ADMIN_EMAIL - ) - with database.session(): - for hostid in hostids: - host_name, host_state, host_progress = \ - self._get_host_progress(hostid) - _, clusterhost_state, clusterhost_progress = \ - self._get_clusterhost_progress(hostid) - - if (not host_name or - not host_progress or - not clusterhost_progress): - logging.error( - 'nothing to update host %s', - host_name) - continue - - logging.debug('got host %s host_state: %s ' - 'host_progress: %s, ' - 'clusterhost_state: %s, ' - 'clusterhost_progress: %s ', - host_name, - host_state, - host_progress, - clusterhost_state, - clusterhost_progress) - - host_progresses[hostid] = ( - host_name, host_state, host_progress) - clusterhost_progresses[hostid] = ( - host_name, clusterhost_state, clusterhost_progress) - - for hostid, host_value in host_progresses.items(): - 
host_name, host_state, host_progress = host_value - if (host_state == 'INSTALLING' and - host_progress.progress < 1.0): - self.os_matcher_.update_progress( - host_name, host_progress) - else: - logging.error( - 'there is no need to update host %s ' - 'progress: state %s progress %s', - host_name, host_state, host_progress) - - for hostid, clusterhost_value in clusterhost_progresses.items(): - host_name, clusterhost_state, clusterhost_progress = \ - clusterhost_value - if (clusterhost_state == 'INSTALLING' and - clusterhost_progress.progress < 1.0): - self.package_matcher_.update_progress( - host_name, clusterhost_progress) - else: - logging.error( - 'no need to update clusterhost %s' - 'progress: state %s progress %s', - host_name, clusterhost_state, clusterhost_progress) - - for hostid in hostids: - if hostid not in host_progresses: - continue - if hostid not in clusterhost_progresses: - continue - - _, _, host_progress = host_progresses[hostid] - _, _, clusterhost_progress = clusterhost_progresses[hostid] - self._update_host_progress(hostid, host_progress, updater) - self._update_clusterhost_progress( - clusterid, - hostid, - clusterhost_progress, - updater - ) - - with database.session(): - self._update_cluster_progress(clusterid) diff --git a/compass/log_analyzor/file_matcher.py b/compass/log_analyzor/file_matcher.py index 535de8a4..7e57247d 100644 --- a/compass/log_analyzor/file_matcher.py +++ b/compass/log_analyzor/file_matcher.py @@ -19,9 +19,6 @@ import logging import os.path -from compass.db.api import database -from compass.db.models import LogProgressingHistory -from compass.log_analyzor.line_matcher import Progress from compass.utils import setting_wrapper as setting @@ -83,114 +80,43 @@ class FileReader(object): it has read last time. and update the position when it finish reading the log. 
""" - def __init__(self, pathname): + def __init__(self, pathname, log_history): self.pathname_ = pathname - self.position_ = 0 - self.partial_line_ = '' + self.log_history_ = log_history def __repr__(self): return ( - '%s[pathname:%s, position:%s, partial_line:%s]' % ( - self.__class__.__name__, self.pathname_, self.position_, - self.partial_line_ + '%s[pathname:%s, log_history:%s]' % ( + self.__class__.__name__, self.pathname_, + self.log_history_ ) ) - def get_history(self): - """Get log file read history from database. - - :returns: (line_matcher_name progress) - - .. note:: - The function should be called out of database session. - It reads the log_progressing_history table to get the - position in the log file it has read in last run, - the partial line of the log, the line matcher name - in the last run, the progress, the message and the - severity it has got in the last run. - """ - session = database.current_session() - history = session.query( - LogProgressingHistory - ).filter_by( - pathname=self.pathname_ - ).first() - if history: - self.position_ = history.position - self.partial_line_ = history.partial_line - line_matcher_name = history.line_matcher_name - progress = Progress(history.percentage, - history.message, - history.severity) - else: - line_matcher_name = 'start' - progress = Progress(0.0, '', None) - - return line_matcher_name, progress - - def update_history(self, line_matcher_name, progress): - """Update log_progressing_history table. - - :param line_matcher_name: the line matcher name. - :param progress: Progress instance to record the installing progress. - - .. note:: - The function should be called out of database session. - It updates the log_processing_history table. 
- """ - session = database.current_session() - history = session.query(LogProgressingHistory).filter_by( - pathname=self.pathname_).first() - if history: - if history.position >= self.position_: - logging.error( - '%s history position %s is ahead of current ' - 'position %s', - self.pathname_, - history.position, - self.position_) - return - - history.position = self.position_ - history.partial_line = self.partial_line_ - history.line_matcher_name = line_matcher_name - history.progress = progress.progress - history.message = progress.message - history.severity = progress.severity - else: - history = LogProgressingHistory( - pathname=self.pathname_, position=self.position_, - partial_line=self.partial_line_, - line_matcher_name=line_matcher_name, - percentage=progress.progress, - message=progress.message, - severity=progress.severity) - session.merge(history) - logging.debug('update file %s to history %s', - self.pathname_, history) - def readline(self): """Generate each line of the log file.""" - old_position = self.position_ + old_position = self.log_history_['position'] + position = self.log_history_['position'] + partial_line = self.log_history_['partial_line'] try: with open(self.pathname_) as logfile: - logfile.seek(self.position_) + logfile.seek(position) while True: line = logfile.readline() - self.partial_line_ += line + partial_line += line position = logfile.tell() - if position > self.position_: - self.position_ = position + if position > self.log_history_['position']: + self.log_history_['position'] = position - if self.partial_line_.endswith('\n'): - yield_line = self.partial_line_ - self.partial_line_ = '' - yield yield_line + if partial_line.endswith('\n'): + self.log_history_['partial_line'] = '' + yield partial_line + partial_line = self.log_history_['partial_line'] else: break - - if self.partial_line_: - yield self.partial_line_ + if partial_line: + self.log_history_['partial_line'] = '' + yield partial_line + partial_line = 
self.log_history_['partial_line'] except Exception as error: logging.error('failed to processing file %s', self.pathname_) @@ -198,22 +124,22 @@ class FileReader(object): logging.debug( 'processing file %s log %s bytes to position %s', - self.pathname_, self.position_ - old_position, - self.position_) + self.pathname_, position - old_position, position + ) class FileReaderFactory(object): """factory class to create FileReader instance.""" - def __init__(self, logdir, filefilter): + def __init__(self, logdir): self.logdir_ = logdir - self.filefilter_ = filefilter + self.filefilter_ = get_file_filter() def __str__(self): return '%s[logdir: %s filefilter: %s]' % ( self.__class__.__name__, self.logdir_, self.filefilter_) - def get_file_reader(self, fullname, filename): + def get_file_reader(self, hostname, filename, log_history): """Get FileReader instance. :param fullname: fullname of installing host. @@ -221,17 +147,13 @@ class FileReaderFactory(object): :returns: :class:`FileReader` instance if it is not filtered. 
""" - pathname = os.path.join(self.logdir_, fullname, filename) + pathname = os.path.join(self.logdir_, hostname, filename) logging.debug('get FileReader from %s', pathname) if not self.filefilter_.filter(pathname): logging.error('%s is filtered', pathname) return None - return FileReader(pathname) - - -FILE_READER_FACTORY = FileReaderFactory( - setting.INSTALLATION_LOGDIR, get_file_filter()) + return FileReader(pathname, log_history) class FileMatcher(object): @@ -244,71 +166,52 @@ class FileMatcher(object): self.__class__.__name__, min_progress, max_progress)) - + if 'start' not in line_matchers: + raise KeyError( + 'key `start` does not in line matchers %s' % line_matchers + ) self.line_matchers_ = line_matchers self.min_progress_ = min_progress self.max_progress_ = max_progress - self.absolute_min_progress_ = 0.0 - self.absolute_max_progress_ = 1.0 - self.absolute_progress_diff_ = 1.0 + self.progress_diff_ = max_progress - min_progress self.filename_ = filename - def update_absolute_progress_range(self, min_progress, max_progress): - """update the min progress and max progress the log file indicates.""" - progress_diff = max_progress - min_progress - self.absolute_min_progress_ = ( - min_progress + self.min_progress_ * progress_diff) - self.absolute_max_progress_ = ( - min_progress + self.max_progress_ * progress_diff) - self.absolute_progress_diff_ = ( - self.absolute_max_progress_ - self.absolute_min_progress_) - def __str__(self): return ( - '%s[ filename: %s, progress range: [%s:%s], ' + '%s[ filename: %s, progress:[%s:%s], ' 'line_matchers: %s]' % ( self.__class__.__name__, self.filename_, - self.absolute_min_progress_, - self.absolute_max_progress_, self.line_matchers_) + self.min_progress_, + self.max_progress_, self.line_matchers_) ) - def update_total_progress(self, file_progress, total_progress): - """Get the total progress from file progress.""" - if not file_progress.message: - logging.info( - 'ignore update file %s progress %s to total progress', - 
self.filename_, file_progress)
-            return
-
-        total_progress_data = min(
-            (
-                self.absolute_min_progress_ + (
-                    file_progress.progress * self.absolute_progress_diff_
-                )
-            ),
-            self.absolute_max_progress_
+    def update_progress_from_log_history(self, state, log_history):
+        file_percentage = log_history['percentage']
+        percentage = max(
+            self.min_progress_,
+            min(
+                self.max_progress_,
+                self.min_progress_ + file_percentage * self.progress_diff_
+            )
         )
-
-        # total progress should only be updated when the new calculated
-        # progress is greater than the recored total progress or the
-        # progress to update is the same but the message is different.
         if (
-            total_progress.progress < total_progress_data or (
-                total_progress.progress == total_progress_data and
-                total_progress.message != file_progress.message
+            percentage > state['percentage'] or
+            (
+                percentage == state['percentage'] and
+                log_history['message'] != state['message']
             )
         ):
-            total_progress.progress = total_progress_data
-            total_progress.message = file_progress.message
-            total_progress.severity = file_progress.severity
-            logging.debug('update file %s total progress %s',
-                          self.filename_, total_progress)
+            state['percentage'] = percentage
+            state['message'] = log_history['message']
+            state['severity'] = log_history['severity']
         else:
-            logging.info(
-                'ignore update file %s progress %s to total progress %s',
-                self.filename_, file_progress, total_progress)
+            logging.debug(
+                'ignore update state %s from log history %s '
+                'since the updated progress %s lags behind',
+                state, log_history, percentage
+            )
 
-    def update_progress(self, fullname, total_progress):
+    def update_progress(self, file_reader_factory, name, state, log_history):
         """update progress from file.
 
         :param fullname: the fullname of the installing host.
@@ -324,23 +227,27 @@ class FileMatcher(object):
         run, it will be reprocessed at the beginning because there is
         no line end indicator for the last line of the file.
""" - file_reader = FILE_READER_FACTORY.get_file_reader( - fullname, self.filename_) + file_reader = file_reader_factory.get_file_reader( + name, self.filename_, log_history) if not file_reader: return - line_matcher_name, file_progress = file_reader.get_history() + line_matcher_name = log_history['line_matcher_name'] for line in file_reader.readline(): if line_matcher_name not in self.line_matchers_: logging.debug('early exit at\n%s\nbecause %s is not in %s', line, line_matcher_name, self.line_matchers_) break - index = line_matcher_name - while index in self.line_matchers_: - line_matcher = self.line_matchers_[index] - index, line_matcher_name = line_matcher.update_progress( - line, file_progress) - - file_reader.update_history(line_matcher_name, file_progress) - self.update_total_progress(file_progress, total_progress) + same_line_matcher_name = line_matcher_name + while same_line_matcher_name in self.line_matchers_: + line_matcher = self.line_matchers_[same_line_matcher_name] + same_line_matcher_name, line_matcher_name = ( + line_matcher.update_progress(line, log_history) + ) + log_history['line_matcher_name'] = line_matcher_name + logging.debug( + 'updated log history %s after processing %s', + log_history, self + ) + self.update_progress_from_log_history(state, log_history) diff --git a/compass/log_analyzor/line_matcher.py b/compass/log_analyzor/line_matcher.py index 81305792..0bc25951 100644 --- a/compass/log_analyzor/line_matcher.py +++ b/compass/log_analyzor/line_matcher.py @@ -21,28 +21,6 @@ from abc import ABCMeta from compass.utils import util -class Progress(object): - """Progress object to store installing progress and message.""" - - def __init__(self, progress, message, severity): - """Constructor - - :param progress: installing progress between 0 to 1. - :param message: installing message. - :param severity: installing message severity. 
- """ - self.progress = progress - self.message = message - self.severity = severity - - def __repr__(self): - return '%s[progress:%s, message:%s, severity:%s]' % ( - self.__class__.__name__, - self.progress, - self.message, - self.severity) - - class ProgressCalculator(object): """base class to generate progress.""" @@ -51,7 +29,7 @@ class ProgressCalculator(object): @classmethod def update_progress( cls, progress_data, message, - severity, progress + severity, log_history ): """Update progress with the given progress_data, message and severity. @@ -65,24 +43,22 @@ class ProgressCalculator(object): # is greater than the stored progress or the progress # to update is the same but the message is different. if ( - progress_data > progress.progress or ( - progress_data == progress.progress and - message != progress.message + progress_data > log_history['percentage'] or ( + progress_data == log_history['percentage'] and + message != log_history['message'] ) ): - progress.progress = progress_data + log_history['percentage'] = progress_data if message: - progress.message = message - + log_history['message'] = message if severity: - progress.severity = severity - - logging.debug('update progress to %s', progress) + log_history['severity'] = severity + logging.debug('update progress to %s', log_history) else: logging.info('ignore update progress %s to %s', - progress_data, progress) + progress_data, log_history) - def update(self, message, severity, progress): + def update(self, message, severity, log_history): """vritual method to update progress by message and severity. :param message: installing message. 
@@ -125,17 +101,17 @@ class IncrementalProgress(ProgressCalculator): self.incremental_progress_ ) - def update(self, message, severity, progress): + def update(self, message, severity, log_history): """update progress from message and severity.""" progress_data = max( self.min_progress_, min( self.max_progress_, - progress.progress + self.incremental_progress_ + log_history['percentage'] + self.incremental_progress_ ) ) self.update_progress(progress_data, - message, severity, progress) + message, severity, log_history) class RelativeProgress(ProgressCalculator): @@ -153,19 +129,19 @@ class RelativeProgress(ProgressCalculator): def __str__(self): return '%s[%s]' % (self.__class__.__name__, self.progress_) - def update(self, message, severity, progress): + def update(self, message, severity, log_history): """update progress from message and severity.""" self.update_progress( - self.progress_, message, severity, progress) + self.progress_, message, severity, log_history) class SameProgress(ProgressCalculator): """class to update message and severity for progress.""" - def update(self, message, severity, progress): + def update(self, message, severity, log_history): """update progress from the message and severity.""" - self.update_progress(progress.progress, message, - severity, progress) + self.update_progress(log_history['percentage'], message, + severity, log_history) class LineMatcher(object): @@ -201,7 +177,7 @@ class LineMatcher(object): self.__class__.__name__, self.regex_.pattern, self.message_template_, self.severity_) - def update_progress(self, line, progress): + def update_progress(self, line, log_history): """Update progress by the line. :param line: one line in log file to indicate the installing progress. @@ -209,7 +185,7 @@ class LineMatcher(object): The line may be partial if the latest line of the log file is not the whole line. But the whole line may be resent in the next run. - :praam progress: the :class:`Progress` instance to update. 
+ :param progress: the :class:`Progress` instance to update. """ mat = self.regex_.search(line) if not mat: @@ -224,7 +200,7 @@ class LineMatcher(object): self.message_template_, mat.groupdict(), self) raise error - self.progress_.update(message, self.severity_, progress) + self.progress_.update(message, self.severity_, log_history) return ( self.match_sameline_, self.match_nextline_) diff --git a/compass/log_analyzor/progress_calculator.py b/compass/log_analyzor/progress_calculator.py index 4acaf93c..840a6d57 100644 --- a/compass/log_analyzor/progress_calculator.py +++ b/compass/log_analyzor/progress_calculator.py @@ -19,456 +19,517 @@ import logging from compass.log_analyzor.adapter_matcher import AdapterItemMatcher -from compass.log_analyzor.adapter_matcher import AdapterMatcher from compass.log_analyzor.adapter_matcher import OSMatcher from compass.log_analyzor.adapter_matcher import PackageMatcher from compass.log_analyzor.file_matcher import FileMatcher +from compass.log_analyzor.file_matcher import FileReaderFactory from compass.log_analyzor.line_matcher import IncrementalProgress from compass.log_analyzor.line_matcher import LineMatcher +from compass.utils import setting_wrapper as setting + # TODO(weidong): reconsider intialization method for the following. 
OS_INSTALLER_CONFIGURATIONS = { - 'Ubuntu': AdapterItemMatcher( - file_matchers=[ - FileMatcher( - filename='syslog', - min_progress=0.0, - max_progress=1.0, - line_matchers={ - 'start': LineMatcher( - pattern=r'.*', - progress=.05, - message_template='start installing', - unmatch_nextline_next_matcher_name='start', - match_nextline_next_matcher_name='ethdetect' - ), - 'ethdetect': LineMatcher( - pattern=r'Menu.*item.*\'ethdetect\'.*selected', - progress=.1, - message_template='ethdetect selected', - unmatch_nextline_next_matcher_name='ethdetect', - match_nextline_next_matcher_name='netcfg' - ), - 'netcfg': LineMatcher( - pattern=r'Menu.*item.*\'netcfg\'.*selected', - progress=.12, - message_template='netcfg selected', - unmatch_nextline_next_matcher_name='netcfg', - match_nextline_next_matcher_name='network-preseed' - ), - 'network-preseed': LineMatcher( - pattern=r'Menu.*item.*\'network-preseed\'.*selected', - progress=.15, - message_template='network-preseed selected', - unmatch_nextline_next_matcher_name='network-preseed', - match_nextline_next_matcher_name='localechooser' - ), - 'localechoose': LineMatcher( - pattern=r'Menu.*item.*\'localechooser\'.*selected', - progress=.18, - message_template='localechooser selected', - unmatch_nextline_next_matcher_name='localechooser', - match_nextline_next_matcher_name='download-installer' - ), - 'download-installer': LineMatcher( - pattern=( - r'Menu.*item.*\'download-installer\'.*selected' + 'cobbler': { + 'Ubuntu': AdapterItemMatcher( + file_matchers=[ + FileMatcher( + filename='syslog', + min_progress=0.0, + max_progress=1.0, + line_matchers={ + 'start': LineMatcher( + pattern=r'.*', + progress=.05, + message_template='start installing', + unmatch_nextline_next_matcher_name='start', + match_nextline_next_matcher_name='ethdetect' ), - progress=.2, - message_template='download installer selected', - unmatch_nextline_next_matcher_name=( - 'download-installer'), - match_nextline_next_matcher_name='clock-setup' - ), - 
'clock-setup': LineMatcher( - pattern=r'Menu.*item.*\'clock-setup\'.*selected', - progress=.3, - message_template='clock-setup selected', - unmatch_nextline_next_matcher_name='clock-setup', - match_nextline_next_matcher_name='disk-detect' - ), - 'disk-detect': LineMatcher( - pattern=r'Menu.*item.*\'disk-detect\'.*selected', - progress=.32, - message_template='disk-detect selected', - unmatch_nextline_next_matcher_name='disk-detect', - match_nextline_next_matcher_name='partman-base' - ), - 'partman-base': LineMatcher( - pattern=r'Menu.*item.*\'partman-base\'.*selected', - progress=.35, - message_template='partman-base selected', - unmatch_nextline_next_matcher_name='partman-base', - match_nextline_next_matcher_name='live-installer' - ), - 'live-installer': LineMatcher( - pattern=r'Menu.*item.*\'live-installer\'.*selected', - progress=.45, - message_template='live-installer selected', - unmatch_nextline_next_matcher_name='live-installer', - match_nextline_next_matcher_name='pkgsel' - ), - 'pkgsel': LineMatcher( - pattern=r'Menu.*item.*\'pkgsel\'.*selected', - progress=.5, - message_template='pkgsel selected', - unmatch_nextline_next_matcher_name='pkgsel', - match_nextline_next_matcher_name='grub-installer' - ), - 'grub-installer': LineMatcher( - pattern=r'Menu.*item.*\'grub-installer\'.*selected', - progress=.9, - message_template='grub-installer selected', - unmatch_nextline_next_matcher_name='grub-installer', - match_nextline_next_matcher_name='finish-install' - ), - 'finish-install': LineMatcher( - pattern=r'Menu.*item.*\'finish-install\'.*selected', - progress=.95, - message_template='finish-install selected', - unmatch_nextline_next_matcher_name='finish-install', - match_nextline_next_matcher_name='finish-install-done' - ), - 'finish-install-done': LineMatcher( - pattern=r'Running.*finish-install.d/.*save-logs', - progress=1.0, - message_template='finish-install is done', - unmatch_nextline_next_matcher_name=( - 'finish-install-done' + 'ethdetect': LineMatcher( 
+ pattern=r'Menu.*item.*\'ethdetect\'.*selected', + progress=.1, + message_template='ethdetect selected', + unmatch_nextline_next_matcher_name='ethdetect', + match_nextline_next_matcher_name='netcfg' ), - match_nextline_next_matcher_name='exit' - ), - } - ), - FileMatcher( - filename='status', - min_progress=.2, - max_progress=.3, - line_matchers={ - 'start': LineMatcher( - pattern=r'Package: (?P.*)', - progress=IncrementalProgress(0.0, 0.99, 0.05), - message_template='Installing udeb %(package)s', - unmatch_nextline_next_matcher_name='start', - match_nextline_next_matcher_name='start' - ) - } - ), - FileMatcher( - filename='initial-status', - min_progress=.5, - max_progress=.9, - line_matchers={ - 'start': LineMatcher( - pattern=r'Package: (?P.*)', - progress=IncrementalProgress(0.0, 0.99, 0.01), - message_template='Installing deb %(package)s', - unmatch_nextline_next_matcher_name='start', - match_nextline_next_matcher_name='start' - ) - } - ), - ] - ), - 'CentOS': AdapterItemMatcher( - file_matchers=[ - FileMatcher( - filename='sys.log', - min_progress=0.0, - max_progress=0.1, - line_matchers={ - 'start': LineMatcher( - pattern=r'NOTICE (?P.*)', - progress=IncrementalProgress(.1, .9, .1), - message_template='%(message)s', - unmatch_nextline_next_matcher_name='start', - match_nextline_next_matcher_name='exit' - ), - } - ), - FileMatcher( - filename='anaconda.log', - min_progress=0.1, - max_progress=1.0, - line_matchers={ - 'start': LineMatcher( - pattern=r'setting.*up.*kickstart', - progress=.1, - message_template=( - 'Setting up kickstart configurations'), - unmatch_nextline_next_matcher_name='start', - match_nextline_next_matcher_name='STEP_STAGE2' - ), - 'STEP_STAGE2': LineMatcher( - pattern=r'starting.*STEP_STAGE2', - progress=.15, - message_template=( - 'Downloading installation ' - 'images from server'), - unmatch_nextline_next_matcher_name='STEP_STAGE2', - match_nextline_next_matcher_name='start_anaconda' - ), - 'start_anaconda': LineMatcher( - 
pattern=r'Running.*anaconda.*script', - progress=.2, - unmatch_nextline_next_matcher_name=( - 'start_anaconda'), - match_nextline_next_matcher_name=( - 'start_kickstart_pre') - ), - 'start_kickstart_pre': LineMatcher( - pattern=r'Running.*kickstart.*pre.*script', - progress=.25, - unmatch_nextline_next_matcher_name=( - 'start_kickstart_pre'), - match_nextline_next_matcher_name=( - 'kickstart_pre_done') - ), - 'kickstart_pre_done': LineMatcher( - pattern=( - r'All.*kickstart.*pre.*script.*have.*been.*run'), - progress=.3, - unmatch_nextline_next_matcher_name=( - 'kickstart_pre_done'), - match_nextline_next_matcher_name=( - 'start_enablefilesystem') - ), - 'start_enablefilesystem': LineMatcher( - pattern=r'moving.*step.*enablefilesystems', - progress=0.3, - message_template=( - 'Performing hard-disk partitioning and ' - 'enabling filesystems'), - unmatch_nextline_next_matcher_name=( - 'start_enablefilesystem'), - match_nextline_next_matcher_name=( - 'enablefilesystem_done') - ), - 'enablefilesystem_done': LineMatcher( - pattern=r'leaving.*step.*enablefilesystems', - progress=.35, - message_template='Filesystems are enabled', - unmatch_nextline_next_matcher_name=( - 'enablefilesystem_done'), - match_nextline_next_matcher_name=( - 'setup_repositories') - ), - 'setup_repositories': LineMatcher( - pattern=r'moving.*step.*reposetup', - progress=0.35, - message_template=( - 'Setting up Customized Repositories'), - unmatch_nextline_next_matcher_name=( - 'setup_repositories'), - match_nextline_next_matcher_name=( - 'repositories_ready') - ), - 'repositories_ready': LineMatcher( - pattern=r'leaving.*step.*reposetup', - progress=0.4, - message_template=( - 'Customized Repositories setting up are done'), - unmatch_nextline_next_matcher_name=( - 'repositories_ready'), - match_nextline_next_matcher_name='checking_dud' - ), - 'checking_dud': LineMatcher( - pattern=r'moving.*step.*postselection', - progress=0.4, - message_template='Checking DUD modules', - 
unmatch_nextline_next_matcher_name='checking_dud', - match_nextline_next_matcher_name='dud_checked' - ), - 'dud_checked': LineMatcher( - pattern=r'leaving.*step.*postselection', - progress=0.5, - message_template='Checking DUD modules are done', - unmatch_nextline_next_matcher_name='dud_checked', - match_nextline_next_matcher_name='installing_packages' - ), - 'installing_packages': LineMatcher( - pattern=r'moving.*step.*installpackages', - progress=0.5, - message_template='Installing packages', - unmatch_nextline_next_matcher_name=( - 'installing_packages'), - match_nextline_next_matcher_name=( - 'packages_installed') - ), - 'packages_installed': LineMatcher( - pattern=r'leaving.*step.*installpackages', - progress=0.8, - message_template='Packages are installed', - unmatch_nextline_next_matcher_name=( - 'packages_installed'), - match_nextline_next_matcher_name=( - 'installing_bootloader') - ), - 'installing_bootloader': LineMatcher( - pattern=r'moving.*step.*instbootloader', - progress=0.9, - message_template='Installing bootloaders', - unmatch_nextline_next_matcher_name=( - 'installing_bootloader'), - match_nextline_next_matcher_name=( - 'bootloader_installed'), - ), - 'bootloader_installed': LineMatcher( - pattern=r'leaving.*step.*instbootloader', - progress=1.0, - message_template='bootloaders is installed', - unmatch_nextline_next_matcher_name=( - 'bootloader_installed'), - match_nextline_next_matcher_name='exit' - ), - } - ), - FileMatcher( - filename='install.log', - min_progress=0.56, - max_progress=0.80, - line_matchers={ - 'start': LineMatcher( - pattern=r'Installing (?P.*)', - progress=IncrementalProgress(0.0, 0.99, 0.005), - message_template='Installing %(package)s', - unmatch_sameline_next_matcher_name='package_complete', - unmatch_nextline_next_matcher_name='start', - match_nextline_next_matcher_name='start' - ), - 'package_complete': LineMatcher( - pattern='FINISHED.*INSTALLING.*PACKAGES', - progress=1.0, - message_template='installing packages 
finished',
-                        unmatch_nextline_next_matcher_name='start',
-                        match_nextline_next_matcher_name='exit'
-                    ),
-                }
-            ),
-        ]
-    ),
+                        'netcfg': LineMatcher(
+                            pattern=r'Menu.*item.*\'netcfg\'.*selected',
+                            progress=.12,
+                            message_template='netcfg selected',
+                            unmatch_nextline_next_matcher_name='netcfg',
+                            match_nextline_next_matcher_name='network-preseed'
+                        ),
+                        'network-preseed': LineMatcher(
+                            pattern=(
+                                r'Menu.*item.*\'network-preseed\'.*selected'
+                            ),
+                            progress=.15,
+                            message_template='network-preseed selected',
+                            unmatch_nextline_next_matcher_name=(
+                                'network-preseed'
+                            ),
+                            match_nextline_next_matcher_name='localechooser'
+                        ),
+                        'localechooser': LineMatcher(
+                            pattern=r'Menu.*item.*\'localechooser\'.*selected',
+                            progress=.18,
+                            message_template='localechooser selected',
+                            unmatch_nextline_next_matcher_name='localechooser',
+                            match_nextline_next_matcher_name=(
+                                'download-installer'
+                            )
+                        ),
+                        'download-installer': LineMatcher(
+                            pattern=(
+                                r'Menu.*item.*\'download-installer\'.*selected'
+                            ),
+                            progress=.2,
+                            message_template='download installer selected',
+                            unmatch_nextline_next_matcher_name=(
+                                'download-installer'),
+                            match_nextline_next_matcher_name='clock-setup'
+                        ),
+                        'clock-setup': LineMatcher(
+                            pattern=r'Menu.*item.*\'clock-setup\'.*selected',
+                            progress=.3,
+                            message_template='clock-setup selected',
+                            unmatch_nextline_next_matcher_name='clock-setup',
+                            match_nextline_next_matcher_name='disk-detect'
+                        ),
+                        'disk-detect': LineMatcher(
+                            pattern=r'Menu.*item.*\'disk-detect\'.*selected',
+                            progress=.32,
+                            message_template='disk-detect selected',
+                            unmatch_nextline_next_matcher_name='disk-detect',
+                            match_nextline_next_matcher_name='partman-base'
+                        ),
+                        'partman-base': LineMatcher(
+                            pattern=(
+                                r'Menu.*item.*\'partman-base\'.*selected'
+                            ),
+                            progress=.35,
+                            message_template='partman-base selected',
+                            unmatch_nextline_next_matcher_name='partman-base',
+                            match_nextline_next_matcher_name='live-installer'
+                        ),
+                        'live-installer': LineMatcher(
+                            pattern=(
r'Menu.*item.*\'live-installer\'.*selected' + ), + progress=.45, + message_template='live-installer selected', + unmatch_nextline_next_matcher_name=( + 'live-installer' + ), + match_nextline_next_matcher_name='pkgsel' + ), + 'pkgsel': LineMatcher( + pattern=r'Menu.*item.*\'pkgsel\'.*selected', + progress=.5, + message_template='pkgsel selected', + unmatch_nextline_next_matcher_name='pkgsel', + match_nextline_next_matcher_name='grub-installer' + ), + 'grub-installer': LineMatcher( + pattern=( + r'Menu.*item.*\'grub-installer\'.*selected' + ), + progress=.9, + message_template='grub-installer selected', + unmatch_nextline_next_matcher_name=( + 'grub-installer' + ), + match_nextline_next_matcher_name='finish-install' + ), + 'finish-install': LineMatcher( + pattern=( + r'Menu.*item.*\'finish-install\'.*selected' + ), + progress=.95, + message_template='finish-install selected', + unmatch_nextline_next_matcher_name=( + 'finish-install' + ), + match_nextline_next_matcher_name=( + 'finish-install-done' + ) + ), + 'finish-install-done': LineMatcher( + pattern=( + r'Running.*finish-install.d/.*save-logs' + ), + progress=1.0, + message_template='finish-install is done', + unmatch_nextline_next_matcher_name=( + 'finish-install-done' + ), + match_nextline_next_matcher_name='exit' + ), + } + ), + FileMatcher( + filename='status', + min_progress=.2, + max_progress=.3, + line_matchers={ + 'start': LineMatcher( + pattern=r'Package: (?P.*)', + progress=IncrementalProgress(0.0, 0.99, 0.05), + message_template='Installing udeb %(package)s', + unmatch_nextline_next_matcher_name='start', + match_nextline_next_matcher_name='start' + ) + } + ), + FileMatcher( + filename='initial-status', + min_progress=.5, + max_progress=.9, + line_matchers={ + 'start': LineMatcher( + pattern=r'Package: (?P.*)', + progress=IncrementalProgress(0.0, 0.99, 0.01), + message_template='Installing deb %(package)s', + unmatch_nextline_next_matcher_name='start', + match_nextline_next_matcher_name='start' + ) + } 
+ ), + ] + ), + 'CentOS': AdapterItemMatcher( + file_matchers=[ + FileMatcher( + filename='sys.log', + min_progress=0.0, + max_progress=0.1, + line_matchers={ + 'start': LineMatcher( + pattern=r'NOTICE (?P.*)', + progress=IncrementalProgress(.1, .9, .1), + message_template='%(message)s', + unmatch_nextline_next_matcher_name='start', + match_nextline_next_matcher_name='exit' + ), + } + ), + FileMatcher( + filename='anaconda.log', + min_progress=0.1, + max_progress=1.0, + line_matchers={ + 'start': LineMatcher( + pattern=r'setting.*up.*kickstart', + progress=.1, + message_template=( + 'Setting up kickstart configurations'), + unmatch_nextline_next_matcher_name='start', + match_nextline_next_matcher_name='STEP_STAGE2' + ), + 'STEP_STAGE2': LineMatcher( + pattern=r'starting.*STEP_STAGE2', + progress=.15, + message_template=( + 'Downloading installation ' + 'images from server'), + unmatch_nextline_next_matcher_name='STEP_STAGE2', + match_nextline_next_matcher_name='start_anaconda' + ), + 'start_anaconda': LineMatcher( + pattern=r'Running.*anaconda.*script', + progress=.2, + unmatch_nextline_next_matcher_name=( + 'start_anaconda'), + match_nextline_next_matcher_name=( + 'start_kickstart_pre') + ), + 'start_kickstart_pre': LineMatcher( + pattern=r'Running.*kickstart.*pre.*script', + progress=.25, + unmatch_nextline_next_matcher_name=( + 'start_kickstart_pre'), + match_nextline_next_matcher_name=( + 'kickstart_pre_done') + ), + 'kickstart_pre_done': LineMatcher( + pattern=( + r'All.*kickstart.*pre' + '.*script.*have.*been.*run' + ), + progress=.3, + unmatch_nextline_next_matcher_name=( + 'kickstart_pre_done'), + match_nextline_next_matcher_name=( + 'start_enablefilesystem') + ), + 'start_enablefilesystem': LineMatcher( + pattern=( + r'moving.*step.*enablefilesystems' + ), + progress=0.3, + message_template=( + 'Performing hard-disk partitioning and ' + 'enabling filesystems' + ), + unmatch_nextline_next_matcher_name=( + 'start_enablefilesystem'), + 
match_nextline_next_matcher_name=( + 'enablefilesystem_done') + ), + 'enablefilesystem_done': LineMatcher( + pattern=( + r'leaving.*step.*enablefilesystems' + ), + progress=.35, + message_template='Filesystems are enabled', + unmatch_nextline_next_matcher_name=( + 'enablefilesystem_done'), + match_nextline_next_matcher_name=( + 'setup_repositories') + ), + 'setup_repositories': LineMatcher( + pattern=r'moving.*step.*reposetup', + progress=0.35, + message_template=( + 'Setting up Customized Repositories' + ), + unmatch_nextline_next_matcher_name=( + 'setup_repositories'), + match_nextline_next_matcher_name=( + 'repositories_ready') + ), + 'repositories_ready': LineMatcher( + pattern=r'leaving.*step.*reposetup', + progress=0.4, + message_template=( + 'Customized Repositories setting up are done' + ), + unmatch_nextline_next_matcher_name=( + 'repositories_ready'), + match_nextline_next_matcher_name='checking_dud' + ), + 'checking_dud': LineMatcher( + pattern=r'moving.*step.*postselection', + progress=0.4, + message_template='Checking DUD modules', + unmatch_nextline_next_matcher_name='checking_dud', + match_nextline_next_matcher_name='dud_checked' + ), + 'dud_checked': LineMatcher( + pattern=r'leaving.*step.*postselection', + progress=0.5, + message_template='Checking DUD modules are done', + unmatch_nextline_next_matcher_name='dud_checked', + match_nextline_next_matcher_name=( + 'installing_packages' + ) + ), + 'installing_packages': LineMatcher( + pattern=r'moving.*step.*installpackages', + progress=0.5, + message_template='Installing packages', + unmatch_nextline_next_matcher_name=( + 'installing_packages'), + match_nextline_next_matcher_name=( + 'packages_installed') + ), + 'packages_installed': LineMatcher( + pattern=r'leaving.*step.*installpackages', + progress=0.8, + message_template='Packages are installed', + unmatch_nextline_next_matcher_name=( + 'packages_installed'), + match_nextline_next_matcher_name=( + 'installing_bootloader') + ), + 
'installing_bootloader': LineMatcher( + pattern=r'moving.*step.*instbootloader', + progress=0.9, + message_template='Installing bootloaders', + unmatch_nextline_next_matcher_name=( + 'installing_bootloader'), + match_nextline_next_matcher_name=( + 'bootloader_installed'), + ), + 'bootloader_installed': LineMatcher( + pattern=r'leaving.*step.*instbootloader', + progress=1.0, + message_template='bootloaders is installed', + unmatch_nextline_next_matcher_name=( + 'bootloader_installed'), + match_nextline_next_matcher_name='exit' + ), + } + ), + FileMatcher( + filename='install.log', + min_progress=0.56, + max_progress=0.80, + line_matchers={ + 'start': LineMatcher( + pattern=r'Installing (?P.*)', + progress=IncrementalProgress(0.0, 0.99, 0.005), + message_template='Installing %(package)s', + unmatch_sameline_next_matcher_name=( + 'package_complete' + ), + unmatch_nextline_next_matcher_name='start', + match_nextline_next_matcher_name='start' + ), + 'package_complete': LineMatcher( + pattern='FINISHED.*INSTALLING.*PACKAGES', + progress=1.0, + message_template='installing packages finished', + unmatch_nextline_next_matcher_name='start', + match_nextline_next_matcher_name='exit' + ), + } + ), + ] + ), + } } PACKAGE_INSTALLER_CONFIGURATIONS = { - 'openstack': AdapterItemMatcher( - file_matchers=[ - FileMatcher( - filename='chef-client.log', - min_progress=0.1, - max_progress=1.0, - line_matchers={ - 'start': LineMatcher( - pattern=( - r'Processing\s*(?P.*)' - r'\[(?P.*)\].*'), - progress=IncrementalProgress(0.0, .90, 0.005), - message_template=( - 'Processing %(install_type)s %(package)s'), - unmatch_sameline_next_matcher_name=( - 'chef_complete'), - unmatch_nextline_next_matcher_name='start', - match_nextline_next_matcher_name='start' - ), - 'chef_complete': LineMatcher( - pattern=r'Chef.*Run.*complete', - progress=1.0, - message_template='Chef run complete', - unmatch_nextline_next_matcher_name='start', - match_nextline_next_matcher_name='exit' - ), - } - ), - ] - ), + 
'chef_installer': { + 'openstack': AdapterItemMatcher( + file_matchers=[ + FileMatcher( + filename='chef-client.log', + min_progress=0.1, + max_progress=1.0, + line_matchers={ + 'start': LineMatcher( + pattern=( + r'Processing\s*(?P.*)' + r'\[(?P.*)\].*'), + progress=IncrementalProgress(0.0, .90, 0.005), + message_template=( + 'Processing %(install_type)s %(package)s'), + unmatch_sameline_next_matcher_name=( + 'chef_complete'), + unmatch_nextline_next_matcher_name='start', + match_nextline_next_matcher_name='start' + ), + 'chef_complete': LineMatcher( + pattern=r'Chef.*Run.*complete', + progress=1.0, + message_template='Chef run complete', + unmatch_nextline_next_matcher_name='start', + match_nextline_next_matcher_name='exit' + ), + } + ), + ] + ), + } } OS_ADAPTER_CONFIGURATIONS = [ OSMatcher( os_installer_name='cobbler', os_pattern='CentOS.*', - item_matcher=OS_INSTALLER_CONFIGURATIONS['CentOS'], - min_progress=0.0, - max_progress=1.0 + item_matcher=OS_INSTALLER_CONFIGURATIONS['cobbler']['CentOS'], + file_reader_factory=FileReaderFactory( + setting.INSTALLATION_LOGDIR['CobblerInstaller'] + ) ), OSMatcher( os_installer_name='cobbler', os_pattern='Ubuntu.*', - item_matcher=OS_INSTALLER_CONFIGURATIONS['Ubuntu'], - min_progress=0.0, - max_progress=1.0 + item_matcher=OS_INSTALLER_CONFIGURATIONS['cobbler']['Ubuntu'], + file_reader_factory=FileReaderFactory( + setting.INSTALLATION_LOGDIR['CobblerInstaller'] + ) ) ] PACKAGE_ADAPTER_CONFIGURATIONS = [ PackageMatcher( - package_installer_name='chef.*', - target_system='openstack', - item_matcher=PACKAGE_INSTALLER_CONFIGURATIONS['openstack'], - min_progress=0.0, - max_progress=1.0 + package_installer_name='chef_installer', + distributed_system_pattern='openstack.*', + item_matcher=PACKAGE_INSTALLER_CONFIGURATIONS[ + 'chef_installer']['openstack'], + file_reader_factory=FileReaderFactory( + setting.INSTALLATION_LOGDIR['ChefInstaller'] + ) ) ] -def _get_os_adapter_matcher(os_installer, os_name): +def 
_get_os_matcher(os_installer_name, os_name): """Get OS adapter matcher by os name and installer name.""" for configuration in OS_ADAPTER_CONFIGURATIONS: - if configuration.match(os_installer, os_name): + if configuration.match(os_installer_name, os_name): return configuration else: logging.debug('configuration %s does not match %s and %s', - configuration, os_name, os_installer) + configuration, os_name, os_installer_name) logging.error('No configuration found for os installer %s os %s', - os_installer, os_name) + os_installer_name, os_name) return None -def _get_package_adapter_matcher(package_installer, target_system): +def _get_package_matcher( + package_installer_name, distributed_system_name +): """Get package adapter matcher by pacakge name and installer name.""" for configuration in PACKAGE_ADAPTER_CONFIGURATIONS: - if configuration.match(package_installer, target_system): + if configuration.match( + package_installer_name, + distributed_system_name + ): return configuration else: logging.debug('configuration %s does not match %s and %s', - configuration, target_system, package_installer) + configuration, distributed_system_name, + package_installer_name) logging.error('No configuration found for package installer %s os %s', - package_installer, target_system) + package_installer_name, distributed_system_name) return None -def update_progress( - os_installers, os_names, package_installers, target_systems, - cluster_hosts -): - """Update adapter installing progress. - - :param os_installers: cluster id to os installer name - :param package_installers: cluster id to package installer name. - :param cluster_hosts: clusters and hosts in each cluster to update. - :param cluster_hosts: dict of int to list of int. 
- """
-    for clusterid, hostids in cluster_hosts.items():
-        """
-        adapter = _get_adapter_matcher(os_installers[clusterid],
-                                       os_names[clusterid],
-                                       package_installers[clusterid],
-                                       target_systems[clusterid])
-        if not adapter:
-            continue
-
-        adapter.update_progress(clusterid, hostids)
-        """
-        os_adapter = _get_os_adapter_matcher(
-            os_installers[clusterid], os_names[clusterid]
+def update_host_progress(host_mapping):
+    for host_id, (host, host_state, host_log_history_mapping) in (
+        host_mapping.items()
+    ):
+        os_name = host['os_name']
+        os_installer_name = host['os_installer']['name']
+        os_matcher = _get_os_matcher(
+            os_installer_name, os_name
         )
-        package_adapter = _get_package_adapter_matcher(
-            package_installers[clusterid],
-            target_systems[clusterid]
-        )
-        if not (os_adapter or package_adapter):
+        if not os_matcher:
             continue
+        name = host[setting.HOST_INSTALLATION_LOGDIR_NAME]
+        os_matcher.update_progress(
+            name, host_state, host_log_history_mapping
+        )
 
-        adapter = AdapterMatcher(os_adapter, package_adapter)
-        adapter.update_progress(clusterid, hostids)
+
+def update_clusterhost_progress(clusterhost_mapping):
+    for (
+        clusterhost_id,
+        (clusterhost, clusterhost_state, clusterhost_log_history_mapping)
+    ) in (
+        clusterhost_mapping.items()
+    ):
+        distributed_system_name = clusterhost['distributed_system_name']
+        package_installer_name = clusterhost['package_installer']['name']
+        package_matcher = _get_package_matcher(
+            package_installer_name,
+            distributed_system_name
+        )
+        if not package_matcher:
+            continue
+        name = clusterhost[setting.CLUSTERHOST_INATALLATION_LOGDIR_NAME]
+        package_matcher.update_progress(
+            name, clusterhost_state,
+            clusterhost_log_history_mapping
+        )
+
+
+def update_cluster_progress(cluster_mapping):
+    for cluster_id, (cluster, cluster_state) in cluster_mapping.items():
+        pass
diff --git a/compass/tasks/tasks.py b/compass/tasks/tasks.py
index 6e9d80c5..fdd10227 100644
--- a/compass/tasks/tasks.py
+++ b/compass/tasks/tasks.py
@@ -104,9 
+104,6 @@ def reinstall_cluster(installer_email, cluster_id, clusterhost_ids): @celery.task(name='compass.tasks.poweron_host') def poweron_host(host_id): """Deploy the given cluster. - - :param cluster_hosts: the cluster and hosts of each cluster to deploy. - :type cluster_hosts: dict of int to list of int """ pass @@ -114,9 +111,6 @@ def poweron_host(host_id): @celery.task(name='compass.tasks.poweroff_host') def poweroff_host(host_id): """Deploy the given cluster. - - :param cluster_hosts: the cluster and hosts of each cluster to deploy. - :type cluster_hosts: dict of int to list of int """ pass @@ -124,9 +118,6 @@ def poweroff_host(host_id): @celery.task(name='compass.tasks.reset_host') def reset_host(host_id): """Deploy the given cluster. - - :param cluster_hosts: the cluster and hosts of each cluster to deploy. - :type cluster_hosts: dict of int to list of int """ pass @@ -134,9 +125,6 @@ def reset_host(host_id): @celery.task(name='compass.tasks.poweron_machine') def poweron_machine(machine_id): """Deploy the given cluster. - - :param cluster_hosts: the cluster and hosts of each cluster to deploy. - :type cluster_hosts: dict of int to list of int """ pass @@ -144,9 +132,6 @@ def poweron_machine(machine_id): @celery.task(name='compass.tasks.poweroff_machine') def poweroff_machine(machine_id): """Deploy the given cluster. - - :param cluster_hosts: the cluster and hosts of each cluster to deploy. - :type cluster_hosts: dict of int to list of int """ pass @@ -154,22 +139,16 @@ def poweroff_machine(machine_id): @celery.task(name='compass.tasks.reset_machine') def reset_machine(machine_id): """Deploy the given cluster. - - :param cluster_hosts: the cluster and hosts of each cluster to deploy. - :type cluster_hosts: dict of int to list of int """ pass @celery.task(name='compass.tasks.update_progress') -def update_clusters_progress(cluster_hosts): +def update_clusters_progress(): """Calculate the installing progress of the given cluster. 
- - :param cluster_hosts: the cluster and hosts of each cluster to update. - :type cluster_hosts: dict of int to list of int """ - logging.info('update_clusters_progress: %s', cluster_hosts) + logging.info('update_clusters_progress') try: - update_progress.update_progress(cluster_hosts) + update_progress.update_progress() except Exception as error: logging.exception(error) diff --git a/compass/tests/db/api/test_utils.py b/compass/tests/db/api/test_utils.py index ca67c43a..b400fcfd 100644 --- a/compass/tests/db/api/test_utils.py +++ b/compass/tests/db/api/test_utils.py @@ -563,12 +563,12 @@ class TestUpdateDbObject(unittest2.TestCase): db_obj = utils.get_db_object( session, models.Permission, - id=1 + id=1000 ) - utils.updated_db_object( + utils.update_db_object( session, db_obj, - dummy='dummy' + name='dummy' ) diff --git a/compass/utils/setting_wrapper.py b/compass/utils/setting_wrapper.py index e2821dc2..4658db09 100644 --- a/compass/utils/setting_wrapper.py +++ b/compass/utils/setting_wrapper.py @@ -28,8 +28,10 @@ CONFIG_DIR = '/etc/compass' SQLALCHEMY_DATABASE_URI = 'sqlite://' INSTALLATION_LOGDIR = { 'CobblerInstaller': '/var/log/cobbler/anamon', - 'ChefInstaller': '/var/log/chef' + 'ChefInstaller': '/var/log/cobbler/anamon' } +CLUSTERHOST_INATALLATION_LOGDIR_NAME = 'hostname' +HOST_INSTALLATION_LOGDIR_NAME = 'hostname' DEFAULT_LOGLEVEL = 'debug' DEFAULT_LOGDIR = '/tmp' DEFAULT_LOGINTERVAL = 1 diff --git a/conf/package_installer/chef-icehouse.conf b/conf/package_installer/chef-icehouse.conf index cd88489b..e0cd074c 100644 --- a/conf/package_installer/chef-icehouse.conf +++ b/conf/package_installer/chef-icehouse.conf @@ -1,9 +1,9 @@ NAME = 'chef_installer' INSTANCE_NAME = 'chef_installer' SETTINGS = { - 'chef_url': 'https://$chef_host', - 'chef_server_ip': '', - 'chef_server_dns': '', + 'chef_url': 'https://$chef_ip', + 'chef_server_ip': '$chef_ip', + 'chef_server_dns': '$chef_hostname', 'key_dir': '', 'client_name': '', 'databags': ['user_passwords', 
'db_passwords', 'service_passwords', 'secrets'] diff --git a/conf/setting b/conf/setting index 6dbd0146..5a5f7a62 100644 --- a/conf/setting +++ b/conf/setting @@ -7,7 +7,7 @@ DATABASE_NAME = 'db' SQLALCHEMY_DATABASE_URI = '%s://%s:%s@%s/%s' % (DATABASE_TYPE, DATABASE_USER, DATABASE_PASSWORD, DATABASE_SERVER, DATABASE_NAME) INSTALLATION_LOGDIR = { 'CobblerInstaller': '/var/log/cobbler/anamon', - 'ChefInstaller': '/var/log/chef' + 'ChefInstaller': '/var/log/cobbler/anamon' } DEFAULT_LOGLEVEL = 'debug' DEFAULT_LOGDIR = '/var/log/compass' diff --git a/install/compass.sh b/install/compass.sh index 3d87be64..b42b2c53 100755 --- a/install/compass.sh +++ b/install/compass.sh @@ -47,6 +47,7 @@ sudo sed -i "/COBBLER_INSTALLER_URL/c\COBBLER_INSTALLER_URL = 'http:\/\/$ipaddr/ sudo sed -i "s/\$cobbler_ip/$ipaddr/g" /etc/compass/os_installer/cobbler.conf sudo sed -i "/CHEF_INSTALLER_URL/c\CHEF_INSTALLER_URL = 'https:\/\/$ipaddr/'" /etc/compass/setting sudo sed -i "s/\$chef_ip/$ipaddr/g" /etc/compass/package_installer/chef-icehouse.conf +sudo sed -i "s/\$chef_hostname/$HOSTNAME/g" /etc/compass/package_installer/chef-icehouse.conf sudo sed -i "s/\$compass_ip/$ipaddr/g" /etc/compass/global_config sudo sed -i "s/\$compass_hostname/$HOSTNAME/g" /etc/compass/global_config sudo sed -i "s/\$compass_testmode/$TESTMODE/g" /etc/compass/global_config