Too broad exceptions replaced with alternatives.
Blocks catching the base Exception were replaced with more constrained
alternatives where possible. The alternatives were chosen based on the
exceptions that the calls within the `try` clause can actually raise.
Log messages were updated to reflect the new behavior, with the string
`format` method used for the new log calls. Unit tests were adjusted
accordingly.

This patch may cause an uptick in unhandled exceptions, as previously
"hidden" issues become apparent. Broadening the set of captured
exceptions again, including reverting individual hunks, should
therefore be done with care.

Signed-off-by: Jiri Podivin <jpodivin@redhat.com>
Change-Id: I24612700bcbd3e17685ae7a33a27f38a2055a0fb
parent b5ca0f42bd
commit a1aadd37d7
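Every hunk below follows the same pattern. A minimal, self-contained before/after sketch of that pattern (illustrative names only, not code from the patch):

import logging

LOG = logging.getLogger(__name__)

def parse_port(raw):
    # Before: `except Exception:` silenced everything, including
    # programming errors such as a misspelled attribute or name.
    # After: only the errors int() can raise for bad input are caught,
    # so genuine bugs propagate and become visible.
    try:
        return int(raw)
    except (TypeError, ValueError) as e:
        LOG.error('Invalid port {!r}: {}'.format(raw, e))
        return None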
@@ -97,7 +97,7 @@ def export_stack(heat, stack, should_filter=False,
         with open(file, 'r') as ff:
             try:
                 export_data = json.load(ff)
-            except Exception as e:
+            except (TypeError, json.JSONDecodeError) as e:
                 LOG.error(
                     _('Could not read file %s') % file)
                 LOG.error(e)
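For reference (not part of the patch): json.load reports malformed input through json.JSONDecodeError, a ValueError subclass available since Python 3.5, which is what the narrowed clause above relies on. A standalone check:

import io
import json

try:
    json.load(io.StringIO('{"unterminated": '))
except json.JSONDecodeError as e:
    # msg, lineno and colno pinpoint the parse failure.
    print('parse failed: {} (line {}, col {})'.format(e.msg, e.lineno, e.colno))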
@@ -130,7 +130,7 @@ def export_ceph_net_key(stack, config_download_dir=constants.DEFAULT_WORK_DIR):
         with open(file, 'r') as ff:
             try:
                 global_data = yaml.safe_load(ff)
-            except Exception as e:
+            except yaml.MarkedYAMLError as e:
                 LOG.error(
                     _('Could not read file %s') % file)
                 LOG.error(e)
@@ -146,7 +146,7 @@ def export_storage_ips(stack, config_download_dir=constants.DEFAULT_WORK_DIR,
         with open(file, 'r') as ff:
             try:
                 inventory_data = yaml.safe_load(ff)
-            except Exception as e:
+            except yaml.MarkedYAMLError as e:
                 LOG.error(
                     _('Could not read file %s') % file)
                 LOG.error(e)
@@ -174,7 +174,7 @@ def export_ceph(stack, cephx,
         with open(file, 'r') as ff:
             try:
                 ceph_data = yaml.safe_load(ff)
-            except Exception as e:
+            except yaml.MarkedYAMLError as e:
                 LOG.error(
                     _('Could not read file %s') % file)
                 LOG.error(e)
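The three YAML hunks above lean on the fact that PyYAML's scanner, parser, composer and constructor errors all derive from yaml.MarkedYAMLError, so a single clause covers the load-time failures of yaml.safe_load. A standalone check (assuming a current PyYAML):

import yaml

try:
    yaml.safe_load('{ key: value')  # unterminated flow mapping
except yaml.MarkedYAMLError as e:
    # problem_mark carries the line/column where parsing gave up.
    print('YAML error: {}{}'.format(e.problem, e.problem_mark))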
@@ -686,7 +686,7 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
                 '_heat_deploy', autospec=True)
     def test_try_overcloud_deploy_with_no_templates_existing(
             self, mock_heat_deploy_func):
-        mock_heat_deploy_func.side_effect = Exception('error')
+        mock_heat_deploy_func.side_effect = oscexc.CommandError('error')
         self.assertRaises(ValueError,
                           self.cmd._try_overcloud_deploy_with_compat_yaml,
                           '/fake/path', mock.ANY, mock.ANY,
@@ -698,7 +698,7 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
     def test_try_overcloud_deploy_show_missing_file(
             self, mock_heat_deploy_func):
         mock_heat_deploy_func.side_effect = \
-            Exception('/fake/path not found')
+            oscexc.CommandError('/fake/path not found')
         try:
             self.cmd._try_overcloud_deploy_with_compat_yaml(
                 '/fake/path', mock.ANY, mock.ANY,
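The two test hunks swap the generic Exception for the CommandError the production code now catches. The side_effect mechanism they rely on makes a mock raise when called; a generic sketch (names illustrative, not from the patch):

from unittest import mock

deploy = mock.Mock(side_effect=RuntimeError('error'))
try:
    deploy('/fake/path')
except RuntimeError as e:
    print('mock raised: {}'.format(e))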
@@ -1759,7 +1759,7 @@ def archive_deploy_artifacts(log, stack_name, working_dir, ansible_dir=None):
             tf.add(ansible_dir, recursive=True,
                    filter=tar_filter)
         tf.close()
-    except Exception as ex:
+    except tarfile.TarError as ex:
         msg = _("Unable to create artifact tarball, %s") % str(ex)
         log.warning(msg)
     return tar_filename
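tarfile.TarError is the base of the module's own failures (ReadError, CompressionError, ...), so the clause above covers archive problems without also swallowing unrelated bugs. A standalone check against corrupt input:

import io
import tarfile

try:
    tarfile.open(fileobj=io.BytesIO(b'definitely not a tarball'))
except tarfile.TarError as e:
    # Raises tarfile.ReadError, a TarError subclass.
    print('{}: {}'.format(type(e).__name__, e))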
@@ -2391,7 +2391,7 @@ def _get_from_cfg(cfg, accessor, param, section):
     """Return a parameter from Kolla config"""
     try:
         val = accessor(section, param)
-    except Exception:
+    except (ValueError, configparser.Error):
         raise exceptions.NotFound(_("Unable to find {section}/{option} in "
                                     "{config}").format(section=param,
                                                        option=section,
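configparser.Error is the common base of lookup failures such as NoSectionError and NoOptionError, while typed accessors like getint raise a plain ValueError for unparseable values; the tuple above covers both cases. For example:

import configparser

cfg = configparser.ConfigParser()
cfg.read_string('[DEFAULT]\nport = not-a-number\n')

try:
    cfg.get('missing_section', 'port')
except configparser.Error as e:   # NoSectionError
    print('lookup failed: {}'.format(e))

try:
    cfg.getint('DEFAULT', 'port')
except ValueError as e:           # bad int, not a configparser.Error
    print('conversion failed: {}'.format(e))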
@@ -2408,7 +2408,7 @@ def get_local_timezone():
     # The line returned is "[whitespace]Time zone: [timezone] ([tz], [offset])"
     try:
         timezone = timezoneline[0].strip().split(' ')[2]
-    except Exception:
+    except IndexError:
         LOG.error('Unable to parse timezone from timedatectl, using UTC')
         timezone = 'UTC'
     return timezone
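Indexing the token list is the only operation in that try body that can realistically fail, and it fails with IndexError when the line is shorter than expected:

# Expected shape: "       Time zone: Europe/Prague (CET, +0100)"
line = '       Time zone: Europe/Prague (CET, +0100)'
print(line.strip().split(' ')[2])   # -> Europe/Prague

try:
    ''.strip().split(' ')[2]        # malformed or empty timedatectl output
except IndexError:
    print('falling back to UTC')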
@@ -430,7 +430,7 @@ class DeployOvercloud(command.Command):
                 roles_file,
                 env_files_tracker=env_files_tracker,
                 deployment_options=deployment_options)
-        except Exception as e:
+        except oscexc.CommandError as e:
             messages = 'Failed to deploy: %s' % str(e)
             raise ValueError(messages)
 
@@ -124,9 +124,15 @@ class ValidateOvercloudNetenv(command.Command):
             try:
                 pool_objs.append(list(
                     ipaddress.summarize_address_range(ip_start, ip_end)))
-            except Exception:
-                self.log.error('Invalid address pool: %s, %s' %
-                               (ip_start, ip_end))
+            except (TypeError, ValueError) as ex:
+                self.log.error(
+                    (
+                        'Encountered exception `{}` while working with\n'
+                        'the address pool: {}, {}').format(
+                            ex,
+                            ip_start,
+                            ip_end))
                 self.error_count += 1
 
         subnet_item = poolitem.split('AllocationPools')[0] + 'NetCidr'
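ipaddress.summarize_address_range is a generator, so the surrounding list(...) call is what actually forces evaluation; per its documentation it raises ValueError for a reversed range and TypeError for mixed or non-address arguments, matching the tuple above. For example:

import ipaddress

start = ipaddress.ip_address('192.0.2.10')
end = ipaddress.ip_address('192.0.2.1')
try:
    pools = list(ipaddress.summarize_address_range(start, end))
except (TypeError, ValueError) as ex:
    # ValueError: last IP address must be greater than first
    print('invalid pool {}, {}: {}'.format(start, end, ex))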
@@ -315,7 +315,7 @@ class Deploy(command.Command):
                     # we'll end up rewriting the passwords later and
                     # causing problems.
                     config.get('auth', 'undercloud_rpc_password')
-                except Exception:
+                except configparser.Error:
                     legacy_env['RpcPassword'] = v
                     k = 'RabbitPassword'
             elif k == 'undercloud_rabbit_cookie':
@@ -1199,9 +1199,12 @@ class Deploy(command.Command):
         with open(f, 'r') as ff:
             try:
                 failures = json.load(ff)
-            except Exception:
-                self.log.error(
-                    _('Could not read ansible errors from file %s') % f)
+            except (json.JSONDecodeError, TypeError) as ex:
+                self.log.error(_(
+                    'Could not read ansible errors from file {}.\n'
+                    'Encountered {}').format(
+                        f,
+                        ex))
 
         if not failures or not failures.get(name, {}):
             return
@@ -195,7 +195,7 @@ def scale_down(log, clients, stack, nodes, timeout=None, verbosity=0,
         try:
             _node = clients.compute.servers.get(node)
             limit_list.append(_node.name)
-        except Exception:
+        except (KeyError, ValueError, AttributeError):
             limit_list.append(node)
 
     if limit_list:
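The payoff the commit message promises, in miniature: a broad handler conceals programming errors that a narrow one lets surface. A contrived, self-contained illustration (not code from the patch):

class Server:
    def __init__(self, name):
        self.name = name

def label_broad(server):
    try:
        return server.nmae              # typo: should be server.name
    except Exception:                   # broad: the typo becomes 'unknown'
        return 'unknown'

def label_narrow(server):
    try:
        return server.nmae              # same typo
    except (KeyError, ValueError):      # narrow: AttributeError escapes
        return 'unknown'

print(label_broad(Server('web-0')))     # -> unknown (bug hidden)
try:
    label_narrow(Server('web-0'))
except AttributeError as ex:
    print('bug surfaced: {}'.format(ex))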