Remove translation of log messages Part-1

The i18n team has decided not to translate the logs because it seems
like it is not very useful.

These are the files that, as of now, do not have merge conflicts.

Change-Id: I082f6302f120a8c0c81482b73be301e83fe3a3a8
Partial-Bug: #1674374
changes/84/447884/7
Annie Lezil 6 years ago
parent fec8c78600
commit c06a9a7f24
  1. 4
      ironic_inspector/common/ironic.py
  2. 21
      ironic_inspector/common/lldp_parsers.py
  3. 6
      ironic_inspector/plugins/base.py
  4. 12
      ironic_inspector/plugins/capabilities.py
  5. 12
      ironic_inspector/plugins/discovery.py
  6. 15
      ironic_inspector/plugins/extra_hardware.py
  7. 7
      ironic_inspector/plugins/lldp_basic.py
  8. 17
      ironic_inspector/plugins/pci_devices.py
  9. 16
      ironic_inspector/plugins/raid_device.py
  10. 30
      ironic_inspector/plugins/standard.py
  11. 26
      ironic_inspector/rules.py
  12. 9
      ironic_inspector/test/base.py
  13. 5
      ironic_inspector/test/unit/test_migrations.py
  14. 10
      ironic_inspector/utils.py

@ -18,7 +18,7 @@ from ironicclient import exceptions as ironic_exc
import netaddr
from oslo_config import cfg
from ironic_inspector.common.i18n import _, _LW
from ironic_inspector.common.i18n import _
from ironic_inspector.common import keystone
from ironic_inspector import utils
@ -108,7 +108,7 @@ def get_ipmi_address(node):
node_info=node)
if netaddr.IPAddress(ip).is_loopback():
LOG.warning(_LW('Ignoring loopback BMC address %s'), ip,
LOG.warning('Ignoring loopback BMC address %s', ip,
node_info=node)
ip = None

@ -18,7 +18,7 @@ import binascii
from construct import core
import netaddr
from ironic_inspector.common.i18n import _, _LW
from ironic_inspector.common.i18n import _
from ironic_inspector.common import lldp_tlvs as tlv
from ironic_inspector import utils
@ -136,7 +136,7 @@ class LLDPParser(object):
name = s[2]
check_len = s[3]
except KeyError as e:
LOG.warning(_LW("Key error in TLV table: %s"), e,
LOG.warning("Key error in TLV table: %s", e,
node_info=self.node_info)
return False
@ -144,10 +144,11 @@ class LLDPParser(object):
# proper number of bytes has been provided, for example
# when a BitStruct is used.
if check_len and (tlv_parser.sizeof() != len(data)):
LOG.warning(_LW('Invalid data for %(name)s '
'expected len %(expect)d, got %(actual)d'),
{'name': name, 'expect': tlv_parser.sizeof(),
'actual': len(data)}, node_info=self.node_info)
LOG.warning("Invalid data for %(name)s expected len %(expect)d, "
"got %(actual)d", {'name': name,
'expect': tlv_parser.sizeof(),
'actual': len(data)},
node_info=self.node_info)
return False
# Use the construct parser to parse TLV so that it's
@ -156,7 +157,7 @@ class LLDPParser(object):
struct = tlv_parser.parse(data)
except (core.RangeError, core.FieldError, core.MappingError,
netaddr.AddrFormatError) as e:
LOG.warning(_LW("TLV parse error: %s"), e,
LOG.warning("TLV parse error: %s", e,
node_info=self.node_info)
return False
@ -164,7 +165,7 @@ class LLDPParser(object):
try:
func(struct, name, data)
except ValueError as e:
LOG.warning(_LW("TLV value error: %s"), e,
LOG.warning("TLV value error: %s", e,
node_info=self.node_info)
return False
@ -271,8 +272,8 @@ class LLDPBasicMgmtParser(LLDPParser):
else:
LOG.debug("Subtype %d not found for 802.3", subtype)
else:
LOG.warning(_LW("Organizationally Unique ID %s not "
"recognized"), oui, node_info=self.node_info)
LOG.warning("Organizationally Unique ID %s not "
"recognized", oui, node_info=self.node_info)
class LLDPdot1Parser(LLDPParser):

@ -20,7 +20,7 @@ from oslo_log import log
import six
import stevedore
from ironic_inspector.common.i18n import _, _LW
from ironic_inspector.common.i18n import _
CONF = cfg.CONF
@ -207,8 +207,8 @@ def rule_actions_manager():
for act in _ACTIONS_MGR:
# a trick to detect if function was overridden
if "rollback" in act.obj.__class__.__dict__:
LOG.warning(_LW('Defining "rollback" for introspection rules '
'actions is deprecated (action "%s")'),
LOG.warning('Defining "rollback" for introspection rules '
'actions is deprecated (action "%s")',
act.name)
return _ACTIONS_MGR

@ -15,7 +15,7 @@
from oslo_config import cfg
from ironic_inspector.common.i18n import _, _LI, _LW
from ironic_inspector.common.i18n import _
from ironic_inspector.plugins import base
from ironic_inspector import utils
@ -56,19 +56,19 @@ class CapabilitiesHook(base.ProcessingHook):
def _detect_boot_mode(self, inventory, node_info, data=None):
boot_mode = inventory.get('boot', {}).get('current_boot_mode')
if boot_mode is not None:
LOG.info(_LI('Boot mode was %s'), boot_mode,
LOG.info('Boot mode was %s', boot_mode,
data=data, node_info=node_info)
return {'boot_mode': boot_mode}
else:
LOG.warning(_LW('No boot mode information available'),
LOG.warning('No boot mode information available',
data=data, node_info=node_info)
return {}
def _detect_cpu_flags(self, inventory, node_info, data=None):
flags = inventory['cpu'].get('flags')
if not flags:
LOG.warning(_LW('No CPU flags available, please update your '
'introspection ramdisk'),
LOG.warning('No CPU flags available, please update your '
'introspection ramdisk',
data=data, node_info=node_info)
return {}
@ -78,7 +78,7 @@ class CapabilitiesHook(base.ProcessingHook):
if flag in flags:
caps[name] = 'true'
LOG.info(_LI('CPU capabilities: %s'), list(caps),
LOG.info('CPU capabilities: %s', list(caps),
data=data, node_info=node_info)
return caps

@ -15,7 +15,7 @@
from oslo_config import cfg
from ironic_inspector.common.i18n import _, _LW
from ironic_inspector.common.i18n import _
from ironic_inspector.common import ironic as ir_utils
from ironic_inspector import node_cache
from ironic_inspector import utils
@ -46,8 +46,8 @@ def _extract_node_driver_info(introspection_data):
if ipmi_address:
node_driver_info['ipmi_address'] = ipmi_address
else:
LOG.warning(_LW('No BMC address provided, discovered node will be '
'created without ipmi address'))
LOG.warning('No BMC address provided, discovered node will be '
'created without ipmi address')
return node_driver_info
@ -63,9 +63,9 @@ def _check_existing_nodes(introspection_data, node_driver_info, ironic):
_('Port %(mac)s already exists, uuid: %(uuid)s') %
{'mac': mac, 'uuid': ports[0].uuid}, data=introspection_data)
else:
LOG.warning(_LW('No suitable interfaces found for discovered node. '
'Check that validate_interfaces hook is listed in '
'[processing]default_processing_hooks config option'))
LOG.warning('No suitable interfaces found for discovered node. '
'Check that validate_interfaces hook is listed in '
'[processing]default_processing_hooks config option')
# verify existing node with discovered ipmi address
ipmi_address = node_driver_info.get('ipmi_address')

@ -20,7 +20,6 @@ is stored in the 'inspector' container.
import json
from ironic_inspector.common.i18n import _LW
from ironic_inspector.common import swift
from ironic_inspector.plugins import base
from ironic_inspector import utils
@ -47,9 +46,9 @@ class ExtraHardwareHook(base.ProcessingHook):
Otherwise, it does nothing.
"""
if 'data' not in introspection_data:
LOG.warning(_LW('No extra hardware information was received from '
'the ramdisk'),
node_info=node_info, data=introspection_data)
LOG.warning('No extra hardware information was received from '
'the ramdisk', node_info=node_info,
data=introspection_data)
return
data = introspection_data['data']
@ -66,10 +65,10 @@ class ExtraHardwareHook(base.ProcessingHook):
node_info=node_info, data=introspection_data)
introspection_data['extra'] = self._convert_edeploy_data(data)
else:
LOG.warning(_LW('Extra hardware data was not in a recognised '
'format (eDeploy), and will not be forwarded to '
'introspection rules'),
node_info=node_info, data=introspection_data)
LOG.warning('Extra hardware data was not in a recognised '
'format (eDeploy), and will not be forwarded to '
'introspection rules', node_info=node_info,
data=introspection_data)
LOG.debug('Deleting \"data\" key from introspection data as it is '
'assumed unusable by introspection rules. Raw data is '

@ -15,7 +15,6 @@
import binascii
from ironic_inspector.common.i18n import _LW
from ironic_inspector.common import lldp_parsers
from ironic_inspector.plugins import base
from ironic_inspector import utils
@ -48,9 +47,9 @@ class LLDPBasicProcessingHook(base.ProcessingHook):
try:
data = bytearray(binascii.a2b_hex(tlv_value))
except TypeError as e:
LOG.warning(_LW(
LOG.warning(
"TLV value for TLV type %(tlv_type)d not in correct "
"format, value must be in hexadecimal: %(msg)s"),
"format, value must be in hexadecimal: %(msg)s",
{'tlv_type': tlv_type, 'msg': e}, node_info=node_info)
continue
@ -73,7 +72,7 @@ class LLDPBasicProcessingHook(base.ProcessingHook):
tlvs = iface.get('lldp')
if tlvs is None:
LOG.warning(_LW("No LLDP Data found for interface %s"),
LOG.warning("No LLDP Data found for interface %s",
if_name, node_info=node_info)
continue

@ -18,7 +18,7 @@ import json
from oslo_config import cfg
from ironic_inspector.common.i18n import _, _LI, _LW, _LE
from ironic_inspector.common.i18n import _
from ironic_inspector.plugins import base
from ironic_inspector import utils
@ -49,12 +49,12 @@ def _parse_pci_alias_entry():
try:
parsed_entry = json.loads(pci_alias_entry)
if set(parsed_entry) != {'vendor_id', 'product_id', 'name'}:
raise KeyError(_LE("The 'alias' entry should contain "
"exactly 'vendor_id', 'product_id' and "
"'name' keys"))
raise KeyError("The 'alias' entry should contain "
"exactly 'vendor_id', 'product_id' and "
"'name' keys")
parsed_pci_devices.append(parsed_entry)
except (ValueError, KeyError) as ex:
LOG.error(_LE("Error parsing 'alias' option: %s"), ex)
LOG.error("Error parsing 'alias' option: %s", ex)
return {(dev['vendor_id'], dev['product_id']): dev['name']
for dev in parsed_pci_devices}
@ -75,13 +75,12 @@ class PciDevicesHook(base.ProcessingHook):
def before_update(self, introspection_data, node_info, **kwargs):
if 'pci_devices' not in introspection_data:
if CONF.pci_devices.alias:
LOG.warning(_LW('No PCI devices information was received from '
'the ramdisk.'))
LOG.warning('No PCI devices information was received from '
'the ramdisk.')
return
alias_count = {self.aliases[id_pair]: count for id_pair, count in
self._found_pci_devices_count(
introspection_data['pci_devices']).items()}
if alias_count:
node_info.update_capabilities(**alias_count)
LOG.info(_LI('Found the following PCI devices: %s'),
alias_count)
LOG.info('Found the following PCI devices: %s', alias_count)

@ -13,7 +13,6 @@
"""Gather root device hint from recognized block devices."""
from ironic_inspector.common.i18n import _LI, _LW
from ironic_inspector.plugins import base
from ironic_inspector import utils
@ -53,22 +52,21 @@ class RaidDeviceDetection(base.ProcessingHook):
def before_processing(self, introspection_data, **kwargs):
"""Adds fake local_gb value if it's missing from introspection_data."""
if not introspection_data.get('local_gb'):
LOG.info(_LI('No volume is found on the node. Adding a fake '
'value for "local_gb"'),
data=introspection_data)
LOG.info('No volume is found on the node. Adding a fake '
'value for "local_gb"', data=introspection_data)
introspection_data['local_gb'] = 1
def before_update(self, introspection_data, node_info, **kwargs):
current_devices = self._get_serials(introspection_data)
if not current_devices:
LOG.warning(_LW('No block device was received from ramdisk'),
LOG.warning('No block device was received from ramdisk',
node_info=node_info, data=introspection_data)
return
node = node_info.node()
if 'root_device' in node.properties:
LOG.info(_LI('Root device is already known for the node'),
LOG.info('Root device is already known for the node',
node_info=node_info, data=introspection_data)
return
@ -79,12 +77,12 @@ class RaidDeviceDetection(base.ProcessingHook):
if device not in previous_devices]
if len(new_devices) > 1:
LOG.warning(_LW('Root device cannot be identified because '
'multiple new devices were found'),
LOG.warning('Root device cannot be identified because '
'multiple new devices were found',
node_info=node_info, data=introspection_data)
return
elif len(new_devices) == 0:
LOG.warning(_LW('No new devices were found'),
LOG.warning('No new devices were found',
node_info=node_info, data=introspection_data)
return

@ -22,7 +22,7 @@ from oslo_utils import netutils
from oslo_utils import units
import six
from ironic_inspector.common.i18n import _, _LC, _LE, _LI, _LW
from ironic_inspector.common.i18n import _
from ironic_inspector import conf
from ironic_inspector.plugins import base
from ironic_inspector import utils
@ -111,8 +111,8 @@ class SchedulerHook(base.ProcessingHook):
'; '.join(errors),
node_info=node_info, data=introspection_data)
LOG.info(_LI('Discovered data: CPUs: %(cpus)s %(cpu_arch)s, '
'memory %(memory_mb)s MiB, disk %(local_gb)s GiB'),
LOG.info('Discovered data: CPUs: %(cpus)s %(cpu_arch)s, '
'memory %(memory_mb)s MiB, disk %(local_gb)s GiB',
{key: introspection_data.get(key) for key in self.KEYS},
node_info=node_info, data=introspection_data)
@ -128,15 +128,15 @@ class ValidateInterfacesHook(base.ProcessingHook):
def __init__(self):
if CONF.processing.add_ports not in conf.VALID_ADD_PORTS_VALUES:
LOG.critical(_LC('Accepted values for [processing]add_ports are '
'%(valid)s, got %(actual)s'),
LOG.critical('Accepted values for [processing]add_ports are '
'%(valid)s, got %(actual)s',
{'valid': conf.VALID_ADD_PORTS_VALUES,
'actual': CONF.processing.add_ports})
sys.exit(1)
if CONF.processing.keep_ports not in conf.VALID_KEEP_PORTS_VALUES:
LOG.critical(_LC('Accepted values for [processing]keep_ports are '
'%(valid)s, got %(actual)s'),
LOG.critical('Accepted values for [processing]keep_ports are '
'%(valid)s, got %(actual)s',
{'valid': conf.VALID_KEEP_PORTS_VALUES,
'actual': CONF.processing.keep_ports})
sys.exit(1)
@ -156,7 +156,7 @@ class ValidateInterfacesHook(base.ProcessingHook):
client_id = iface.get('client_id')
if not name:
LOG.error(_LE('Malformed interface record: %s'),
LOG.error('Malformed interface record: %s',
iface, data=data)
continue
@ -166,8 +166,8 @@ class ValidateInterfacesHook(base.ProcessingHook):
continue
if not netutils.is_valid_mac(mac):
LOG.warning(_LW('MAC %(mac)s for interface %(name)s is '
'not valid, skipping'),
LOG.warning('MAC %(mac)s for interface %(name)s is '
'not valid, skipping',
{'mac': mac, 'name': name},
data=data)
continue
@ -193,8 +193,8 @@ class ValidateInterfacesHook(base.ProcessingHook):
pxe_mac = utils.get_pxe_mac(data)
if not pxe_mac and CONF.processing.add_ports == 'pxe':
LOG.warning(_LW('No boot interface provided in the introspection '
'data, will add all ports with IP addresses'))
LOG.warning('No boot interface provided in the introspection '
'data, will add all ports with IP addresses')
result = {}
@ -241,7 +241,7 @@ class ValidateInterfacesHook(base.ProcessingHook):
interfaces = self._validate_interfaces(all_interfaces,
introspection_data)
LOG.info(_LI('Using network interface(s): %s'),
LOG.info('Using network interface(s): %s',
', '.join('%s %s' % (name, items)
for (name, items) in interfaces.items()),
data=introspection_data)
@ -266,8 +266,8 @@ class ValidateInterfacesHook(base.ProcessingHook):
# list is required as we modify underlying dict
for port in list(node_info.ports().values()):
if port.address not in expected_macs:
LOG.info(_LI("Deleting port %(port)s as its MAC %(mac)s is "
"not in expected MAC list %(expected)s"),
LOG.info("Deleting port %(port)s as its MAC %(mac)s is "
"not in expected MAC list %(expected)s",
{'port': port.uuid,
'mac': port.address,
'expected': list(sorted(expected_macs))},

@ -22,7 +22,7 @@ from oslo_utils import uuidutils
import six
from sqlalchemy import orm
from ironic_inspector.common.i18n import _, _LE, _LI
from ironic_inspector.common.i18n import _
from ironic_inspector import db
from ironic_inspector.plugins import base as plugins_base
from ironic_inspector import utils
@ -153,9 +153,9 @@ class IntrospectionRule(object):
cond.field, node_info=node_info, data=data)
field_values = [None]
else:
LOG.info(_LI('Field with JSON path %(path)s was not found '
'in data, rule "%(rule)s" will not '
'be applied'),
LOG.info('Field with JSON path %(path)s was not found '
'in data, rule "%(rule)s" will not '
'be applied',
{'path': cond.field, 'rule': self.description},
node_info=node_info, data=data)
return False
@ -171,14 +171,14 @@ class IntrospectionRule(object):
break
if not result:
LOG.info(_LI('Rule "%(rule)s" will not be applied: condition '
'%(field)s %(op)s %(params)s failed'),
LOG.info('Rule "%(rule)s" will not be applied: condition '
'%(field)s %(op)s %(params)s failed',
{'rule': self.description, 'field': cond.field,
'op': cond.op, 'params': cond.params},
node_info=node_info, data=data)
return False
LOG.info(_LI('Rule "%s" will be applied'), self.description,
LOG.info('Rule "%s" will be applied', self.description,
node_info=node_info, data=data)
return True
@ -347,12 +347,12 @@ def create(conditions_json, actions_json, uuid=None,
rule.save(session)
except db_exc.DBDuplicateEntry as exc:
LOG.error(_LE('Database integrity error %s when '
'creating a rule'), exc)
LOG.error('Database integrity error %s when '
'creating a rule', exc)
raise utils.Error(_('Rule with UUID %s already exists') % uuid,
code=409)
LOG.info(_LI('Created rule %(uuid)s with description "%(descr)s"'),
LOG.info('Created rule %(uuid)s with description "%(descr)s"',
{'uuid': uuid, 'descr': description})
return IntrospectionRule(uuid=uuid,
conditions=rule.conditions,
@ -393,7 +393,7 @@ def delete(uuid):
if not count:
raise utils.Error(_('Rule %s was not found') % uuid, code=404)
LOG.info(_LI('Introspection rule %s was deleted'), uuid)
LOG.info('Introspection rule %s was deleted', uuid)
def delete_all():
@ -403,7 +403,7 @@ def delete_all():
db.model_query(db.RuleCondition, session=session).delete()
db.model_query(db.Rule, session=session).delete()
LOG.info(_LI('All introspection rules were deleted'))
LOG.info('All introspection rules were deleted')
def apply(node_info, data):
@ -440,5 +440,5 @@ def apply(node_info, data):
else:
LOG.debug('No actions to apply', node_info=node_info, data=data)
LOG.info(_LI('Successfully applied custom introspection rules'),
LOG.info('Successfully applied custom introspection rules',
node_info=node_info, data=data)

@ -52,11 +52,10 @@ class BaseTest(test_base.BaseTestCase):
self.addCleanup(db.get_engine().dispose)
plugins_base._HOOKS_MGR = None
node_cache._SEMAPHORES = lockutils.Semaphores()
for name in ('_', '_LI', '_LW', '_LE', '_LC'):
patch = mock.patch.object(i18n, name, lambda s: s)
patch.start()
# 'p=patch' magic is due to how closures work
self.addCleanup(lambda p=patch: p.stop())
patch = mock.patch.object(i18n, '_', lambda s: s)
patch.start()
# 'p=patch' magic is due to how closures work
self.addCleanup(lambda p=patch: p.stop())
utils._EXECUTOR = futurist.SynchronousExecutor(green=True)
def init_test_conf(self):

@ -38,7 +38,6 @@ from oslo_log import log as logging
from oslo_utils import uuidutils
import sqlalchemy
from ironic_inspector.common.i18n import _LE
from ironic_inspector import db
from ironic_inspector import dbsync
from ironic_inspector import introspection_state as istate
@ -140,8 +139,8 @@ class WalkVersionsMixin(object):
if check:
check(engine, data)
except Exception:
LOG.error(_LE("Failed to migrate to version %(version)s on engine "
"%(engine)s"),
LOG.error("Failed to migrate to version %(version)s on engine "
"%(engine)s",
{'version': version, 'engine': engine})
raise

@ -15,14 +15,14 @@ import datetime
import logging as pylog
import futurist
from ironicclient.v1 import node
from keystonemiddleware import auth_token
from oslo_config import cfg
from oslo_log import log
from oslo_middleware import cors as cors_middleware
import pytz
from ironicclient.v1 import node
from ironic_inspector.common.i18n import _, _LE, _LI
from ironic_inspector.common.i18n import _
from ironic_inspector import conf # noqa
CONF = cfg.CONF
@ -177,7 +177,7 @@ def check_auth(request):
raise Error(_('Authentication required'), code=401)
roles = (request.headers.get('X-Roles') or '').split(',')
if 'admin' not in roles:
LOG.error(_LE('Role "admin" not in user role list %s'), roles)
LOG.error('Role "admin" not in user role list %s', roles)
raise Error(_('Access denied'), code=403)
@ -205,8 +205,8 @@ def get_inventory(data, node_info=None):
'or empty') % key, data=data, node_info=node_info)
if not inventory.get('disks'):
LOG.info(_LI('No disks were detected in the inventory, assuming this '
'is a disk-less node'), data=data, node_info=node_info)
LOG.info('No disks were detected in the inventory, assuming this '
'is a disk-less node', data=data, node_info=node_info)
# Make sure the code iterating over it does not fail with a TypeError
inventory['disks'] = []

Loading…
Cancel
Save