Remove translation of log messages Part-1

The i18n team has decided not to translate the logs because it does not
seem very useful.

These are the files that, as of now, do not have merge conflicts.

Change-Id: I082f6302f120a8c0c81482b73be301e83fe3a3a8
Partial-Bug: #1674374
Annie Lezil 2017-03-21 06:35:55 +00:00
parent fec8c78600
commit c06a9a7f24
14 changed files with 92 additions and 98 deletions
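The pattern applied across all of these files is the same: drop the _LW/_LI/_LE/_LC log-translation markers and pass plain strings to the logger, while keeping _() for user-facing exception text. Below is a minimal standalone sketch of the style after the change; the detect_boot_mode helper is illustrative only (not code from this commit) and _() is stubbed so the snippet runs outside the project.

# Illustrative sketch only -- not a file from this commit.
import logging


def _(msg):
    # Stand-in for ironic_inspector.common.i18n._ so the sketch is standalone;
    # in the real code only _() remains imported, _LW/_LI/_LE/_LC are gone.
    return msg


LOG = logging.getLogger(__name__)


def detect_boot_mode(inventory):
    """Hypothetical helper following the new logging style."""
    boot_mode = inventory.get('boot', {}).get('current_boot_mode')
    if boot_mode is None:
        # Before: LOG.warning(_LW('No boot mode information available'))
        LOG.warning('No boot mode information available')
        return {}
    if boot_mode not in ('bios', 'uefi'):
        # User-facing error text keeps the _() translation marker.
        raise ValueError(_('Unexpected boot mode: %s') % boot_mode)
    # Before: LOG.info(_LI('Boot mode was %s'), boot_mode)
    LOG.info('Boot mode was %s', boot_mode)
    return {'boot_mode': boot_mode}

The hunks below all follow this pattern; where lines are re-wrapped, it is only to keep them within the line-length limit after the wrapper is removed.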

View File

@@ -18,7 +18,7 @@ from ironicclient import exceptions as ironic_exc
 import netaddr
 from oslo_config import cfg

-from ironic_inspector.common.i18n import _, _LW
+from ironic_inspector.common.i18n import _
 from ironic_inspector.common import keystone
 from ironic_inspector import utils
@@ -108,7 +108,7 @@ def get_ipmi_address(node):
                                   node_info=node)
             if netaddr.IPAddress(ip).is_loopback():
-                LOG.warning(_LW('Ignoring loopback BMC address %s'), ip,
+                LOG.warning('Ignoring loopback BMC address %s', ip,
                             node_info=node)
                 ip = None

View File

@@ -18,7 +18,7 @@ import binascii
 from construct import core
 import netaddr

-from ironic_inspector.common.i18n import _, _LW
+from ironic_inspector.common.i18n import _
 from ironic_inspector.common import lldp_tlvs as tlv
 from ironic_inspector import utils
@@ -136,7 +136,7 @@ class LLDPParser(object):
             name = s[2]
             check_len = s[3]
         except KeyError as e:
-            LOG.warning(_LW("Key error in TLV table: %s"), e,
+            LOG.warning("Key error in TLV table: %s", e,
                         node_info=self.node_info)
             return False
@@ -144,10 +144,11 @@
         # proper number of bytes has been provided, for example
         # when a BitStruct is used.
         if check_len and (tlv_parser.sizeof() != len(data)):
-            LOG.warning(_LW('Invalid data for %(name)s '
-                            'expected len %(expect)d, got %(actual)d'),
-                        {'name': name, 'expect': tlv_parser.sizeof(),
-                         'actual': len(data)}, node_info=self.node_info)
+            LOG.warning("Invalid data for %(name)s expected len %(expect)d, "
+                        "got %(actual)d", {'name': name,
+                                           'expect': tlv_parser.sizeof(),
+                                           'actual': len(data)},
+                        node_info=self.node_info)
             return False

         # Use the construct parser to parse TLV so that it's
@@ -156,7 +157,7 @@ class LLDPParser(object):
             struct = tlv_parser.parse(data)
         except (core.RangeError, core.FieldError, core.MappingError,
                 netaddr.AddrFormatError) as e:
-            LOG.warning(_LW("TLV parse error: %s"), e,
+            LOG.warning("TLV parse error: %s", e,
                         node_info=self.node_info)
             return False
@@ -164,7 +165,7 @@ class LLDPParser(object):
         try:
             func(struct, name, data)
         except ValueError as e:
-            LOG.warning(_LW("TLV value error: %s"), e,
+            LOG.warning("TLV value error: %s", e,
                         node_info=self.node_info)
             return False
@@ -271,8 +272,8 @@ class LLDPBasicMgmtParser(LLDPParser):
             else:
                 LOG.debug("Subtype %d not found for 802.3", subtype)
         else:
-            LOG.warning(_LW("Organizationally Unique ID %s not "
-                            "recognized"), oui, node_info=self.node_info)
+            LOG.warning("Organizationally Unique ID %s not "
+                        "recognized", oui, node_info=self.node_info)


 class LLDPdot1Parser(LLDPParser):

View File

@@ -20,7 +20,7 @@ from oslo_log import log
 import six
 import stevedore

-from ironic_inspector.common.i18n import _, _LW
+from ironic_inspector.common.i18n import _

 CONF = cfg.CONF
@@ -207,8 +207,8 @@ def rule_actions_manager():
     for act in _ACTIONS_MGR:
         # a trick to detect if function was overridden
         if "rollback" in act.obj.__class__.__dict__:
-            LOG.warning(_LW('Defining "rollback" for introspection rules '
-                            'actions is deprecated (action "%s")'),
+            LOG.warning('Defining "rollback" for introspection rules '
+                        'actions is deprecated (action "%s")',
                         act.name)
     return _ACTIONS_MGR

View File

@@ -15,7 +15,7 @@
 from oslo_config import cfg

-from ironic_inspector.common.i18n import _, _LI, _LW
+from ironic_inspector.common.i18n import _
 from ironic_inspector.plugins import base
 from ironic_inspector import utils
@@ -56,19 +56,19 @@ class CapabilitiesHook(base.ProcessingHook):
     def _detect_boot_mode(self, inventory, node_info, data=None):
         boot_mode = inventory.get('boot', {}).get('current_boot_mode')
         if boot_mode is not None:
-            LOG.info(_LI('Boot mode was %s'), boot_mode,
+            LOG.info('Boot mode was %s', boot_mode,
                      data=data, node_info=node_info)
             return {'boot_mode': boot_mode}
         else:
-            LOG.warning(_LW('No boot mode information available'),
+            LOG.warning('No boot mode information available',
                         data=data, node_info=node_info)
             return {}

     def _detect_cpu_flags(self, inventory, node_info, data=None):
         flags = inventory['cpu'].get('flags')
         if not flags:
-            LOG.warning(_LW('No CPU flags available, please update your '
-                            'introspection ramdisk'),
+            LOG.warning('No CPU flags available, please update your '
+                        'introspection ramdisk',
                         data=data, node_info=node_info)
             return {}
@@ -78,7 +78,7 @@ class CapabilitiesHook(base.ProcessingHook):
             if flag in flags:
                 caps[name] = 'true'

-        LOG.info(_LI('CPU capabilities: %s'), list(caps),
+        LOG.info('CPU capabilities: %s', list(caps),
                  data=data, node_info=node_info)
         return caps

View File

@@ -15,7 +15,7 @@
 from oslo_config import cfg

-from ironic_inspector.common.i18n import _, _LW
+from ironic_inspector.common.i18n import _
 from ironic_inspector.common import ironic as ir_utils
 from ironic_inspector import node_cache
 from ironic_inspector import utils
@@ -46,8 +46,8 @@ def _extract_node_driver_info(introspection_data):
     if ipmi_address:
         node_driver_info['ipmi_address'] = ipmi_address
     else:
-        LOG.warning(_LW('No BMC address provided, discovered node will be '
-                        'created without ipmi address'))
+        LOG.warning('No BMC address provided, discovered node will be '
+                    'created without ipmi address')
     return node_driver_info
@@ -63,9 +63,9 @@ def _check_existing_nodes(introspection_data, node_driver_info, ironic):
             _('Port %(mac)s already exists, uuid: %(uuid)s') %
             {'mac': mac, 'uuid': ports[0].uuid}, data=introspection_data)
     else:
-        LOG.warning(_LW('No suitable interfaces found for discovered node. '
-                        'Check that validate_interfaces hook is listed in '
-                        '[processing]default_processing_hooks config option'))
+        LOG.warning('No suitable interfaces found for discovered node. '
+                    'Check that validate_interfaces hook is listed in '
+                    '[processing]default_processing_hooks config option')

     # verify existing node with discovered ipmi address
     ipmi_address = node_driver_info.get('ipmi_address')

View File

@@ -20,7 +20,6 @@ is stored in the 'inspector' container.
 import json

-from ironic_inspector.common.i18n import _LW
 from ironic_inspector.common import swift
 from ironic_inspector.plugins import base
 from ironic_inspector import utils
@@ -47,9 +46,9 @@ class ExtraHardwareHook(base.ProcessingHook):
         Otherwise, it does nothing.
         """
         if 'data' not in introspection_data:
-            LOG.warning(_LW('No extra hardware information was received from '
-                            'the ramdisk'),
-                        node_info=node_info, data=introspection_data)
+            LOG.warning('No extra hardware information was received from '
+                        'the ramdisk', node_info=node_info,
+                        data=introspection_data)
             return

         data = introspection_data['data']
@@ -66,10 +65,10 @@ class ExtraHardwareHook(base.ProcessingHook):
                       node_info=node_info, data=introspection_data)
             introspection_data['extra'] = self._convert_edeploy_data(data)
         else:
-            LOG.warning(_LW('Extra hardware data was not in a recognised '
-                            'format (eDeploy), and will not be forwarded to '
-                            'introspection rules'),
-                        node_info=node_info, data=introspection_data)
+            LOG.warning('Extra hardware data was not in a recognised '
+                        'format (eDeploy), and will not be forwarded to '
+                        'introspection rules', node_info=node_info,
+                        data=introspection_data)

         LOG.debug('Deleting \"data\" key from introspection data as it is '
                   'assumed unusable by introspection rules. Raw data is '

View File

@@ -15,7 +15,6 @@
 import binascii

-from ironic_inspector.common.i18n import _LW
 from ironic_inspector.common import lldp_parsers
 from ironic_inspector.plugins import base
 from ironic_inspector import utils
@@ -48,9 +47,9 @@ class LLDPBasicProcessingHook(base.ProcessingHook):
             try:
                 data = bytearray(binascii.a2b_hex(tlv_value))
             except TypeError as e:
-                LOG.warning(_LW(
+                LOG.warning(
                     "TLV value for TLV type %(tlv_type)d not in correct "
-                    "format, value must be in hexadecimal: %(msg)s"),
+                    "format, value must be in hexadecimal: %(msg)s",
                     {'tlv_type': tlv_type, 'msg': e}, node_info=node_info)
                 continue
@@ -73,7 +72,7 @@ class LLDPBasicProcessingHook(base.ProcessingHook):
             tlvs = iface.get('lldp')
             if tlvs is None:
-                LOG.warning(_LW("No LLDP Data found for interface %s"),
+                LOG.warning("No LLDP Data found for interface %s",
                             if_name, node_info=node_info)
                 continue

View File

@@ -18,7 +18,7 @@ import json
 from oslo_config import cfg

-from ironic_inspector.common.i18n import _, _LI, _LW, _LE
+from ironic_inspector.common.i18n import _
 from ironic_inspector.plugins import base
 from ironic_inspector import utils
@@ -49,12 +49,12 @@ def _parse_pci_alias_entry():
         try:
             parsed_entry = json.loads(pci_alias_entry)
             if set(parsed_entry) != {'vendor_id', 'product_id', 'name'}:
-                raise KeyError(_LE("The 'alias' entry should contain "
-                                   "exactly 'vendor_id', 'product_id' and "
-                                   "'name' keys"))
+                raise KeyError("The 'alias' entry should contain "
+                               "exactly 'vendor_id', 'product_id' and "
+                               "'name' keys")
             parsed_pci_devices.append(parsed_entry)
         except (ValueError, KeyError) as ex:
-            LOG.error(_LE("Error parsing 'alias' option: %s"), ex)
+            LOG.error("Error parsing 'alias' option: %s", ex)
     return {(dev['vendor_id'], dev['product_id']): dev['name']
             for dev in parsed_pci_devices}
@@ -75,13 +75,12 @@ class PciDevicesHook(base.ProcessingHook):
     def before_update(self, introspection_data, node_info, **kwargs):
         if 'pci_devices' not in introspection_data:
             if CONF.pci_devices.alias:
-                LOG.warning(_LW('No PCI devices information was received from '
-                                'the ramdisk.'))
+                LOG.warning('No PCI devices information was received from '
+                            'the ramdisk.')
             return

         alias_count = {self.aliases[id_pair]: count for id_pair, count in
                        self._found_pci_devices_count(
                            introspection_data['pci_devices']).items()}
         if alias_count:
             node_info.update_capabilities(**alias_count)
-            LOG.info(_LI('Found the following PCI devices: %s'),
-                     alias_count)
+            LOG.info('Found the following PCI devices: %s', alias_count)

View File

@@ -13,7 +13,6 @@
 """Gather root device hint from recognized block devices."""

-from ironic_inspector.common.i18n import _LI, _LW
 from ironic_inspector.plugins import base
 from ironic_inspector import utils
@@ -53,22 +52,21 @@ class RaidDeviceDetection(base.ProcessingHook):
     def before_processing(self, introspection_data, **kwargs):
         """Adds fake local_gb value if it's missing from introspection_data."""
         if not introspection_data.get('local_gb'):
-            LOG.info(_LI('No volume is found on the node. Adding a fake '
-                         'value for "local_gb"'),
-                     data=introspection_data)
+            LOG.info('No volume is found on the node. Adding a fake '
+                     'value for "local_gb"', data=introspection_data)
             introspection_data['local_gb'] = 1

     def before_update(self, introspection_data, node_info, **kwargs):
         current_devices = self._get_serials(introspection_data)
         if not current_devices:
-            LOG.warning(_LW('No block device was received from ramdisk'),
+            LOG.warning('No block device was received from ramdisk',
                         node_info=node_info, data=introspection_data)
             return

         node = node_info.node()
         if 'root_device' in node.properties:
-            LOG.info(_LI('Root device is already known for the node'),
+            LOG.info('Root device is already known for the node',
                      node_info=node_info, data=introspection_data)
             return
@@ -79,12 +77,12 @@ class RaidDeviceDetection(base.ProcessingHook):
                        if device not in previous_devices]

         if len(new_devices) > 1:
-            LOG.warning(_LW('Root device cannot be identified because '
-                            'multiple new devices were found'),
+            LOG.warning('Root device cannot be identified because '
+                        'multiple new devices were found',
                         node_info=node_info, data=introspection_data)
             return
         elif len(new_devices) == 0:
-            LOG.warning(_LW('No new devices were found'),
+            LOG.warning('No new devices were found',
                         node_info=node_info, data=introspection_data)
             return

View File

@@ -22,7 +22,7 @@ from oslo_utils import netutils
 from oslo_utils import units
 import six

-from ironic_inspector.common.i18n import _, _LC, _LE, _LI, _LW
+from ironic_inspector.common.i18n import _
 from ironic_inspector import conf
 from ironic_inspector.plugins import base
 from ironic_inspector import utils
@@ -111,8 +111,8 @@ class SchedulerHook(base.ProcessingHook):
                               '; '.join(errors),
                               node_info=node_info, data=introspection_data)

-        LOG.info(_LI('Discovered data: CPUs: %(cpus)s %(cpu_arch)s, '
-                     'memory %(memory_mb)s MiB, disk %(local_gb)s GiB'),
+        LOG.info('Discovered data: CPUs: %(cpus)s %(cpu_arch)s, '
+                 'memory %(memory_mb)s MiB, disk %(local_gb)s GiB',
                  {key: introspection_data.get(key) for key in self.KEYS},
                  node_info=node_info, data=introspection_data)
@@ -128,15 +128,15 @@ class ValidateInterfacesHook(base.ProcessingHook):
     def __init__(self):
         if CONF.processing.add_ports not in conf.VALID_ADD_PORTS_VALUES:
-            LOG.critical(_LC('Accepted values for [processing]add_ports are '
-                             '%(valid)s, got %(actual)s'),
+            LOG.critical('Accepted values for [processing]add_ports are '
+                         '%(valid)s, got %(actual)s',
                          {'valid': conf.VALID_ADD_PORTS_VALUES,
                           'actual': CONF.processing.add_ports})
             sys.exit(1)

         if CONF.processing.keep_ports not in conf.VALID_KEEP_PORTS_VALUES:
-            LOG.critical(_LC('Accepted values for [processing]keep_ports are '
-                             '%(valid)s, got %(actual)s'),
+            LOG.critical('Accepted values for [processing]keep_ports are '
+                         '%(valid)s, got %(actual)s',
                          {'valid': conf.VALID_KEEP_PORTS_VALUES,
                           'actual': CONF.processing.keep_ports})
             sys.exit(1)
@@ -156,7 +156,7 @@ class ValidateInterfacesHook(base.ProcessingHook):
             client_id = iface.get('client_id')

             if not name:
-                LOG.error(_LE('Malformed interface record: %s'),
+                LOG.error('Malformed interface record: %s',
                           iface, data=data)
                 continue
@@ -166,8 +166,8 @@ class ValidateInterfacesHook(base.ProcessingHook):
                 continue

             if not netutils.is_valid_mac(mac):
-                LOG.warning(_LW('MAC %(mac)s for interface %(name)s is '
-                                'not valid, skipping'),
+                LOG.warning('MAC %(mac)s for interface %(name)s is '
+                            'not valid, skipping',
                             {'mac': mac, 'name': name},
                             data=data)
                 continue
@@ -193,8 +193,8 @@ class ValidateInterfacesHook(base.ProcessingHook):
         pxe_mac = utils.get_pxe_mac(data)
         if not pxe_mac and CONF.processing.add_ports == 'pxe':
-            LOG.warning(_LW('No boot interface provided in the introspection '
-                            'data, will add all ports with IP addresses'))
+            LOG.warning('No boot interface provided in the introspection '
+                        'data, will add all ports with IP addresses')

         result = {}
@@ -241,7 +241,7 @@ class ValidateInterfacesHook(base.ProcessingHook):
         interfaces = self._validate_interfaces(all_interfaces,
                                                introspection_data)

-        LOG.info(_LI('Using network interface(s): %s'),
+        LOG.info('Using network interface(s): %s',
                  ', '.join('%s %s' % (name, items)
                            for (name, items) in interfaces.items()),
                  data=introspection_data)
@@ -266,8 +266,8 @@ class ValidateInterfacesHook(base.ProcessingHook):
         # list is required as we modify underlying dict
         for port in list(node_info.ports().values()):
             if port.address not in expected_macs:
-                LOG.info(_LI("Deleting port %(port)s as its MAC %(mac)s is "
-                             "not in expected MAC list %(expected)s"),
+                LOG.info("Deleting port %(port)s as its MAC %(mac)s is "
+                         "not in expected MAC list %(expected)s",
                          {'port': port.uuid,
                           'mac': port.address,
                           'expected': list(sorted(expected_macs))},

View File

@@ -22,7 +22,7 @@ from oslo_utils import uuidutils
 import six
 from sqlalchemy import orm

-from ironic_inspector.common.i18n import _, _LE, _LI
+from ironic_inspector.common.i18n import _
 from ironic_inspector import db
 from ironic_inspector.plugins import base as plugins_base
 from ironic_inspector import utils
@@ -153,9 +153,9 @@ class IntrospectionRule(object):
                           cond.field, node_info=node_info, data=data)
                 field_values = [None]
             else:
-                LOG.info(_LI('Field with JSON path %(path)s was not found '
-                             'in data, rule "%(rule)s" will not '
-                             'be applied'),
+                LOG.info('Field with JSON path %(path)s was not found '
+                         'in data, rule "%(rule)s" will not '
+                         'be applied',
                          {'path': cond.field, 'rule': self.description},
                          node_info=node_info, data=data)
                 return False
@@ -171,14 +171,14 @@ class IntrospectionRule(object):
                 break

         if not result:
-            LOG.info(_LI('Rule "%(rule)s" will not be applied: condition '
-                         '%(field)s %(op)s %(params)s failed'),
+            LOG.info('Rule "%(rule)s" will not be applied: condition '
+                     '%(field)s %(op)s %(params)s failed',
                      {'rule': self.description, 'field': cond.field,
                       'op': cond.op, 'params': cond.params},
                      node_info=node_info, data=data)
             return False

-        LOG.info(_LI('Rule "%s" will be applied'), self.description,
+        LOG.info('Rule "%s" will be applied', self.description,
                  node_info=node_info, data=data)
         return True
@@ -347,12 +347,12 @@ def create(conditions_json, actions_json, uuid=None,
             rule.save(session)
     except db_exc.DBDuplicateEntry as exc:
-        LOG.error(_LE('Database integrity error %s when '
-                      'creating a rule'), exc)
+        LOG.error('Database integrity error %s when '
+                  'creating a rule', exc)
         raise utils.Error(_('Rule with UUID %s already exists') % uuid,
                           code=409)

-    LOG.info(_LI('Created rule %(uuid)s with description "%(descr)s"'),
+    LOG.info('Created rule %(uuid)s with description "%(descr)s"',
             {'uuid': uuid, 'descr': description})
     return IntrospectionRule(uuid=uuid,
                              conditions=rule.conditions,
@@ -393,7 +393,7 @@ def delete(uuid):
         if not count:
             raise utils.Error(_('Rule %s was not found') % uuid, code=404)

-    LOG.info(_LI('Introspection rule %s was deleted'), uuid)
+    LOG.info('Introspection rule %s was deleted', uuid)


 def delete_all():
@@ -403,7 +403,7 @@ def delete_all():
         db.model_query(db.RuleCondition, session=session).delete()
         db.model_query(db.Rule, session=session).delete()

-    LOG.info(_LI('All introspection rules were deleted'))
+    LOG.info('All introspection rules were deleted')


 def apply(node_info, data):
@@ -440,5 +440,5 @@ def apply(node_info, data):
     else:
         LOG.debug('No actions to apply', node_info=node_info, data=data)

-    LOG.info(_LI('Successfully applied custom introspection rules'),
+    LOG.info('Successfully applied custom introspection rules',
              node_info=node_info, data=data)

View File

@@ -52,11 +52,10 @@ class BaseTest(test_base.BaseTestCase):
         self.addCleanup(db.get_engine().dispose)
         plugins_base._HOOKS_MGR = None
         node_cache._SEMAPHORES = lockutils.Semaphores()
-        for name in ('_', '_LI', '_LW', '_LE', '_LC'):
-            patch = mock.patch.object(i18n, name, lambda s: s)
-            patch.start()
-            # 'p=patch' magic is due to how closures work
-            self.addCleanup(lambda p=patch: p.stop())
+        patch = mock.patch.object(i18n, '_', lambda s: s)
+        patch.start()
+        # 'p=patch' magic is due to how closures work
+        self.addCleanup(lambda p=patch: p.stop())
         utils._EXECUTOR = futurist.SynchronousExecutor(green=True)

     def init_test_conf(self):

View File

@@ -38,7 +38,6 @@ from oslo_log import log as logging
 from oslo_utils import uuidutils
 import sqlalchemy

-from ironic_inspector.common.i18n import _LE
 from ironic_inspector import db
 from ironic_inspector import dbsync
 from ironic_inspector import introspection_state as istate
@@ -140,8 +139,8 @@ class WalkVersionsMixin(object):
                 if check:
                     check(engine, data)
             except Exception:
-                LOG.error(_LE("Failed to migrate to version %(version)s on engine "
-                              "%(engine)s"),
+                LOG.error("Failed to migrate to version %(version)s on engine "
+                          "%(engine)s",
                           {'version': version, 'engine': engine})
                 raise

View File

@@ -15,14 +15,14 @@ import datetime
 import logging as pylog

 import futurist
+from ironicclient.v1 import node
 from keystonemiddleware import auth_token
 from oslo_config import cfg
 from oslo_log import log
 from oslo_middleware import cors as cors_middleware
 import pytz

-from ironicclient.v1 import node
-from ironic_inspector.common.i18n import _, _LE, _LI
+from ironic_inspector.common.i18n import _
 from ironic_inspector import conf  # noqa

 CONF = cfg.CONF
@@ -177,7 +177,7 @@ def check_auth(request):
         raise Error(_('Authentication required'), code=401)
     roles = (request.headers.get('X-Roles') or '').split(',')
     if 'admin' not in roles:
-        LOG.error(_LE('Role "admin" not in user role list %s'), roles)
+        LOG.error('Role "admin" not in user role list %s', roles)
         raise Error(_('Access denied'), code=403)
@@ -205,8 +205,8 @@ def get_inventory(data, node_info=None):
                               'or empty') % key, data=data, node_info=node_info)

     if not inventory.get('disks'):
-        LOG.info(_LI('No disks were detected in the inventory, assuming this '
-                     'is a disk-less node'), data=data, node_info=node_info)
+        LOG.info('No disks were detected in the inventory, assuming this '
+                 'is a disk-less node', data=data, node_info=node_info)
         # Make sure the code iterating over it does not fail with a TypeError
         inventory['disks'] = []