Remove log translations

Starting with the Pike series, OpenStack no longer supports log
translation.

See:
http://lists.openstack.org/pipermail/openstack-i18n/2016-November/002574.html
http://lists.openstack.org/pipermail/openstack-dev/2017-March/113365.html

Change-Id: I4440a1d6c332e48845fceadb464dd34ab11e12d2
This commit is contained in:
Ngo Quoc Cuong 2017-07-04 23:52:35 -04:00
parent 17e1a85e2b
commit d0872fec2d
36 changed files with 249 additions and 258 deletions

View File

@ -43,7 +43,7 @@ def create_process(cmd, root_helper=None, addl_env=None,
cmd = map(str, cmd)
if debuglog:
LOG.debug(_("Running command: %s"), cmd)
LOG.debug("Running command: %s", cmd)
env = os.environ.copy()
if addl_env:
env.update(addl_env)

View File

@ -19,6 +19,7 @@ from tacker.vnfm.monitor_drivers.token import Token
from tacker import wsgi
# check alarm url with db --> move to plugin
LOG = logging.getLogger(__name__)
OPTS = [
@ -41,7 +42,7 @@ def config_opts():
class AlarmReceiver(wsgi.Middleware):
def process_request(self, req):
LOG.debug(_('Process request: %s'), req)
LOG.debug('Process request: %s', req)
if req.method != 'POST':
return
url = req.url

View File

@ -102,8 +102,8 @@ def _get_pagination_max_limit():
if max_limit == 0:
raise ValueError()
except ValueError:
LOG.warning(_("Invalid value for pagination_max_limit: %s. It "
"should be an integer greater to 0"),
LOG.warning("Invalid value for pagination_max_limit: %s. It "
"should be an integer greater to 0",
cfg.CONF.pagination_max_limit)
return max_limit

View File

@ -277,8 +277,7 @@ class ExtensionMiddleware(wsgi.Middleware):
(resource.parent["collection_name"],
resource.parent["member_name"]))
LOG.debug(_('Extended resource: %s'),
resource.collection)
LOG.debug('Extended resource: %s', resource.collection)
for action, method in (resource.collection_actions).items():
conditions = dict(method=[method])
path = "/%s/%s" % (resource.collection, action)
@ -299,7 +298,7 @@ class ExtensionMiddleware(wsgi.Middleware):
action_controllers = self._action_ext_controllers(application,
self.ext_mgr, mapper)
for action in self.ext_mgr.get_actions():
LOG.debug(_('Extended action: %s'), action.action_name)
LOG.debug('Extended action: %s', action.action_name)
controller = action_controllers[action.collection]
controller.add_action(action.action_name, action.handler)
@ -307,7 +306,7 @@ class ExtensionMiddleware(wsgi.Middleware):
req_controllers = self._request_ext_controllers(application,
self.ext_mgr, mapper)
for request_ext in self.ext_mgr.get_request_extensions():
LOG.debug(_('Extended request: %s'), request_ext.key)
LOG.debug('Extended request: %s', request_ext.key)
controller = req_controllers[request_ext.key]
controller.add_handler(request_ext.handler)
@ -405,7 +404,7 @@ class ExtensionManager(object):
return cls._instance
def __init__(self, path):
LOG.info(_('Initializing extension manager.'))
LOG.info('Initializing extension manager.')
self.path = path
self.extensions = {}
self._load_all_extensions()
@ -485,8 +484,8 @@ class ExtensionManager(object):
else:
attr_map[resource] = resource_attrs
except AttributeError:
LOG.exception(_("Error fetching extended attributes for "
"extension '%s'"), ext.get_name())
LOG.exception("Error fetching extended attributes for "
"extension '%s'", ext.get_name())
processed_exts.add(ext_name)
del exts_to_process[ext_name]
if len(processed_exts) == processed_ext_count:
@ -494,8 +493,8 @@ class ExtensionManager(object):
break
if exts_to_process:
# NOTE(salv-orlando): Consider whether this error should be fatal
LOG.error(_("It was impossible to process the following "
"extensions: %s because of missing requirements."),
LOG.error("It was impossible to process the following "
"extensions: %s because of missing requirements.",
','.join(exts_to_process.keys()))
# Extending extensions' attributes map.
@ -505,13 +504,13 @@ class ExtensionManager(object):
def _check_extension(self, extension):
"""Checks for required methods in extension objects."""
try:
LOG.debug(_('Ext name: %s'), extension.get_name())
LOG.debug(_('Ext alias: %s'), extension.get_alias())
LOG.debug(_('Ext description: %s'), extension.get_description())
LOG.debug(_('Ext namespace: %s'), extension.get_namespace())
LOG.debug(_('Ext updated: %s'), extension.get_updated())
LOG.debug('Ext name: %s', extension.get_name())
LOG.debug('Ext alias: %s', extension.get_alias())
LOG.debug('Ext description: %s', extension.get_description())
LOG.debug('Ext namespace: %s', extension.get_namespace())
LOG.debug('Ext updated: %s', extension.get_updated())
except AttributeError as ex:
LOG.exception(_("Exception loading extension: %s"), ex)
LOG.exception("Exception loading extension: %s", ex)
return False
return True
@ -529,7 +528,7 @@ class ExtensionManager(object):
if os.path.exists(path):
self._load_all_extensions_from_path(path)
else:
LOG.error(_("Extension path '%s' doesn't exist!"), path)
LOG.error("Extension path '%s' doesn't exist!", path)
def _load_all_extensions_from_path(self, path):
# Sorting the extension list makes the order in which they
@ -537,7 +536,7 @@ class ExtensionManager(object):
# Tacker Servers
for f in sorted(os.listdir(path)):
try:
LOG.debug(_('Loading extension file: %s'), f)
LOG.debug('Loading extension file: %s', f)
mod_name, file_ext = os.path.splitext(os.path.split(f)[-1])
ext_path = os.path.join(path, f)
if file_ext.lower() == '.py' and not mod_name.startswith('_'):
@ -545,16 +544,16 @@ class ExtensionManager(object):
ext_name = mod_name[0].upper() + mod_name[1:]
new_ext_class = getattr(mod, ext_name, None)
if not new_ext_class:
LOG.warning(_('Did not find expected name '
'"%(ext_name)s" in %(file)s'),
LOG.warning('Did not find expected name '
'"%(ext_name)s" in %(file)s',
{'ext_name': ext_name,
'file': ext_path})
continue
new_ext = new_ext_class()
self.add_extension(new_ext)
except Exception as exception:
LOG.warning(_("Extension file %(f)s wasn't loaded due to "
"%(exception)s"),
LOG.warning("Extension file %(f)s wasn't loaded due to "
"%(exception)s",
{'f': f, 'exception': exception})
def add_extension(self, ext):
@ -563,7 +562,7 @@ class ExtensionManager(object):
return
alias = ext.get_alias()
LOG.info(_('Loaded extension: %s'), alias)
LOG.info('Loaded extension: %s', alias)
if alias in self.extensions:
raise exceptions.DuplicatedExtension(alias=alias)

View File

@ -73,8 +73,8 @@ class Controller(object):
_("Native pagination depend on native sorting")
)
if not self._allow_sorting:
LOG.info(_("Allow sorting is enabled because native "
"pagination requires native sorting"))
LOG.info("Allow sorting is enabled because native "
"pagination requires native sorting")
self._allow_sorting = True
if parent:
@ -331,8 +331,7 @@ class Controller(object):
obj_deleter(request.context, obj['id'], **kwargs)
except Exception:
# broad catch as our only purpose is to log the exception
LOG.exception(_("Unable to undo add for "
"%(resource)s %(id)s"),
LOG.exception("Unable to undo add for %(resource)s %(id)s",
{'resource': self._resource,
'id': obj['id']})
# TODO(salvatore-orlando): The object being processed when the
@ -508,8 +507,8 @@ class Controller(object):
if not body:
raise webob.exc.HTTPBadRequest(_("Resource body required"))
LOG.debug(_("Request body: %(body)s"), {'body':
strutils.mask_password(body)})
LOG.debug("Request body: %(body)s",
{'body': strutils.mask_password(body)})
prep_req_body = lambda x: Controller.prepare_request_body(
context,
x if resource in x else {resource: x},

View File

@ -79,16 +79,12 @@ def Resource(controller, faults=None, deserializers=None, serializers=None):
mapped_exc = api_common.convert_exception_to_http_exc(e, faults,
language)
if hasattr(mapped_exc, 'code') and 400 <= mapped_exc.code < 500:
LOG.info(_('%(action)s failed (client error): %(exc)s'),
LOG.info('%(action)s failed (client error): %(exc)s',
{'action': action, 'exc': mapped_exc})
else:
LOG.exception(
_('%(action)s failed: %(details)s'),
{
'action': action,
'details': extract_exc_details(e),
}
)
LOG.exception('%(action)s failed: %(details)s',
{'action': action,
'details': extract_exc_details(e)})
raise mapped_exc
status = action_status.get(action, 200)

View File

@ -32,7 +32,7 @@ class TackerKeystoneContext(wsgi.Middleware):
# Determine the user ID
user_id = req.headers.get('X_USER_ID')
if not user_id:
LOG.debug(_("X_USER_ID is not found in request"))
LOG.debug("X_USER_ID is not found in request")
return webob.exc.HTTPUnauthorized()
# Determine the tenant

View File

@ -64,27 +64,27 @@ class RemoteCommandExecutor(object):
self.__ssh.set_missing_host_key_policy(paramiko.WarningPolicy())
self.__ssh.connect(self.__host, username=self.__user,
password=self.__password, timeout=self.__timeout)
LOG.info(_("Connected to %s") % self.__host)
LOG.info("Connected to %s", self.__host)
except paramiko.AuthenticationException:
LOG.error(_("Authentication failed when connecting to %s")
% self.__host)
LOG.error("Authentication failed when connecting to %s",
self.__host)
raise exceptions.NotAuthorized
except paramiko.SSHException:
LOG.error(_("Could not connect to %s. Giving up") % self.__host)
LOG.error("Could not connect to %s. Giving up", self.__host)
raise
def close_session(self):
self.__ssh.close()
LOG.debug(_("Connection close"))
LOG.debug("Connection close")
def execute_command(self, cmd, input_data=None):
try:
stdin, stdout, stderr = self.__ssh.exec_command(cmd)
if input_data:
stdin.write(input_data)
LOG.debug(_("Input data written successfully"))
LOG.debug("Input data written successfully")
stdin.flush()
LOG.debug(_("Input data flushed"))
LOG.debug("Input data flushed")
stdin.channel.shutdown_write()
# NOTE (dkushwaha): There might be a case, when server can take
@ -96,11 +96,10 @@ class RemoteCommandExecutor(object):
cmd_err = stderr.readlines()
return_code = stdout.channel.recv_exit_status()
except paramiko.SSHException:
LOG.error(_("Command execution failed at %s. Giving up")
% self.__host)
LOG.error("Command execution failed at %s. Giving up", self.__host)
raise
result = CommandResult(cmd, cmd_out, cmd_err, return_code)
LOG.debug(_("Remote command execution result: %s"), result)
LOG.debug("Remote command execution result: %s", result)
return result
def __del__(self):

View File

@ -111,7 +111,7 @@ def setup_logging(conf):
"""
product_name = "tacker"
logging.setup(conf, product_name)
LOG.info(_("Logging enabled!"))
LOG.info("Logging enabled!")
def load_paste_app(app_name):
@ -127,7 +127,7 @@ def load_paste_app(app_name):
raise cfg.ConfigFilesNotFoundError(
config_files=[cfg.CONF.api_paste_config])
config_path = os.path.abspath(config_path)
LOG.info(_("Config paste file: %s"), config_path)
LOG.info("Config paste file: %s", config_path)
try:
app = deploy.loadapp("config:%s" % config_path, name=app_name)

View File

@ -44,7 +44,7 @@ class DriverManager(object):
self._drivers = dict((type_, ext.obj)
for (type_, ext) in drivers.items())
LOG.info(_("Registered drivers from %(namespace)s: %(keys)s"),
LOG.info("Registered drivers from %(namespace)s: %(keys)s",
{'namespace': namespace, 'keys': self._drivers.keys()})
@staticmethod

View File

@ -30,7 +30,7 @@ def log(method):
"method_name": method.__name__,
"args": strutils.mask_password(args[1:]),
"kwargs": strutils.mask_password(kwargs)}
LOG.debug(_('%(class_name)s method %(method_name)s'
' called with arguments %(args)s %(kwargs)s'), data)
LOG.debug('%(class_name)s method %(method_name)s'
' called with arguments %(args)s %(kwargs)s', data)
return method(*args, **kwargs)
return wrapper

View File

@ -65,7 +65,7 @@ class CommonServicesPluginDb(common_services.CommonServicesPluginBase,
timestamp=tstamp)
context.session.add(event_db)
except Exception as e:
LOG.exception(_("create event error: %s"), str(e))
LOG.exception("create event error: %s", str(e))
raise common_services.EventCreationFailureException(
error_str=str(e))
return self._make_event_dict(event_db)

View File

@ -212,5 +212,5 @@ class CommonDbMixin(object):
query = self._model_query(context, model)
return query.filter(model.name == name).one()
except orm_exc.NoResultFound:
LOG.info(_("No result found for %(name)s in %(model)s table"),
LOG.info("No result found for %(name)s in %(model)s table",
{'name': name, 'model': model})

View File

@ -155,7 +155,7 @@ class NSPluginDb(network_service.NSPluginBase, db_base.CommonDbMixin):
return dict((arg.key, arg.value) for arg in dev_attrs_db)
def _make_ns_dict(self, ns_db, fields=None):
LOG.debug(_('ns_db %s'), ns_db)
LOG.debug('ns_db %s', ns_db)
res = {}
key_list = ('id', 'tenant_id', 'nsd_id', 'name', 'description',
'vnf_ids', 'status', 'mgmt_urls', 'error_reason',
@ -166,7 +166,7 @@ class NSPluginDb(network_service.NSPluginBase, db_base.CommonDbMixin):
def create_nsd(self, context, nsd):
vnfds = nsd['vnfds']
nsd = nsd['nsd']
LOG.debug(_('nsd %s'), nsd)
LOG.debug('nsd %s', nsd)
tenant_id = self._get_tenant_id_for_create(context, nsd)
try:
@ -191,11 +191,11 @@ class NSPluginDb(network_service.NSPluginBase, db_base.CommonDbMixin):
raise exceptions.DuplicateEntity(
_type="nsd",
entry=e.columns)
LOG.debug(_('nsd_db %(nsd_db)s %(attributes)s '),
LOG.debug('nsd_db %(nsd_db)s %(attributes)s ',
{'nsd_db': nsd_db,
'attributes': nsd_db.attributes})
nsd_dict = self._make_nsd_dict(nsd_db)
LOG.debug(_('nsd_dict %s'), nsd_dict)
LOG.debug('nsd_dict %s', nsd_dict)
self._cos_db_plg.create_event(
context, res_id=nsd_dict['id'],
res_type=constants.RES_TYPE_NSD,
@ -240,7 +240,7 @@ class NSPluginDb(network_service.NSPluginBase, db_base.CommonDbMixin):
# reference implementation. needs to be overrided by subclass
def create_ns(self, context, ns):
LOG.debug(_('ns %s'), ns)
LOG.debug('ns %s', ns)
ns = ns['ns']
tenant_id = self._get_tenant_id_for_create(context, ns)
nsd_id = ns['nsd_id']
@ -279,7 +279,7 @@ class NSPluginDb(network_service.NSPluginBase, db_base.CommonDbMixin):
def create_ns_post(self, context, ns_id, mistral_obj,
vnfd_dict, error_reason):
LOG.debug(_('ns ID %s'), ns_id)
LOG.debug('ns ID %s', ns_id)
output = ast.literal_eval(mistral_obj.output)
mgmt_urls = dict()
vnf_ids = dict()

View File

@ -21,7 +21,6 @@ from oslo_log import log as logging
from six import iteritems
from sqlalchemy import orm
from sqlalchemy.orm import exc as orm_exc
from tacker._i18n import _
from tacker.db import db_base
from tacker.db import model_base
from tacker.db import models_v1
@ -219,7 +218,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
def create_vnffgd(self, context, vnffgd):
template = vnffgd['vnffgd']
LOG.debug(_('template %s'), template)
LOG.debug('template %s', template)
tenant_id = self._get_tenant_id_for_create(context, template)
with context.session.begin(subtransactions=True):
@ -232,7 +231,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
template=template.get('template'))
context.session.add(template_db)
LOG.debug(_('template_db %(template_db)s'),
LOG.debug('template_db %(template_db)s',
{'template_db': template_db})
return self._make_template_dict(template_db)
@ -322,7 +321,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
# called internally, not by REST API
def _create_vnffg_pre(self, context, vnffg):
vnffg = vnffg['vnffg']
LOG.debug(_('vnffg %s'), vnffg)
LOG.debug('vnffg %s', vnffg)
tenant_id = self._get_tenant_id_for_create(context, vnffg)
name = vnffg.get('name')
vnffg_id = vnffg.get('id') or str(uuid.uuid4())
@ -332,7 +331,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
with context.session.begin(subtransactions=True):
template_db = self._get_resource(context, VnffgTemplate,
template_id)
LOG.debug(_('vnffg template %s'), template_db)
LOG.debug('vnffg template %s', template_db)
if vnffg.get('attributes') and \
vnffg['attributes'].get('param_values'):
@ -346,13 +345,13 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
vnf_members = self._get_vnffg_property(template_db.template,
'constituent_vnfs')
LOG.debug(_('Constituent VNFs: %s'), vnf_members)
LOG.debug('Constituent VNFs: %s', vnf_members)
vnf_mapping = self._get_vnf_mapping(context, vnffg.get(
'vnf_mapping'), vnf_members)
LOG.debug(_('VNF Mapping: %s'), vnf_mapping)
LOG.debug('VNF Mapping: %s', vnf_mapping)
# create NFP dict
nfp_dict = self._create_nfp_pre(template_db)
LOG.debug(_('NFP: %s'), nfp_dict)
LOG.debug('NFP: %s', nfp_dict)
vnffg_db = Vnffg(id=vnffg_id,
tenant_id=tenant_id,
name=name,
@ -377,7 +376,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
chain = self._create_port_chain(context, vnf_mapping, template_db,
nfp_dict['name'])
LOG.debug(_('chain: %s'), chain)
LOG.debug('chain: %s', chain)
sfc_db = VnffgChain(id=sfc_id,
tenant_id=tenant_id,
status=constants.PENDING_CREATE,
@ -398,7 +397,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
match = self._policy_to_acl_criteria(context, template_db,
nfp_dict['name'],
vnf_mapping)
LOG.debug(_('acl_match %s'), match)
LOG.debug('acl_match %s', match)
match_db_table = ACLMatchCriteria(
id=str(uuid.uuid4()),
@ -502,7 +501,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
attr_val = VnffgPluginDbMixin._search_value(
template['node_templates'][nfp], attribute)
if attr_val is None:
LOG.debug(_('NFP %(nfp)s, attr %(attr)s'),
LOG.debug('NFP %(nfp)s, attr %(attr)s',
{'nfp': template['node_templates'][nfp],
'attr': attribute})
raise nfvo.NfpAttributeNotFoundException(attribute=attribute)
@ -546,14 +545,14 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
# that match VNFD
if vnf_mapping is None or vnfd not in vnf_mapping.keys():
# find suitable VNFs from vnfd_id
LOG.debug(_('Searching VNFS with id %s'), vnfd_id)
LOG.debug('Searching VNFS with id %s', vnfd_id)
vnf_list = vnfm_plugin.get_vnfs(context,
{'vnfd_id': [vnfd_id]},
fields=['id'])
if len(vnf_list) == 0:
raise nfvo.VnffgInvalidMappingException(vnfd_name=vnfd)
else:
LOG.debug(_('Matching VNFs found %s'), vnf_list)
LOG.debug('Matching VNFs found %s', vnf_list)
vnf_list = [vnf['id'] for vnf in vnf_list]
if len(vnf_list) > 1:
new_mapping[vnfd] = random.choice(vnf_list)
@ -581,7 +580,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
:param vnfs: List of VNF instance IDs
:return: None
"""
LOG.debug(_('validating vim for vnfs %s'), vnfs)
LOG.debug('validating vim for vnfs %s', vnfs)
vnfm_plugin = manager.TackerManager.get_service_plugins()['VNFM']
vim_id = None
for vnf in vnfs:
@ -670,9 +669,8 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
# instance_id = None means error on creation
def _create_vnffg_post(self, context, sfc_instance_id,
fc_instance_id, vnffg_dict):
LOG.debug(_('SFC created instance is %s'), sfc_instance_id)
LOG.debug(_('Flow Classifier created instance is %s'),
fc_instance_id)
LOG.debug('SFC created instance is %s', sfc_instance_id)
LOG.debug('Flow Classifier created instance is %s', fc_instance_id)
nfp_dict = self.get_nfp(context, vnffg_dict['forwarding_paths'])
sfc_id = nfp_dict['chain_id']
classifier_id = nfp_dict['classifier_id']
@ -723,8 +721,8 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
nfp_query.update({'status': status})
def _make_vnffg_dict(self, vnffg_db, fields=None):
LOG.debug(_('vnffg_db %s'), vnffg_db)
LOG.debug(_('vnffg_db nfp %s'), vnffg_db.forwarding_paths)
LOG.debug('vnffg_db %s', vnffg_db)
LOG.debug('vnffg_db nfp %s', vnffg_db.forwarding_paths)
res = {
'forwarding_paths': vnffg_db.forwarding_paths[0]['id']
}
@ -917,8 +915,8 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
if entry[key]}
def _make_classifier_dict(self, classifier_db, fields=None):
LOG.debug(_('classifier_db %s'), classifier_db)
LOG.debug(_('classifier_db match %s'), classifier_db.match)
LOG.debug('classifier_db %s', classifier_db)
LOG.debug('classifier_db match %s', classifier_db.match)
res = {
'match': self._make_acl_match_dict(classifier_db.match)
}
@ -928,7 +926,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
return self._fields(res, fields)
def _make_nfp_dict(self, nfp_db, fields=None):
LOG.debug(_('nfp_db %s'), nfp_db)
LOG.debug('nfp_db %s', nfp_db)
res = {'chain_id': nfp_db.chain['id'],
'classifier_id': nfp_db.classifier['id']}
key_list = ('name', 'id', 'tenant_id', 'symmetrical', 'status',
@ -937,7 +935,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
return self._fields(res, fields)
def _make_chain_dict(self, chain_db, fields=None):
LOG.debug(_('chain_db %s'), chain_db)
LOG.debug('chain_db %s', chain_db)
res = {}
key_list = ('id', 'tenant_id', 'symmetrical', 'status', 'chain',
'path_id', 'nfp_id', 'instance_id')

View File

@ -216,8 +216,8 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
return dict((arg.key, arg.value) for arg in dev_attrs_db)
def _make_vnf_dict(self, vnf_db, fields=None):
LOG.debug(_('vnf_db %s'), vnf_db)
LOG.debug(_('vnf_db attributes %s'), vnf_db.attributes)
LOG.debug('vnf_db %s', vnf_db)
LOG.debug('vnf_db attributes %s', vnf_db.attributes)
res = {
'vnfd':
self._make_vnfd_dict(vnf_db.vnfd),
@ -239,14 +239,14 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
def create_vnfd(self, context, vnfd):
vnfd = vnfd['vnfd']
LOG.debug(_('vnfd %s'), vnfd)
LOG.debug('vnfd %s', vnfd)
tenant_id = self._get_tenant_id_for_create(context, vnfd)
service_types = vnfd.get('service_types')
mgmt_driver = vnfd.get('mgmt_driver')
template_source = vnfd.get("template_source")
if (not attributes.is_attr_set(service_types)):
LOG.debug(_('service types unspecified'))
LOG.debug('service types unspecified')
raise vnfm.ServiceTypesNotSpecified()
try:
@ -280,11 +280,11 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
raise exceptions.DuplicateEntity(
_type="vnfd",
entry=e.columns)
LOG.debug(_('vnfd_db %(vnfd_db)s %(attributes)s '),
LOG.debug('vnfd_db %(vnfd_db)s %(attributes)s ',
{'vnfd_db': vnfd_db,
'attributes': vnfd_db.attributes})
vnfd_dict = self._make_vnfd_dict(vnfd_db)
LOG.debug(_('vnfd_dict %s'), vnfd_dict)
LOG.debug('vnfd_dict %s', vnfd_dict)
self._cos_db_plg.create_event(
context, res_id=vnfd_dict['id'],
res_type=constants.RES_TYPE_VNFD,
@ -352,7 +352,7 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
def choose_vnfd(self, context, service_type,
required_attributes=None):
required_attributes = required_attributes or []
LOG.debug(_('required_attributes %s'), required_attributes)
LOG.debug('required_attributes %s', required_attributes)
with context.session.begin(subtransactions=True):
query = (
context.session.query(VNFD).
@ -368,7 +368,7 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
VNFD.id ==
VNFDAttribute.vnfd_id,
VNFDAttribute.key == key)))
LOG.debug(_('statements %s'), query)
LOG.debug('statements %s', query)
vnfd_db = query.first()
if vnfd_db:
return self._make_vnfd_dict(vnfd_db)
@ -388,7 +388,7 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
# called internally, not by REST API
def _create_vnf_pre(self, context, vnf):
LOG.debug(_('vnf %s'), vnf)
LOG.debug('vnf %s', vnf)
tenant_id = self._get_tenant_id_for_create(context, vnf)
vnfd_id = vnf['vnfd_id']
name = vnf.get('name')
@ -435,7 +435,7 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
# intsance_id = None means error on creation
def _create_vnf_post(self, context, vnf_id, instance_id,
mgmt_url, vnf_dict):
LOG.debug(_('vnf_dict %s'), vnf_dict)
LOG.debug('vnf_dict %s', vnf_dict)
with context.session.begin(subtransactions=True):
query = (self._model_query(context, VNF).
filter(VNF.id == vnf_id).
@ -656,7 +656,7 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
filter(~VNF.status.in_(exclude_status)).
with_lockmode('update').one())
except orm_exc.NoResultFound:
LOG.warning(_('no vnf found %s'), vnf_id)
LOG.warning('no vnf found %s', vnf_id)
return False
vnf_db.update({'status': new_status})

View File

@ -133,11 +133,11 @@ class TackerManager(object):
plugin_providers = cfg.CONF.service_plugins
if 'commonservices' not in plugin_providers:
plugin_providers.append('commonservices')
LOG.debug(_("Loading service plugins: %s"), plugin_providers)
LOG.debug("Loading service plugins: %s", plugin_providers)
for provider in plugin_providers:
if provider == '':
continue
LOG.info(_("Loading Plugin: %s"), provider)
LOG.info("Loading Plugin: %s", provider)
plugin_inst = self._get_plugin_instance('tacker.service_plugins',
provider)
@ -156,8 +156,8 @@ class TackerManager(object):
# hasattr(plugin_inst, 'agent_notifiers')):
# self.plugin.agent_notifiers.update(plugin_inst.agent_notifiers)
LOG.debug(_("Successfully loaded %(type)s plugin. "
"Description: %(desc)s"),
LOG.debug("Successfully loaded %(type)s plugin. "
"Description: %(desc)s",
{"type": plugin_inst.get_plugin_type(),
"desc": plugin_inst.get_plugin_description()})

View File

@ -154,7 +154,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
try:
keystone_version = self.keystone.get_version(auth_url)
except Exception as e:
LOG.error(_('VIM Auth URL invalid'))
LOG.error('VIM Auth URL invalid')
raise nfvo.VimConnectionException(message=e.message)
return keystone_version
@ -186,7 +186,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
try:
regions_list = self._find_regions(ks_client)
except (exceptions.Unauthorized, exceptions.BadRequest) as e:
LOG.warning(_("Authorization failed for user"))
LOG.warning("Authorization failed for user")
raise nfvo.VimUnauthorizedException(message=e.message)
vim_obj['placement_attr'] = {'regions': regions_list}
return vim_obj
@ -309,7 +309,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
try:
resources = getattr(client, "%s" % cmd)(**cmd_args)[vim_res_name]
LOG.debug(_('resources output %s'), resources)
LOG.debug('resources output %s', resources)
except Exception:
raise nfvo.VimGetResourceException(
cmd=cmd, name=resource_name, type=resource_type)
@ -351,13 +351,13 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
return None
if not auth_attr:
LOG.warning(_("auth information required for n-sfc driver"))
LOG.warning("auth information required for n-sfc driver")
return None
if symmetrical:
LOG.warning(_("n-sfc driver does not support symmetrical"))
LOG.warning("n-sfc driver does not support symmetrical")
raise NotImplementedError('symmetrical chain not supported')
LOG.debug(_('fc passed is %s'), fc)
LOG.debug('fc passed is %s', fc)
sfc_classifier_params = {}
for field in fc:
if field in FC_MAP:
@ -368,10 +368,10 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
raise ValueError('protocol %s not supported' % fc[field])
sfc_classifier_params['protocol'] = protocol
else:
LOG.warning(_("flow classifier %s not supported by "
"networking-sfc driver"), field)
LOG.warning("flow classifier %s not supported by "
"networking-sfc driver", field)
LOG.debug(_('sfc_classifier_params is %s'), sfc_classifier_params)
LOG.debug('sfc_classifier_params is %s', sfc_classifier_params)
if len(sfc_classifier_params) > 0:
neutronclient_ = NeutronClient(auth_attr)
@ -384,11 +384,11 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
def create_chain(self, name, fc_id, vnfs, symmetrical=False,
auth_attr=None):
if not auth_attr:
LOG.warning(_("auth information required for n-sfc driver"))
LOG.warning("auth information required for n-sfc driver")
return None
if symmetrical:
LOG.warning(_("n-sfc driver does not support symmetrical"))
LOG.warning("n-sfc driver does not support symmetrical")
raise NotImplementedError('symmetrical chain not supported')
neutronclient_ = NeutronClient(auth_attr)
@ -404,16 +404,16 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
'port pair group for %s' % vnf['name']
port_pair_group['port_pairs'] = []
if CONNECTION_POINT not in vnf:
LOG.warning(_("Chain creation failed due to missing "
"connection point info in VNF "
"%(vnfname)s"), {'vnfname': vnf['name']})
LOG.warning("Chain creation failed due to missing "
"connection point info in VNF "
"%(vnfname)s", {'vnfname': vnf['name']})
return None
cp_list = vnf[CONNECTION_POINT]
num_cps = len(cp_list)
if num_cps != 1 and num_cps != 2:
LOG.warning(_("Chain creation failed due to wrong number of "
"connection points: expected [1 | 2], got "
"%(cps)d"), {'cps': num_cps})
LOG.warning("Chain creation failed due to wrong number of "
"connection points: expected [1 | 2], got "
"%(cps)d", {'cps': num_cps})
return None
port_pair = {}
port_pair['name'] = vnf['name'] + '-connection-points'
@ -426,16 +426,16 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
port_pair['egress'] = cp_list[1]
port_pair_id = neutronclient_.port_pair_create(port_pair)
if not port_pair_id:
LOG.warning(_("Chain creation failed due to port pair creation"
" failed for vnf %(vnf)s"), {'vnf': vnf['name']})
LOG.warning("Chain creation failed due to port pair creation"
" failed for vnf %(vnf)s", {'vnf': vnf['name']})
return None
port_pair_group['port_pairs'].append(port_pair_id)
port_pair_group_id = \
neutronclient_.port_pair_group_create(port_pair_group)
if not port_pair_group_id:
LOG.warning(_("Chain creation failed due to port pair group "
"creation failed for vnf "
"%(vnf)s"), {'vnf': vnf['name']})
LOG.warning("Chain creation failed due to port pair group "
"creation failed for vnf "
"%(vnf)s", {'vnf': vnf['name']})
return None
port_pair_group_list.append(port_pair_group_id)
@ -455,12 +455,12 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
# it will look it up (or reconstruct) from
# networking-sfc DB --- but the caveat is that
# the VNF name MUST be unique
LOG.warning(_("n-sfc driver does not support sf chain update"))
LOG.warning("n-sfc driver does not support sf chain update")
raise NotImplementedError('sf chain update not supported')
def delete_chain(self, chain_id, auth_attr=None):
if not auth_attr:
LOG.warning(_("auth information required for n-sfc driver"))
LOG.warning("auth information required for n-sfc driver")
return None
neutronclient_ = NeutronClient(auth_attr)
@ -469,11 +469,11 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
def update_flow_classifier(self, fc_id, fc,
symmetrical=False, auth_attr=None):
if not auth_attr:
LOG.warning(_("auth information required for n-sfc driver"))
LOG.warning("auth information required for n-sfc driver")
return None
if symmetrical:
LOG.warning(_("n-sfc driver does not support symmetrical"))
LOG.warning("n-sfc driver does not support symmetrical")
raise NotImplementedError('symmetrical chain not supported')
# for now, the only parameters allowed for flow-classifier-update
@ -483,7 +483,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
sfc_classifier_params['name'] = fc['name']
sfc_classifier_params['description'] = fc['description']
LOG.debug(_('sfc_classifier_params is %s'), sfc_classifier_params)
LOG.debug('sfc_classifier_params is %s', sfc_classifier_params)
neutronclient_ = NeutronClient(auth_attr)
return neutronclient_.flow_classifier_update(fc_id,
@ -491,7 +491,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
def delete_flow_classifier(self, fc_id, auth_attr=None):
if not auth_attr:
LOG.warning(_("auth information required for n-sfc driver"))
LOG.warning("auth information required for n-sfc driver")
raise EnvironmentError('auth attribute required for'
' networking-sfc driver')
@ -500,7 +500,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
def get_mistral_client(self, auth_dict):
if not auth_dict:
LOG.warning(_("auth dict required to instantiate mistral client"))
LOG.warning("auth dict required to instantiate mistral client")
raise EnvironmentError('auth dict required for'
' mistral workflow driver')
return mistral_client.MistralClient(
@ -547,7 +547,7 @@ class NeutronClient(object):
self.client = neutron_client.Client(session=sess)
def flow_classifier_create(self, fc_dict):
LOG.debug(_("fc_dict passed is {fc_dict}").format(fc_dict=fc_dict))
LOG.debug("fc_dict passed is {fc_dict}".format(fc_dict=fc_dict))
fc = self.client.create_flow_classifier({'flow_classifier': fc_dict})
if fc:
return fc['flow_classifier']['id']
@@ -562,14 +562,14 @@ class NeutronClient(object):
try:
self.client.delete_flow_classifier(fc_id)
except nc_exceptions.NotFound:
LOG.warning(_("fc %s not found"), fc_id)
LOG.warning("fc %s not found", fc_id)
raise ValueError('fc %s not found' % fc_id)
def port_pair_create(self, port_pair_dict):
try:
pp = self.client.create_port_pair({'port_pair': port_pair_dict})
except nc_exceptions.BadRequest as e:
LOG.error(_("create port pair returns %s"), e)
LOG.error("create port pair returns %s", e)
raise ValueError(str(e))
if pp and len(pp):
@@ -581,7 +581,7 @@ class NeutronClient(object):
try:
self.client.delete_port_pair(port_pair_id)
except nc_exceptions.NotFound:
LOG.warning(_('port pair %s not found'), port_pair_id)
LOG.warning('port pair %s not found', port_pair_id)
raise ValueError('port pair %s not found' % port_pair_id)
def port_pair_group_create(self, ppg_dict):
@@ -589,7 +589,7 @@ class NeutronClient(object):
ppg = self.client.create_port_pair_group(
{'port_pair_group': ppg_dict})
except nc_exceptions.BadRequest as e:
LOG.warning(_('create port pair group returns %s'), e)
LOG.warning('create port pair group returns %s', e)
raise ValueError(str(e))
if ppg and len(ppg):
@@ -601,7 +601,7 @@ class NeutronClient(object):
try:
self.client.delete_port_pair_group(ppg_id)
except nc_exceptions.NotFound:
LOG.warning(_('port pair group %s not found'), ppg_id)
LOG.warning('port pair group %s not found', ppg_id)
raise ValueError('port pair group %s not found' % ppg_id)
def port_chain_create(self, port_chain_dict):
@@ -609,7 +609,7 @@ class NeutronClient(object):
pc = self.client.create_port_chain(
{'port_chain': port_chain_dict})
except nc_exceptions.BadRequest as e:
LOG.warning(_('create port chain returns %s'), e)
LOG.warning('create port chain returns %s', e)
raise ValueError(str(e))
if pc and len(pc):
@@ -634,5 +634,5 @@ class NeutronClient(object):
pp_id = port_pairs[j]
self.client.delete_port_pair(pp_id)
except nc_exceptions.NotFound:
LOG.warning(_('port chain %s not found'), port_chain_id)
LOG.warning('port chain %s not found', port_chain_id)
raise ValueError('port chain %s not found' % port_chain_id)

View File

@@ -48,7 +48,7 @@ class VNFFGNoop(abstract_vnffg_driver.VnffgAbstractDriver):
@log.log
def update_chain(self, chain_id, fc_ids, vnfs, auth_attr=None):
if chain_id not in self._instances:
LOG.debug(_('Chain not found'))
LOG.debug('Chain not found')
raise ValueError('No chain instance %s' % chain_id)
@log.log
@@ -64,7 +64,7 @@ class VNFFGNoop(abstract_vnffg_driver.VnffgAbstractDriver):
@log.log
def update_flow_classifier(self, fc_id, fc, auth_attr=None):
if fc_id not in self._instances:
LOG.debug(_('FC not found'))
LOG.debug('FC not found')
raise ValueError('No FC instance %s' % fc_id)
@log.log

View File

@@ -101,7 +101,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
@log.log
def create_vim(self, context, vim):
LOG.debug(_('Create vim called with parameters %s'),
LOG.debug('Create vim called with parameters %s',
strutils.mask_password(vim))
vim_obj = vim['vim']
vim_type = vim_obj['type']
@@ -212,7 +212,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
'template'
)
LOG.debug(_('template yaml: %s'), template)
LOG.debug('template yaml: %s', template)
toscautils.updateimports(template)
@@ -220,7 +220,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
tosca_template.ToscaTemplate(
a_file=False, yaml_dict_tpl=template)
except Exception as e:
LOG.exception(_("tosca-parser error: %s"), str(e))
LOG.exception("tosca-parser error: %s", str(e))
raise nfvo.ToscaParserFailed(error_msg_details=str(e))
@log.log
@@ -321,7 +321,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
vnffg_dict = super(NfvoPlugin, self)._update_vnffg_pre(context,
vnffg_id)
new_vnffg = vnffg['vnffg']
LOG.debug(_('vnffg update: %s'), vnffg)
LOG.debug('vnffg update: %s', vnffg)
nfp = super(NfvoPlugin, self).get_nfp(context,
vnffg_dict['forwarding_paths'])
sfc = super(NfvoPlugin, self).get_sfc(context, nfp['chain_id'])
@@ -346,7 +346,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
'vnf_mapping'],
template_db,
nfp['name'])
LOG.debug(_('chain update: %s'), chain)
LOG.debug('chain update: %s', chain)
sfc['chain'] = chain
sfc['symmetrical'] = new_vnffg['symmetrical']
vim_obj = self._get_vim_from_vnf(context,
@@ -464,7 +464,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
f = fernet.Fernet(vim_key)
if not f:
LOG.warning(_('Unable to decode VIM auth'))
LOG.warning('Unable to decode VIM auth')
raise nfvo.VimNotFoundException(
'Unable to decode VIM auth key')
return f.decrypt(cred)
@@ -472,10 +472,10 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
@staticmethod
def _find_vim_key(vim_id):
key_file = os.path.join(CONF.vim_keys.openstack, vim_id)
LOG.debug(_('Attempting to open key file for vim id %s'), vim_id)
LOG.debug('Attempting to open key file for vim id %s', vim_id)
with open(key_file, 'r') as f:
return f.read()
LOG.warning(_('VIM id invalid or key not found for %s'), vim_id)
LOG.warning('VIM id invalid or key not found for %s', vim_id)
def _vim_resource_name_to_id(self, context, resource, name, vnf_id):
"""Converts a VIM resource name to its ID
@@ -501,7 +501,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
if isinstance(template, dict):
nsd_data['attributes']['nsd'] = yaml.safe_dump(
template)
LOG.debug(_('nsd %s'), nsd_data)
LOG.debug('nsd %s', nsd_data)
self._parse_template_input(context, nsd)
return super(NfvoPlugin, self).create_nsd(
@@ -512,7 +512,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
nsd_yaml = nsd_dict['attributes'].get('nsd')
inner_nsd_dict = yaml.safe_load(nsd_yaml)
nsd['vnfds'] = dict()
LOG.debug(_('nsd_dict: %s'), inner_nsd_dict)
LOG.debug('nsd_dict: %s', inner_nsd_dict)
vnfm_plugin = manager.TackerManager.get_service_plugins()['VNFM']
vnfd_imports = inner_nsd_dict['imports']
@@ -542,7 +542,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
ToscaTemplate(a_file=False,
yaml_dict_tpl=inner_nsd_dict)
except Exception as e:
LOG.exception(_("tosca-parser error: %s"), str(e))
LOG.exception("tosca-parser error: %s", str(e))
raise nfvo.ToscaParserFailed(error_msg_details=str(e))
finally:
for file_path in new_files:
@@ -559,7 +559,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
nsd_dict['name'] = inner_nsd_dict['metadata'].get(
'template_name', '')
LOG.debug(_('nsd %s'), nsd)
LOG.debug('nsd %s', nsd)
def _get_vnfd_id(self, vnfd_name, onboarded_vnfds):
for vnfd in onboarded_vnfds:
@@ -648,7 +648,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
workflow=workflow,
auth_dict=self.get_auth_dict(context))
except Exception as ex:
LOG.error(_('Error while executing workflow: %s'), ex)
LOG.error('Error while executing workflow: %s', ex)
self._vim_drivers.invoke(driver_type,
'delete_workflow',
workflow_id=workflow['id'],
@@ -666,7 +666,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
'get_execution',
execution_id=execution_id,
auth_dict=self.get_auth_dict(context)).state
LOG.debug(_('status: %s'), exec_state)
LOG.debug('status: %s', exec_state)
if exec_state == 'SUCCESS' or exec_state == 'ERROR':
break
mistral_retries = mistral_retries - 1
@@ -740,7 +740,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
kwargs={
'ns': ns})
except nfvo.NoTasksException:
LOG.warning(_("No VNF deletion task(s)."))
LOG.warning("No VNF deletion task(s).")
if workflow:
try:
mistral_execution = self._vim_drivers.invoke(