Remove log translations
Starting with the Pike series, OpenStack no longer supports log translation. See:
http://lists.openstack.org/pipermail/openstack-i18n/2016-November/002574.html
http://lists.openstack.org/pipermail/openstack-dev/2017-March/113365.html

Change-Id: I4440a1d6c332e48845fceadb464dd34ab11e12d2
parent 17e1a85e2b
commit d0872fec2d
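The pattern applied throughout the diff below is mechanical: log messages lose the _() translation wrapper (and any eager %-formatting) but keep passing their arguments to the logger, while user-facing exception messages keep _(). A minimal sketch of the before and after, assuming a hypothetical connect() helper; the oslo_log and tacker._i18n imports are the ones the diff itself touches:

    from oslo_log import log as logging

    from tacker._i18n import _  # still needed where exception messages stay translatable

    LOG = logging.getLogger(__name__)


    def connect(host):
        # Before this change: LOG.info(_("Connected to %s") % host)
        # After: no _() around the log text, and the argument is handed to the
        # logger (lazy %-formatting) instead of being formatted eagerly.
        LOG.info("Connected to %s", host)
        if host is None:
            # Exception messages remain wrapped in _() so they stay translatable.
            raise ValueError(_("A host is required"))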
@@ -43,7 +43,7 @@ def create_process(cmd, root_helper=None, addl_env=None,
 cmd = map(str, cmd)

 if debuglog:
-LOG.debug(_("Running command: %s"), cmd)
+LOG.debug("Running command: %s", cmd)
 env = os.environ.copy()
 if addl_env:
 env.update(addl_env)
@@ -19,6 +19,7 @@ from tacker.vnfm.monitor_drivers.token import Token
 from tacker import wsgi
 # check alarm url with db --> move to plugin

+
 LOG = logging.getLogger(__name__)

 OPTS = [
@@ -41,7 +42,7 @@ def config_opts():

 class AlarmReceiver(wsgi.Middleware):
 def process_request(self, req):
-LOG.debug(_('Process request: %s'), req)
+LOG.debug('Process request: %s', req)
 if req.method != 'POST':
 return
 url = req.url
@@ -102,8 +102,8 @@ def _get_pagination_max_limit():
 if max_limit == 0:
 raise ValueError()
 except ValueError:
-LOG.warning(_("Invalid value for pagination_max_limit: %s. It "
-"should be an integer greater to 0"),
+LOG.warning("Invalid value for pagination_max_limit: %s. It "
+"should be an integer greater to 0",
 cfg.CONF.pagination_max_limit)
 return max_limit

@@ -277,8 +277,7 @@ class ExtensionMiddleware(wsgi.Middleware):
 (resource.parent["collection_name"],
 resource.parent["member_name"]))

-LOG.debug(_('Extended resource: %s'),
-resource.collection)
+LOG.debug('Extended resource: %s', resource.collection)
 for action, method in (resource.collection_actions).items():
 conditions = dict(method=[method])
 path = "/%s/%s" % (resource.collection, action)
@@ -299,7 +298,7 @@ class ExtensionMiddleware(wsgi.Middleware):
 action_controllers = self._action_ext_controllers(application,
 self.ext_mgr, mapper)
 for action in self.ext_mgr.get_actions():
-LOG.debug(_('Extended action: %s'), action.action_name)
+LOG.debug('Extended action: %s', action.action_name)
 controller = action_controllers[action.collection]
 controller.add_action(action.action_name, action.handler)

@@ -307,7 +306,7 @@ class ExtensionMiddleware(wsgi.Middleware):
 req_controllers = self._request_ext_controllers(application,
 self.ext_mgr, mapper)
 for request_ext in self.ext_mgr.get_request_extensions():
-LOG.debug(_('Extended request: %s'), request_ext.key)
+LOG.debug('Extended request: %s', request_ext.key)
 controller = req_controllers[request_ext.key]
 controller.add_handler(request_ext.handler)

@@ -405,7 +404,7 @@ class ExtensionManager(object):
 return cls._instance

 def __init__(self, path):
-LOG.info(_('Initializing extension manager.'))
+LOG.info('Initializing extension manager.')
 self.path = path
 self.extensions = {}
 self._load_all_extensions()
@@ -485,8 +484,8 @@ class ExtensionManager(object):
 else:
 attr_map[resource] = resource_attrs
 except AttributeError:
-LOG.exception(_("Error fetching extended attributes for "
-"extension '%s'"), ext.get_name())
+LOG.exception("Error fetching extended attributes for "
+"extension '%s'", ext.get_name())
 processed_exts.add(ext_name)
 del exts_to_process[ext_name]
 if len(processed_exts) == processed_ext_count:
@@ -494,8 +493,8 @@ class ExtensionManager(object):
 break
 if exts_to_process:
 # NOTE(salv-orlando): Consider whether this error should be fatal
-LOG.error(_("It was impossible to process the following "
-"extensions: %s because of missing requirements."),
+LOG.error("It was impossible to process the following "
+"extensions: %s because of missing requirements.",
 ','.join(exts_to_process.keys()))

 # Extending extensions' attributes map.
@@ -505,13 +504,13 @@ class ExtensionManager(object):
 def _check_extension(self, extension):
 """Checks for required methods in extension objects."""
 try:
-LOG.debug(_('Ext name: %s'), extension.get_name())
-LOG.debug(_('Ext alias: %s'), extension.get_alias())
-LOG.debug(_('Ext description: %s'), extension.get_description())
-LOG.debug(_('Ext namespace: %s'), extension.get_namespace())
-LOG.debug(_('Ext updated: %s'), extension.get_updated())
+LOG.debug('Ext name: %s', extension.get_name())
+LOG.debug('Ext alias: %s', extension.get_alias())
+LOG.debug('Ext description: %s', extension.get_description())
+LOG.debug('Ext namespace: %s', extension.get_namespace())
+LOG.debug('Ext updated: %s', extension.get_updated())
 except AttributeError as ex:
-LOG.exception(_("Exception loading extension: %s"), ex)
+LOG.exception("Exception loading extension: %s", ex)
 return False
 return True

@@ -529,7 +528,7 @@ class ExtensionManager(object):
 if os.path.exists(path):
 self._load_all_extensions_from_path(path)
 else:
-LOG.error(_("Extension path '%s' doesn't exist!"), path)
+LOG.error("Extension path '%s' doesn't exist!", path)

 def _load_all_extensions_from_path(self, path):
 # Sorting the extension list makes the order in which they
@@ -537,7 +536,7 @@ class ExtensionManager(object):
 # Tacker Servers
 for f in sorted(os.listdir(path)):
 try:
-LOG.debug(_('Loading extension file: %s'), f)
+LOG.debug('Loading extension file: %s', f)
 mod_name, file_ext = os.path.splitext(os.path.split(f)[-1])
 ext_path = os.path.join(path, f)
 if file_ext.lower() == '.py' and not mod_name.startswith('_'):
@@ -545,16 +544,16 @@ class ExtensionManager(object):
 ext_name = mod_name[0].upper() + mod_name[1:]
 new_ext_class = getattr(mod, ext_name, None)
 if not new_ext_class:
-LOG.warning(_('Did not find expected name '
-'"%(ext_name)s" in %(file)s'),
+LOG.warning('Did not find expected name '
+'"%(ext_name)s" in %(file)s',
 {'ext_name': ext_name,
 'file': ext_path})
 continue
 new_ext = new_ext_class()
 self.add_extension(new_ext)
 except Exception as exception:
-LOG.warning(_("Extension file %(f)s wasn't loaded due to "
-"%(exception)s"),
+LOG.warning("Extension file %(f)s wasn't loaded due to "
+"%(exception)s",
 {'f': f, 'exception': exception})

 def add_extension(self, ext):
@@ -563,7 +562,7 @@ class ExtensionManager(object):
 return

 alias = ext.get_alias()
-LOG.info(_('Loaded extension: %s'), alias)
+LOG.info('Loaded extension: %s', alias)

 if alias in self.extensions:
 raise exceptions.DuplicatedExtension(alias=alias)
@@ -73,8 +73,8 @@ class Controller(object):
 _("Native pagination depend on native sorting")
 )
 if not self._allow_sorting:
-LOG.info(_("Allow sorting is enabled because native "
-"pagination requires native sorting"))
+LOG.info("Allow sorting is enabled because native "
+"pagination requires native sorting")
 self._allow_sorting = True

 if parent:
@@ -331,8 +331,7 @@ class Controller(object):
 obj_deleter(request.context, obj['id'], **kwargs)
 except Exception:
 # broad catch as our only purpose is to log the exception
-LOG.exception(_("Unable to undo add for "
-"%(resource)s %(id)s"),
+LOG.exception("Unable to undo add for %(resource)s %(id)s",
 {'resource': self._resource,
 'id': obj['id']})
 # TODO(salvatore-orlando): The object being processed when the
@@ -508,8 +507,8 @@ class Controller(object):
 if not body:
 raise webob.exc.HTTPBadRequest(_("Resource body required"))

-LOG.debug(_("Request body: %(body)s"), {'body':
-strutils.mask_password(body)})
+LOG.debug("Request body: %(body)s",
+{'body': strutils.mask_password(body)})
 prep_req_body = lambda x: Controller.prepare_request_body(
 context,
 x if resource in x else {resource: x},
@@ -79,16 +79,12 @@ def Resource(controller, faults=None, deserializers=None, serializers=None):
 mapped_exc = api_common.convert_exception_to_http_exc(e, faults,
 language)
 if hasattr(mapped_exc, 'code') and 400 <= mapped_exc.code < 500:
-LOG.info(_('%(action)s failed (client error): %(exc)s'),
+LOG.info('%(action)s failed (client error): %(exc)s',
 {'action': action, 'exc': mapped_exc})
 else:
-LOG.exception(
-_('%(action)s failed: %(details)s'),
-{
-'action': action,
-'details': extract_exc_details(e),
-}
-)
+LOG.exception('%(action)s failed: %(details)s',
+{'action': action,
+'details': extract_exc_details(e)})
 raise mapped_exc

 status = action_status.get(action, 200)
@@ -32,7 +32,7 @@ class TackerKeystoneContext(wsgi.Middleware):
 # Determine the user ID
 user_id = req.headers.get('X_USER_ID')
 if not user_id:
-LOG.debug(_("X_USER_ID is not found in request"))
+LOG.debug("X_USER_ID is not found in request")
 return webob.exc.HTTPUnauthorized()

 # Determine the tenant
@@ -64,27 +64,27 @@ class RemoteCommandExecutor(object):
 self.__ssh.set_missing_host_key_policy(paramiko.WarningPolicy())
 self.__ssh.connect(self.__host, username=self.__user,
 password=self.__password, timeout=self.__timeout)
-LOG.info(_("Connected to %s") % self.__host)
+LOG.info("Connected to %s", self.__host)
 except paramiko.AuthenticationException:
-LOG.error(_("Authentication failed when connecting to %s")
-% self.__host)
+LOG.error("Authentication failed when connecting to %s",
+self.__host)
 raise exceptions.NotAuthorized
 except paramiko.SSHException:
-LOG.error(_("Could not connect to %s. Giving up") % self.__host)
+LOG.error("Could not connect to %s. Giving up", self.__host)
 raise

 def close_session(self):
 self.__ssh.close()
-LOG.debug(_("Connection close"))
+LOG.debug("Connection close")

 def execute_command(self, cmd, input_data=None):
 try:
 stdin, stdout, stderr = self.__ssh.exec_command(cmd)
 if input_data:
 stdin.write(input_data)
-LOG.debug(_("Input data written successfully"))
+LOG.debug("Input data written successfully")
 stdin.flush()
-LOG.debug(_("Input data flushed"))
+LOG.debug("Input data flushed")
 stdin.channel.shutdown_write()

 # NOTE (dkushwaha): There might be a case, when server can take
@@ -96,11 +96,10 @@ class RemoteCommandExecutor(object):
 cmd_err = stderr.readlines()
 return_code = stdout.channel.recv_exit_status()
 except paramiko.SSHException:
-LOG.error(_("Command execution failed at %s. Giving up")
-% self.__host)
+LOG.error("Command execution failed at %s. Giving up", self.__host)
 raise
 result = CommandResult(cmd, cmd_out, cmd_err, return_code)
-LOG.debug(_("Remote command execution result: %s"), result)
+LOG.debug("Remote command execution result: %s", result)
 return result

 def __del__(self):
@@ -111,7 +111,7 @@ def setup_logging(conf):
 """
 product_name = "tacker"
 logging.setup(conf, product_name)
-LOG.info(_("Logging enabled!"))
+LOG.info("Logging enabled!")


 def load_paste_app(app_name):
@@ -127,7 +127,7 @@ def load_paste_app(app_name):
 raise cfg.ConfigFilesNotFoundError(
 config_files=[cfg.CONF.api_paste_config])
 config_path = os.path.abspath(config_path)
-LOG.info(_("Config paste file: %s"), config_path)
+LOG.info("Config paste file: %s", config_path)

 try:
 app = deploy.loadapp("config:%s" % config_path, name=app_name)
@@ -44,7 +44,7 @@ class DriverManager(object):

 self._drivers = dict((type_, ext.obj)
 for (type_, ext) in drivers.items())
-LOG.info(_("Registered drivers from %(namespace)s: %(keys)s"),
+LOG.info("Registered drivers from %(namespace)s: %(keys)s",
 {'namespace': namespace, 'keys': self._drivers.keys()})

 @staticmethod
@@ -30,7 +30,7 @@ def log(method):
 "method_name": method.__name__,
 "args": strutils.mask_password(args[1:]),
 "kwargs": strutils.mask_password(kwargs)}
-LOG.debug(_('%(class_name)s method %(method_name)s'
-' called with arguments %(args)s %(kwargs)s'), data)
+LOG.debug('%(class_name)s method %(method_name)s'
+' called with arguments %(args)s %(kwargs)s', data)
 return method(*args, **kwargs)
 return wrapper
@@ -65,7 +65,7 @@ class CommonServicesPluginDb(common_services.CommonServicesPluginBase,
 timestamp=tstamp)
 context.session.add(event_db)
 except Exception as e:
-LOG.exception(_("create event error: %s"), str(e))
+LOG.exception("create event error: %s", str(e))
 raise common_services.EventCreationFailureException(
 error_str=str(e))
 return self._make_event_dict(event_db)
@@ -212,5 +212,5 @@ class CommonDbMixin(object):
 query = self._model_query(context, model)
 return query.filter(model.name == name).one()
 except orm_exc.NoResultFound:
-LOG.info(_("No result found for %(name)s in %(model)s table"),
+LOG.info("No result found for %(name)s in %(model)s table",
 {'name': name, 'model': model})
@@ -155,7 +155,7 @@ class NSPluginDb(network_service.NSPluginBase, db_base.CommonDbMixin):
 return dict((arg.key, arg.value) for arg in dev_attrs_db)

 def _make_ns_dict(self, ns_db, fields=None):
-LOG.debug(_('ns_db %s'), ns_db)
+LOG.debug('ns_db %s', ns_db)
 res = {}
 key_list = ('id', 'tenant_id', 'nsd_id', 'name', 'description',
 'vnf_ids', 'status', 'mgmt_urls', 'error_reason',
@@ -166,7 +166,7 @@ class NSPluginDb(network_service.NSPluginBase, db_base.CommonDbMixin):
 def create_nsd(self, context, nsd):
 vnfds = nsd['vnfds']
 nsd = nsd['nsd']
-LOG.debug(_('nsd %s'), nsd)
+LOG.debug('nsd %s', nsd)
 tenant_id = self._get_tenant_id_for_create(context, nsd)

 try:
@@ -191,11 +191,11 @@ class NSPluginDb(network_service.NSPluginBase, db_base.CommonDbMixin):
 raise exceptions.DuplicateEntity(
 _type="nsd",
 entry=e.columns)
-LOG.debug(_('nsd_db %(nsd_db)s %(attributes)s '),
+LOG.debug('nsd_db %(nsd_db)s %(attributes)s ',
 {'nsd_db': nsd_db,
 'attributes': nsd_db.attributes})
 nsd_dict = self._make_nsd_dict(nsd_db)
-LOG.debug(_('nsd_dict %s'), nsd_dict)
+LOG.debug('nsd_dict %s', nsd_dict)
 self._cos_db_plg.create_event(
 context, res_id=nsd_dict['id'],
 res_type=constants.RES_TYPE_NSD,
@@ -240,7 +240,7 @@ class NSPluginDb(network_service.NSPluginBase, db_base.CommonDbMixin):

 # reference implementation. needs to be overrided by subclass
 def create_ns(self, context, ns):
-LOG.debug(_('ns %s'), ns)
+LOG.debug('ns %s', ns)
 ns = ns['ns']
 tenant_id = self._get_tenant_id_for_create(context, ns)
 nsd_id = ns['nsd_id']
@@ -279,7 +279,7 @@ class NSPluginDb(network_service.NSPluginBase, db_base.CommonDbMixin):

 def create_ns_post(self, context, ns_id, mistral_obj,
 vnfd_dict, error_reason):
-LOG.debug(_('ns ID %s'), ns_id)
+LOG.debug('ns ID %s', ns_id)
 output = ast.literal_eval(mistral_obj.output)
 mgmt_urls = dict()
 vnf_ids = dict()
@@ -21,7 +21,6 @@ from oslo_log import log as logging
 from six import iteritems
 from sqlalchemy import orm
 from sqlalchemy.orm import exc as orm_exc
-from tacker._i18n import _
 from tacker.db import db_base
 from tacker.db import model_base
 from tacker.db import models_v1
@@ -219,7 +218,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):

 def create_vnffgd(self, context, vnffgd):
 template = vnffgd['vnffgd']
-LOG.debug(_('template %s'), template)
+LOG.debug('template %s', template)
 tenant_id = self._get_tenant_id_for_create(context, template)

 with context.session.begin(subtransactions=True):
@@ -232,7 +231,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
 template=template.get('template'))
 context.session.add(template_db)

-LOG.debug(_('template_db %(template_db)s'),
+LOG.debug('template_db %(template_db)s',
 {'template_db': template_db})
 return self._make_template_dict(template_db)

@@ -322,7 +321,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
 # called internally, not by REST API
 def _create_vnffg_pre(self, context, vnffg):
 vnffg = vnffg['vnffg']
-LOG.debug(_('vnffg %s'), vnffg)
+LOG.debug('vnffg %s', vnffg)
 tenant_id = self._get_tenant_id_for_create(context, vnffg)
 name = vnffg.get('name')
 vnffg_id = vnffg.get('id') or str(uuid.uuid4())
@@ -332,7 +331,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
 with context.session.begin(subtransactions=True):
 template_db = self._get_resource(context, VnffgTemplate,
 template_id)
-LOG.debug(_('vnffg template %s'), template_db)
+LOG.debug('vnffg template %s', template_db)

 if vnffg.get('attributes') and \
 vnffg['attributes'].get('param_values'):
@@ -346,13 +345,13 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):

 vnf_members = self._get_vnffg_property(template_db.template,
 'constituent_vnfs')
-LOG.debug(_('Constituent VNFs: %s'), vnf_members)
+LOG.debug('Constituent VNFs: %s', vnf_members)
 vnf_mapping = self._get_vnf_mapping(context, vnffg.get(
 'vnf_mapping'), vnf_members)
-LOG.debug(_('VNF Mapping: %s'), vnf_mapping)
+LOG.debug('VNF Mapping: %s', vnf_mapping)
 # create NFP dict
 nfp_dict = self._create_nfp_pre(template_db)
-LOG.debug(_('NFP: %s'), nfp_dict)
+LOG.debug('NFP: %s', nfp_dict)
 vnffg_db = Vnffg(id=vnffg_id,
 tenant_id=tenant_id,
 name=name,
@@ -377,7 +376,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):

 chain = self._create_port_chain(context, vnf_mapping, template_db,
 nfp_dict['name'])
-LOG.debug(_('chain: %s'), chain)
+LOG.debug('chain: %s', chain)
 sfc_db = VnffgChain(id=sfc_id,
 tenant_id=tenant_id,
 status=constants.PENDING_CREATE,
@@ -398,7 +397,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
 match = self._policy_to_acl_criteria(context, template_db,
 nfp_dict['name'],
 vnf_mapping)
-LOG.debug(_('acl_match %s'), match)
+LOG.debug('acl_match %s', match)

 match_db_table = ACLMatchCriteria(
 id=str(uuid.uuid4()),
@@ -502,7 +501,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
 attr_val = VnffgPluginDbMixin._search_value(
 template['node_templates'][nfp], attribute)
 if attr_val is None:
-LOG.debug(_('NFP %(nfp)s, attr %(attr)s'),
+LOG.debug('NFP %(nfp)s, attr %(attr)s',
 {'nfp': template['node_templates'][nfp],
 'attr': attribute})
 raise nfvo.NfpAttributeNotFoundException(attribute=attribute)
@@ -546,14 +545,14 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
 # that match VNFD
 if vnf_mapping is None or vnfd not in vnf_mapping.keys():
 # find suitable VNFs from vnfd_id
-LOG.debug(_('Searching VNFS with id %s'), vnfd_id)
+LOG.debug('Searching VNFS with id %s', vnfd_id)
 vnf_list = vnfm_plugin.get_vnfs(context,
 {'vnfd_id': [vnfd_id]},
 fields=['id'])
 if len(vnf_list) == 0:
 raise nfvo.VnffgInvalidMappingException(vnfd_name=vnfd)
 else:
-LOG.debug(_('Matching VNFs found %s'), vnf_list)
+LOG.debug('Matching VNFs found %s', vnf_list)
 vnf_list = [vnf['id'] for vnf in vnf_list]
 if len(vnf_list) > 1:
 new_mapping[vnfd] = random.choice(vnf_list)
@@ -581,7 +580,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
 :param vnfs: List of VNF instance IDs
 :return: None
 """
-LOG.debug(_('validating vim for vnfs %s'), vnfs)
+LOG.debug('validating vim for vnfs %s', vnfs)
 vnfm_plugin = manager.TackerManager.get_service_plugins()['VNFM']
 vim_id = None
 for vnf in vnfs:
@@ -670,9 +669,8 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
 # instance_id = None means error on creation
 def _create_vnffg_post(self, context, sfc_instance_id,
 fc_instance_id, vnffg_dict):
-LOG.debug(_('SFC created instance is %s'), sfc_instance_id)
-LOG.debug(_('Flow Classifier created instance is %s'),
-fc_instance_id)
+LOG.debug('SFC created instance is %s', sfc_instance_id)
+LOG.debug('Flow Classifier created instance is %s', fc_instance_id)
 nfp_dict = self.get_nfp(context, vnffg_dict['forwarding_paths'])
 sfc_id = nfp_dict['chain_id']
 classifier_id = nfp_dict['classifier_id']
@@ -723,8 +721,8 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
 nfp_query.update({'status': status})

 def _make_vnffg_dict(self, vnffg_db, fields=None):
-LOG.debug(_('vnffg_db %s'), vnffg_db)
-LOG.debug(_('vnffg_db nfp %s'), vnffg_db.forwarding_paths)
+LOG.debug('vnffg_db %s', vnffg_db)
+LOG.debug('vnffg_db nfp %s', vnffg_db.forwarding_paths)
 res = {
 'forwarding_paths': vnffg_db.forwarding_paths[0]['id']
 }
@@ -917,8 +915,8 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
 if entry[key]}

 def _make_classifier_dict(self, classifier_db, fields=None):
-LOG.debug(_('classifier_db %s'), classifier_db)
-LOG.debug(_('classifier_db match %s'), classifier_db.match)
+LOG.debug('classifier_db %s', classifier_db)
+LOG.debug('classifier_db match %s', classifier_db.match)
 res = {
 'match': self._make_acl_match_dict(classifier_db.match)
 }
@@ -928,7 +926,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
 return self._fields(res, fields)

 def _make_nfp_dict(self, nfp_db, fields=None):
-LOG.debug(_('nfp_db %s'), nfp_db)
+LOG.debug('nfp_db %s', nfp_db)
 res = {'chain_id': nfp_db.chain['id'],
 'classifier_id': nfp_db.classifier['id']}
 key_list = ('name', 'id', 'tenant_id', 'symmetrical', 'status',
@@ -937,7 +935,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
 return self._fields(res, fields)

 def _make_chain_dict(self, chain_db, fields=None):
-LOG.debug(_('chain_db %s'), chain_db)
+LOG.debug('chain_db %s', chain_db)
 res = {}
 key_list = ('id', 'tenant_id', 'symmetrical', 'status', 'chain',
 'path_id', 'nfp_id', 'instance_id')
@@ -216,8 +216,8 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
 return dict((arg.key, arg.value) for arg in dev_attrs_db)

 def _make_vnf_dict(self, vnf_db, fields=None):
-LOG.debug(_('vnf_db %s'), vnf_db)
-LOG.debug(_('vnf_db attributes %s'), vnf_db.attributes)
+LOG.debug('vnf_db %s', vnf_db)
+LOG.debug('vnf_db attributes %s', vnf_db.attributes)
 res = {
 'vnfd':
 self._make_vnfd_dict(vnf_db.vnfd),
@@ -239,14 +239,14 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):

 def create_vnfd(self, context, vnfd):
 vnfd = vnfd['vnfd']
-LOG.debug(_('vnfd %s'), vnfd)
+LOG.debug('vnfd %s', vnfd)
 tenant_id = self._get_tenant_id_for_create(context, vnfd)
 service_types = vnfd.get('service_types')
 mgmt_driver = vnfd.get('mgmt_driver')
 template_source = vnfd.get("template_source")

 if (not attributes.is_attr_set(service_types)):
-LOG.debug(_('service types unspecified'))
+LOG.debug('service types unspecified')
 raise vnfm.ServiceTypesNotSpecified()

 try:
@@ -280,11 +280,11 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
 raise exceptions.DuplicateEntity(
 _type="vnfd",
 entry=e.columns)
-LOG.debug(_('vnfd_db %(vnfd_db)s %(attributes)s '),
+LOG.debug('vnfd_db %(vnfd_db)s %(attributes)s ',
 {'vnfd_db': vnfd_db,
 'attributes': vnfd_db.attributes})
 vnfd_dict = self._make_vnfd_dict(vnfd_db)
-LOG.debug(_('vnfd_dict %s'), vnfd_dict)
+LOG.debug('vnfd_dict %s', vnfd_dict)
 self._cos_db_plg.create_event(
 context, res_id=vnfd_dict['id'],
 res_type=constants.RES_TYPE_VNFD,
@@ -352,7 +352,7 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
 def choose_vnfd(self, context, service_type,
 required_attributes=None):
 required_attributes = required_attributes or []
-LOG.debug(_('required_attributes %s'), required_attributes)
+LOG.debug('required_attributes %s', required_attributes)
 with context.session.begin(subtransactions=True):
 query = (
 context.session.query(VNFD).
@@ -368,7 +368,7 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
 VNFD.id ==
 VNFDAttribute.vnfd_id,
 VNFDAttribute.key == key)))
-LOG.debug(_('statements %s'), query)
+LOG.debug('statements %s', query)
 vnfd_db = query.first()
 if vnfd_db:
 return self._make_vnfd_dict(vnfd_db)
@@ -388,7 +388,7 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):

 # called internally, not by REST API
 def _create_vnf_pre(self, context, vnf):
-LOG.debug(_('vnf %s'), vnf)
+LOG.debug('vnf %s', vnf)
 tenant_id = self._get_tenant_id_for_create(context, vnf)
 vnfd_id = vnf['vnfd_id']
 name = vnf.get('name')
@@ -435,7 +435,7 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
 # intsance_id = None means error on creation
 def _create_vnf_post(self, context, vnf_id, instance_id,
 mgmt_url, vnf_dict):
-LOG.debug(_('vnf_dict %s'), vnf_dict)
+LOG.debug('vnf_dict %s', vnf_dict)
 with context.session.begin(subtransactions=True):
 query = (self._model_query(context, VNF).
 filter(VNF.id == vnf_id).
@@ -656,7 +656,7 @@ class VNFMPluginDb(vnfm.VNFMPluginBase, db_base.CommonDbMixin):
 filter(~VNF.status.in_(exclude_status)).
 with_lockmode('update').one())
 except orm_exc.NoResultFound:
-LOG.warning(_('no vnf found %s'), vnf_id)
+LOG.warning('no vnf found %s', vnf_id)
 return False

 vnf_db.update({'status': new_status})
@@ -133,11 +133,11 @@ class TackerManager(object):
 plugin_providers = cfg.CONF.service_plugins
 if 'commonservices' not in plugin_providers:
 plugin_providers.append('commonservices')
-LOG.debug(_("Loading service plugins: %s"), plugin_providers)
+LOG.debug("Loading service plugins: %s", plugin_providers)
 for provider in plugin_providers:
 if provider == '':
 continue
-LOG.info(_("Loading Plugin: %s"), provider)
+LOG.info("Loading Plugin: %s", provider)

 plugin_inst = self._get_plugin_instance('tacker.service_plugins',
 provider)
@@ -156,8 +156,8 @@ class TackerManager(object):
 # hasattr(plugin_inst, 'agent_notifiers')):
 # self.plugin.agent_notifiers.update(plugin_inst.agent_notifiers)

-LOG.debug(_("Successfully loaded %(type)s plugin. "
-"Description: %(desc)s"),
+LOG.debug("Successfully loaded %(type)s plugin. "
+"Description: %(desc)s",
 {"type": plugin_inst.get_plugin_type(),
 "desc": plugin_inst.get_plugin_description()})

@@ -154,7 +154,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
 try:
 keystone_version = self.keystone.get_version(auth_url)
 except Exception as e:
-LOG.error(_('VIM Auth URL invalid'))
+LOG.error('VIM Auth URL invalid')
 raise nfvo.VimConnectionException(message=e.message)
 return keystone_version

@@ -186,7 +186,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
 try:
 regions_list = self._find_regions(ks_client)
 except (exceptions.Unauthorized, exceptions.BadRequest) as e:
-LOG.warning(_("Authorization failed for user"))
+LOG.warning("Authorization failed for user")
 raise nfvo.VimUnauthorizedException(message=e.message)
 vim_obj['placement_attr'] = {'regions': regions_list}
 return vim_obj
@@ -309,7 +309,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,

 try:
 resources = getattr(client, "%s" % cmd)(**cmd_args)[vim_res_name]
-LOG.debug(_('resources output %s'), resources)
+LOG.debug('resources output %s', resources)
 except Exception:
 raise nfvo.VimGetResourceException(
 cmd=cmd, name=resource_name, type=resource_type)
@@ -351,13 +351,13 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
 return None

 if not auth_attr:
-LOG.warning(_("auth information required for n-sfc driver"))
+LOG.warning("auth information required for n-sfc driver")
 return None

 if symmetrical:
-LOG.warning(_("n-sfc driver does not support symmetrical"))
+LOG.warning("n-sfc driver does not support symmetrical")
 raise NotImplementedError('symmetrical chain not supported')
-LOG.debug(_('fc passed is %s'), fc)
+LOG.debug('fc passed is %s', fc)
 sfc_classifier_params = {}
 for field in fc:
 if field in FC_MAP:
@@ -368,10 +368,10 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
 raise ValueError('protocol %s not supported' % fc[field])
 sfc_classifier_params['protocol'] = protocol
 else:
-LOG.warning(_("flow classifier %s not supported by "
-"networking-sfc driver"), field)
+LOG.warning("flow classifier %s not supported by "
+"networking-sfc driver", field)

-LOG.debug(_('sfc_classifier_params is %s'), sfc_classifier_params)
+LOG.debug('sfc_classifier_params is %s', sfc_classifier_params)
 if len(sfc_classifier_params) > 0:
 neutronclient_ = NeutronClient(auth_attr)

@@ -384,11 +384,11 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
 def create_chain(self, name, fc_id, vnfs, symmetrical=False,
 auth_attr=None):
 if not auth_attr:
-LOG.warning(_("auth information required for n-sfc driver"))
+LOG.warning("auth information required for n-sfc driver")
 return None

 if symmetrical:
-LOG.warning(_("n-sfc driver does not support symmetrical"))
+LOG.warning("n-sfc driver does not support symmetrical")
 raise NotImplementedError('symmetrical chain not supported')

 neutronclient_ = NeutronClient(auth_attr)
@@ -404,16 +404,16 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
 'port pair group for %s' % vnf['name']
 port_pair_group['port_pairs'] = []
 if CONNECTION_POINT not in vnf:
-LOG.warning(_("Chain creation failed due to missing "
+LOG.warning("Chain creation failed due to missing "
 "connection point info in VNF "
-"%(vnfname)s"), {'vnfname': vnf['name']})
+"%(vnfname)s", {'vnfname': vnf['name']})
 return None
 cp_list = vnf[CONNECTION_POINT]
 num_cps = len(cp_list)
 if num_cps != 1 and num_cps != 2:
-LOG.warning(_("Chain creation failed due to wrong number of "
+LOG.warning("Chain creation failed due to wrong number of "
 "connection points: expected [1 | 2], got "
-"%(cps)d"), {'cps': num_cps})
+"%(cps)d", {'cps': num_cps})
 return None
 port_pair = {}
 port_pair['name'] = vnf['name'] + '-connection-points'
@@ -426,16 +426,16 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
 port_pair['egress'] = cp_list[1]
 port_pair_id = neutronclient_.port_pair_create(port_pair)
 if not port_pair_id:
-LOG.warning(_("Chain creation failed due to port pair creation"
-" failed for vnf %(vnf)s"), {'vnf': vnf['name']})
+LOG.warning("Chain creation failed due to port pair creation"
+" failed for vnf %(vnf)s", {'vnf': vnf['name']})
 return None
 port_pair_group['port_pairs'].append(port_pair_id)
 port_pair_group_id = \
 neutronclient_.port_pair_group_create(port_pair_group)
 if not port_pair_group_id:
-LOG.warning(_("Chain creation failed due to port pair group "
+LOG.warning("Chain creation failed due to port pair group "
 "creation failed for vnf "
-"%(vnf)s"), {'vnf': vnf['name']})
+"%(vnf)s", {'vnf': vnf['name']})
 return None
 port_pair_group_list.append(port_pair_group_id)

@@ -455,12 +455,12 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
 # it will look it up (or reconstruct) from
 # networking-sfc DB --- but the caveat is that
 # the VNF name MUST be unique
-LOG.warning(_("n-sfc driver does not support sf chain update"))
+LOG.warning("n-sfc driver does not support sf chain update")
 raise NotImplementedError('sf chain update not supported')

 def delete_chain(self, chain_id, auth_attr=None):
 if not auth_attr:
-LOG.warning(_("auth information required for n-sfc driver"))
+LOG.warning("auth information required for n-sfc driver")
 return None

 neutronclient_ = NeutronClient(auth_attr)
@@ -469,11 +469,11 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
 def update_flow_classifier(self, fc_id, fc,
 symmetrical=False, auth_attr=None):
 if not auth_attr:
-LOG.warning(_("auth information required for n-sfc driver"))
+LOG.warning("auth information required for n-sfc driver")
 return None

 if symmetrical:
-LOG.warning(_("n-sfc driver does not support symmetrical"))
+LOG.warning("n-sfc driver does not support symmetrical")
 raise NotImplementedError('symmetrical chain not supported')

 # for now, the only parameters allowed for flow-classifier-update
@@ -483,7 +483,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
 sfc_classifier_params['name'] = fc['name']
 sfc_classifier_params['description'] = fc['description']

-LOG.debug(_('sfc_classifier_params is %s'), sfc_classifier_params)
+LOG.debug('sfc_classifier_params is %s', sfc_classifier_params)

 neutronclient_ = NeutronClient(auth_attr)
 return neutronclient_.flow_classifier_update(fc_id,
@@ -491,7 +491,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,

 def delete_flow_classifier(self, fc_id, auth_attr=None):
 if not auth_attr:
-LOG.warning(_("auth information required for n-sfc driver"))
+LOG.warning("auth information required for n-sfc driver")
 raise EnvironmentError('auth attribute required for'
 ' networking-sfc driver')

@@ -500,7 +500,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,

 def get_mistral_client(self, auth_dict):
 if not auth_dict:
-LOG.warning(_("auth dict required to instantiate mistral client"))
+LOG.warning("auth dict required to instantiate mistral client")
 raise EnvironmentError('auth dict required for'
 ' mistral workflow driver')
 return mistral_client.MistralClient(
@@ -547,7 +547,7 @@ class NeutronClient(object):
 self.client = neutron_client.Client(session=sess)

 def flow_classifier_create(self, fc_dict):
-LOG.debug(_("fc_dict passed is {fc_dict}").format(fc_dict=fc_dict))
+LOG.debug("fc_dict passed is {fc_dict}".format(fc_dict=fc_dict))
 fc = self.client.create_flow_classifier({'flow_classifier': fc_dict})
 if fc:
 return fc['flow_classifier']['id']
@@ -562,14 +562,14 @@ class NeutronClient(object):
 try:
 self.client.delete_flow_classifier(fc_id)
 except nc_exceptions.NotFound:
-LOG.warning(_("fc %s not found"), fc_id)
+LOG.warning("fc %s not found", fc_id)
 raise ValueError('fc %s not found' % fc_id)

 def port_pair_create(self, port_pair_dict):
 try:
 pp = self.client.create_port_pair({'port_pair': port_pair_dict})
 except nc_exceptions.BadRequest as e:
-LOG.error(_("create port pair returns %s"), e)
+LOG.error("create port pair returns %s", e)
 raise ValueError(str(e))

 if pp and len(pp):
@@ -581,7 +581,7 @@ class NeutronClient(object):
 try:
 self.client.delete_port_pair(port_pair_id)
 except nc_exceptions.NotFound:
-LOG.warning(_('port pair %s not found'), port_pair_id)
+LOG.warning('port pair %s not found', port_pair_id)
 raise ValueError('port pair %s not found' % port_pair_id)

 def port_pair_group_create(self, ppg_dict):
@@ -589,7 +589,7 @@ class NeutronClient(object):
 ppg = self.client.create_port_pair_group(
 {'port_pair_group': ppg_dict})
 except nc_exceptions.BadRequest as e:
-LOG.warning(_('create port pair group returns %s'), e)
+LOG.warning('create port pair group returns %s', e)
 raise ValueError(str(e))

 if ppg and len(ppg):
@@ -601,7 +601,7 @@ class NeutronClient(object):
 try:
 self.client.delete_port_pair_group(ppg_id)
 except nc_exceptions.NotFound:
-LOG.warning(_('port pair group %s not found'), ppg_id)
+LOG.warning('port pair group %s not found', ppg_id)
 raise ValueError('port pair group %s not found' % ppg_id)

 def port_chain_create(self, port_chain_dict):
@@ -609,7 +609,7 @@ class NeutronClient(object):
 pc = self.client.create_port_chain(
 {'port_chain': port_chain_dict})
 except nc_exceptions.BadRequest as e:
-LOG.warning(_('create port chain returns %s'), e)
+LOG.warning('create port chain returns %s', e)
 raise ValueError(str(e))

 if pc and len(pc):
@@ -634,5 +634,5 @@ class NeutronClient(object):
 pp_id = port_pairs[j]
 self.client.delete_port_pair(pp_id)
 except nc_exceptions.NotFound:
-LOG.warning(_('port chain %s not found'), port_chain_id)
+LOG.warning('port chain %s not found', port_chain_id)
 raise ValueError('port chain %s not found' % port_chain_id)
@@ -48,7 +48,7 @@ class VNFFGNoop(abstract_vnffg_driver.VnffgAbstractDriver):
 @log.log
 def update_chain(self, chain_id, fc_ids, vnfs, auth_attr=None):
 if chain_id not in self._instances:
-LOG.debug(_('Chain not found'))
+LOG.debug('Chain not found')
 raise ValueError('No chain instance %s' % chain_id)

 @log.log
@@ -64,7 +64,7 @@ class VNFFGNoop(abstract_vnffg_driver.VnffgAbstractDriver):
 @log.log
 def update_flow_classifier(self, fc_id, fc, auth_attr=None):
 if fc_id not in self._instances:
-LOG.debug(_('FC not found'))
+LOG.debug('FC not found')
 raise ValueError('No FC instance %s' % fc_id)

 @log.log
@@ -101,7 +101,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,

 @log.log
 def create_vim(self, context, vim):
-LOG.debug(_('Create vim called with parameters %s'),
+LOG.debug('Create vim called with parameters %s',
 strutils.mask_password(vim))
 vim_obj = vim['vim']
 vim_type = vim_obj['type']
@@ -212,7 +212,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
 'template'
 )

-LOG.debug(_('template yaml: %s'), template)
+LOG.debug('template yaml: %s', template)

 toscautils.updateimports(template)

@ -220,7 +220,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
tosca_template.ToscaTemplate(
|
tosca_template.ToscaTemplate(
|
||||||
a_file=False, yaml_dict_tpl=template)
|
a_file=False, yaml_dict_tpl=template)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
LOG.exception(_("tosca-parser error: %s"), str(e))
|
LOG.exception("tosca-parser error: %s", str(e))
|
||||||
raise nfvo.ToscaParserFailed(error_msg_details=str(e))
|
raise nfvo.ToscaParserFailed(error_msg_details=str(e))
|
||||||
|
|
||||||
@log.log
|
@log.log
|
||||||
@ -321,7 +321,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
vnffg_dict = super(NfvoPlugin, self)._update_vnffg_pre(context,
|
vnffg_dict = super(NfvoPlugin, self)._update_vnffg_pre(context,
|
||||||
vnffg_id)
|
vnffg_id)
|
||||||
new_vnffg = vnffg['vnffg']
|
new_vnffg = vnffg['vnffg']
|
||||||
LOG.debug(_('vnffg update: %s'), vnffg)
|
LOG.debug('vnffg update: %s', vnffg)
|
||||||
nfp = super(NfvoPlugin, self).get_nfp(context,
|
nfp = super(NfvoPlugin, self).get_nfp(context,
|
||||||
vnffg_dict['forwarding_paths'])
|
vnffg_dict['forwarding_paths'])
|
||||||
sfc = super(NfvoPlugin, self).get_sfc(context, nfp['chain_id'])
|
sfc = super(NfvoPlugin, self).get_sfc(context, nfp['chain_id'])
|
||||||
@ -346,7 +346,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
'vnf_mapping'],
|
'vnf_mapping'],
|
||||||
template_db,
|
template_db,
|
||||||
nfp['name'])
|
nfp['name'])
|
||||||
LOG.debug(_('chain update: %s'), chain)
|
LOG.debug('chain update: %s', chain)
|
||||||
sfc['chain'] = chain
|
sfc['chain'] = chain
|
||||||
sfc['symmetrical'] = new_vnffg['symmetrical']
|
sfc['symmetrical'] = new_vnffg['symmetrical']
|
||||||
vim_obj = self._get_vim_from_vnf(context,
|
vim_obj = self._get_vim_from_vnf(context,
|
||||||
@ -464,7 +464,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
|
|
||||||
f = fernet.Fernet(vim_key)
|
f = fernet.Fernet(vim_key)
|
||||||
if not f:
|
if not f:
|
||||||
LOG.warning(_('Unable to decode VIM auth'))
|
LOG.warning('Unable to decode VIM auth')
|
||||||
raise nfvo.VimNotFoundException(
|
raise nfvo.VimNotFoundException(
|
||||||
'Unable to decode VIM auth key')
|
'Unable to decode VIM auth key')
|
||||||
return f.decrypt(cred)
|
return f.decrypt(cred)
|
||||||
@ -472,10 +472,10 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def _find_vim_key(vim_id):
|
def _find_vim_key(vim_id):
|
||||||
key_file = os.path.join(CONF.vim_keys.openstack, vim_id)
|
key_file = os.path.join(CONF.vim_keys.openstack, vim_id)
|
||||||
LOG.debug(_('Attempting to open key file for vim id %s'), vim_id)
|
LOG.debug('Attempting to open key file for vim id %s', vim_id)
|
||||||
with open(key_file, 'r') as f:
|
with open(key_file, 'r') as f:
|
||||||
return f.read()
|
return f.read()
|
||||||
LOG.warning(_('VIM id invalid or key not found for %s'), vim_id)
|
LOG.warning('VIM id invalid or key not found for %s', vim_id)
|
||||||
|
|
||||||
def _vim_resource_name_to_id(self, context, resource, name, vnf_id):
|
def _vim_resource_name_to_id(self, context, resource, name, vnf_id):
|
||||||
"""Converts a VIM resource name to its ID
|
"""Converts a VIM resource name to its ID
|
||||||
@ -501,7 +501,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
if isinstance(template, dict):
|
if isinstance(template, dict):
|
||||||
nsd_data['attributes']['nsd'] = yaml.safe_dump(
|
nsd_data['attributes']['nsd'] = yaml.safe_dump(
|
||||||
template)
|
template)
|
||||||
LOG.debug(_('nsd %s'), nsd_data)
|
LOG.debug('nsd %s', nsd_data)
|
||||||
|
|
||||||
self._parse_template_input(context, nsd)
|
self._parse_template_input(context, nsd)
|
||||||
return super(NfvoPlugin, self).create_nsd(
|
return super(NfvoPlugin, self).create_nsd(
|
||||||
@ -512,7 +512,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
nsd_yaml = nsd_dict['attributes'].get('nsd')
|
nsd_yaml = nsd_dict['attributes'].get('nsd')
|
||||||
inner_nsd_dict = yaml.safe_load(nsd_yaml)
|
inner_nsd_dict = yaml.safe_load(nsd_yaml)
|
||||||
nsd['vnfds'] = dict()
|
nsd['vnfds'] = dict()
|
||||||
LOG.debug(_('nsd_dict: %s'), inner_nsd_dict)
|
LOG.debug('nsd_dict: %s', inner_nsd_dict)
|
||||||
|
|
||||||
vnfm_plugin = manager.TackerManager.get_service_plugins()['VNFM']
|
vnfm_plugin = manager.TackerManager.get_service_plugins()['VNFM']
|
||||||
vnfd_imports = inner_nsd_dict['imports']
|
vnfd_imports = inner_nsd_dict['imports']
|
||||||
@ -542,7 +542,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
ToscaTemplate(a_file=False,
|
ToscaTemplate(a_file=False,
|
||||||
yaml_dict_tpl=inner_nsd_dict)
|
yaml_dict_tpl=inner_nsd_dict)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
LOG.exception(_("tosca-parser error: %s"), str(e))
|
LOG.exception("tosca-parser error: %s", str(e))
|
||||||
raise nfvo.ToscaParserFailed(error_msg_details=str(e))
|
raise nfvo.ToscaParserFailed(error_msg_details=str(e))
|
||||||
finally:
|
finally:
|
||||||
for file_path in new_files:
|
for file_path in new_files:
|
||||||
@ -559,7 +559,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
nsd_dict['name'] = inner_nsd_dict['metadata'].get(
|
nsd_dict['name'] = inner_nsd_dict['metadata'].get(
|
||||||
'template_name', '')
|
'template_name', '')
|
||||||
|
|
||||||
LOG.debug(_('nsd %s'), nsd)
|
LOG.debug('nsd %s', nsd)
|
||||||
|
|
||||||
def _get_vnfd_id(self, vnfd_name, onboarded_vnfds):
|
def _get_vnfd_id(self, vnfd_name, onboarded_vnfds):
|
||||||
for vnfd in onboarded_vnfds:
|
for vnfd in onboarded_vnfds:
|
||||||
@ -648,7 +648,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
workflow=workflow,
|
workflow=workflow,
|
||||||
auth_dict=self.get_auth_dict(context))
|
auth_dict=self.get_auth_dict(context))
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
LOG.error(_('Error while executing workflow: %s'), ex)
|
LOG.error('Error while executing workflow: %s', ex)
|
||||||
self._vim_drivers.invoke(driver_type,
|
self._vim_drivers.invoke(driver_type,
|
||||||
'delete_workflow',
|
'delete_workflow',
|
||||||
workflow_id=workflow['id'],
|
workflow_id=workflow['id'],
|
||||||
@ -666,7 +666,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
'get_execution',
|
'get_execution',
|
||||||
execution_id=execution_id,
|
execution_id=execution_id,
|
||||||
auth_dict=self.get_auth_dict(context)).state
|
auth_dict=self.get_auth_dict(context)).state
|
||||||
LOG.debug(_('status: %s'), exec_state)
|
LOG.debug('status: %s', exec_state)
|
||||||
if exec_state == 'SUCCESS' or exec_state == 'ERROR':
|
if exec_state == 'SUCCESS' or exec_state == 'ERROR':
|
||||||
break
|
break
|
||||||
mistral_retries = mistral_retries - 1
|
mistral_retries = mistral_retries - 1
|
||||||
@ -740,7 +740,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
kwargs={
|
kwargs={
|
||||||
'ns': ns})
|
'ns': ns})
|
||||||
except nfvo.NoTasksException:
|
except nfvo.NoTasksException:
|
||||||
LOG.warning(_("No VNF deletion task(s)."))
|
LOG.warning("No VNF deletion task(s).")
|
||||||
if workflow:
|
if workflow:
|
||||||
try:
|
try:
|
||||||
mistral_execution = self._vim_drivers.invoke(
|
mistral_execution = self._vim_drivers.invoke(
|
||||||
@ -750,7 +750,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
auth_dict=self.get_auth_dict(context))
|
auth_dict=self.get_auth_dict(context))
|
||||||
|
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
LOG.error(_('Error while executing workflow: %s'), ex)
|
LOG.error('Error while executing workflow: %s', ex)
|
||||||
self._vim_drivers.invoke(driver_type,
|
self._vim_drivers.invoke(driver_type,
|
||||||
'delete_workflow',
|
'delete_workflow',
|
||||||
workflow_id=workflow['id'],
|
workflow_id=workflow['id'],
|
||||||
@ -769,7 +769,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
|
|||||||
'get_execution',
|
'get_execution',
|
||||||
execution_id=execution_id,
|
execution_id=execution_id,
|
||||||
auth_dict=self.get_auth_dict(context)).state
|
auth_dict=self.get_auth_dict(context)).state
|
||||||
LOG.debug(_('status: %s'), exec_state)
|
LOG.debug('status: %s', exec_state)
|
||||||
if exec_state == 'SUCCESS' or exec_state == 'ERROR':
|
if exec_state == 'SUCCESS' or exec_state == 'ERROR':
|
||||||
break
|
break
|
||||||
mistral_retries -= 1
|
mistral_retries -= 1
|
||||||
|
@ -99,7 +99,7 @@ def updateimports(template):
|
|||||||
|
|
||||||
template['imports'].append(nfvfile)
|
template['imports'].append(nfvfile)
|
||||||
|
|
||||||
LOG.debug(_("%s"), path)
|
LOG.debug(path)
|
||||||
|
|
||||||
|
|
||||||
@log.log
|
@log.log
|
||||||
@ -276,7 +276,7 @@ def post_process_heat_template(heat_tpl, mgmt_ports, metadata,
|
|||||||
heat_dict['outputs'].update(output)
|
heat_dict['outputs'].update(output)
|
||||||
else:
|
else:
|
||||||
heat_dict['outputs'] = output
|
heat_dict['outputs'] = output
|
||||||
LOG.debug(_('Added output for %s'), outputname)
|
LOG.debug('Added output for %s', outputname)
|
||||||
if metadata:
|
if metadata:
|
||||||
for vdu_name, metadata_dict in metadata['vdus'].items():
|
for vdu_name, metadata_dict in metadata['vdus'].items():
|
||||||
heat_dict['resources'][vdu_name]['properties']['metadata'] =\
|
heat_dict['resources'][vdu_name]['properties']['metadata'] =\
|
||||||
@ -396,9 +396,9 @@ def populate_flavor_extra_specs(es_dict, properties, flavor_extra_input):
|
|||||||
error_msg_details=(mval + ":Invalid Input"))
|
error_msg_details=(mval + ":Invalid Input"))
|
||||||
es_dict['hw:mem_page_size'] = mval
|
es_dict['hw:mem_page_size'] = mval
|
||||||
if 'numa_nodes' in properties and 'numa_node_count' in properties:
|
if 'numa_nodes' in properties and 'numa_node_count' in properties:
|
||||||
LOG.warning(_('Both numa_nodes and numa_node_count have been'
|
LOG.warning('Both numa_nodes and numa_node_count have been'
|
||||||
'specified; numa_node definitions will be ignored and'
|
'specified; numa_node definitions will be ignored and'
|
||||||
'numa_node_count will be applied'))
|
'numa_node_count will be applied')
|
||||||
if 'numa_node_count' in properties:
|
if 'numa_node_count' in properties:
|
||||||
es_dict['hw:numa_nodes'] = \
|
es_dict['hw:numa_nodes'] = \
|
||||||
properties['numa_node_count'].value
|
properties['numa_node_count'].value
|
||||||
|
@ -56,7 +56,7 @@ class DeviceNoop(abstract_driver.DeviceAbstractDriver):
|
|||||||
@log.log
|
@log.log
|
||||||
def update(self, plugin, context, vnf_id, vnf_dict, vnf):
|
def update(self, plugin, context, vnf_id, vnf_dict, vnf):
|
||||||
if vnf_id not in self._instances:
|
if vnf_id not in self._instances:
|
||||||
LOG.debug(_('not found'))
|
LOG.debug('not found')
|
||||||
raise ValueError('No instance %s' % vnf_id)
|
raise ValueError('No instance %s' % vnf_id)
|
||||||
|
|
||||||
@log.log
|
@log.log
|
||||||
|
@ -47,8 +47,8 @@ class HeatClient(object):
|
|||||||
try:
|
try:
|
||||||
self.stacks.delete(stack_id)
|
self.stacks.delete(stack_id)
|
||||||
except heatException.HTTPNotFound:
|
except heatException.HTTPNotFound:
|
||||||
LOG.warning(_("Stack %(stack)s created by service chain driver is "
|
LOG.warning("Stack %(stack)s created by service chain driver is "
|
||||||
"not found at cleanup"), {'stack': stack_id})
|
"not found at cleanup", {'stack': stack_id})
|
||||||
|
|
||||||
def get(self, stack_id):
|
def get(self, stack_id):
|
||||||
return self.stacks.get(stack_id)
|
return self.stacks.get(stack_id)
|
||||||
|
@ -95,7 +95,7 @@ class OpenStack(abstract_driver.DeviceAbstractDriver,
|
|||||||
|
|
||||||
@log.log
|
@log.log
|
||||||
def create(self, plugin, context, vnf, auth_attr):
|
def create(self, plugin, context, vnf, auth_attr):
|
||||||
LOG.debug(_('vnf %s'), vnf)
|
LOG.debug('vnf %s', vnf)
|
||||||
|
|
||||||
region_name = vnf.get('placement_attr', {}).get('region_name', None)
|
region_name = vnf.get('placement_attr', {}).get('region_name', None)
|
||||||
heatclient = hc.HeatClient(auth_attr, region_name)
|
heatclient = hc.HeatClient(auth_attr, region_name)
|
||||||
@ -115,9 +115,9 @@ class OpenStack(abstract_driver.DeviceAbstractDriver,
|
|||||||
fields['stack_name'] = name
|
fields['stack_name'] = name
|
||||||
|
|
||||||
# service context is ignored
|
# service context is ignored
|
||||||
LOG.debug(_('service_context: %s'), vnf.get('service_context', []))
|
LOG.debug('service_context: %s', vnf.get('service_context', []))
|
||||||
LOG.debug(_('fields: %s'), fields)
|
LOG.debug('fields: %s', fields)
|
||||||
LOG.debug(_('template: %s'), fields['template'])
|
LOG.debug('template: %s', fields['template'])
|
||||||
stack = heatclient.create(fields)
|
stack = heatclient.create(fields)
|
||||||
|
|
||||||
return stack
|
return stack
|
||||||
@ -137,17 +137,17 @@ class OpenStack(abstract_driver.DeviceAbstractDriver,
|
|||||||
try:
|
try:
|
||||||
stack = heatclient.get(vnf_id)
|
stack = heatclient.get(vnf_id)
|
||||||
except Exception:
|
except Exception:
|
||||||
LOG.warning(_("VNF Instance setup may not have "
|
LOG.warning("VNF Instance setup may not have "
|
||||||
"happened because Heat API request failed "
|
"happened because Heat API request failed "
|
||||||
"while waiting for the stack %(stack)s to be "
|
"while waiting for the stack %(stack)s to be "
|
||||||
"created"), {'stack': vnf_id})
|
"created", {'stack': vnf_id})
|
||||||
# continue to avoid temporary connection error to target
|
# continue to avoid temporary connection error to target
|
||||||
# VIM
|
# VIM
|
||||||
status = stack.stack_status
|
status = stack.stack_status
|
||||||
LOG.debug(_('status: %s'), status)
|
LOG.debug('status: %s', status)
|
||||||
stack_retries = stack_retries - 1
|
stack_retries = stack_retries - 1
|
||||||
|
|
||||||
LOG.debug(_('stack status: %(stack)s %(status)s'),
|
LOG.debug('stack status: %(stack)s %(status)s',
|
||||||
{'stack': str(stack), 'status': status})
|
{'stack': str(stack), 'status': status})
|
||||||
if stack_retries == 0 and status != 'CREATE_COMPLETE':
|
if stack_retries == 0 and status != 'CREATE_COMPLETE':
|
||||||
error_reason = _("Resource creation is not completed within"
|
error_reason = _("Resource creation is not completed within"
|
||||||
@ -156,7 +156,7 @@ class OpenStack(abstract_driver.DeviceAbstractDriver,
|
|||||||
wait=(self.STACK_RETRIES *
|
wait=(self.STACK_RETRIES *
|
||||||
self.STACK_RETRY_WAIT),
|
self.STACK_RETRY_WAIT),
|
||||||
stack=vnf_id)
|
stack=vnf_id)
|
||||||
LOG.warning(_("VNF Creation failed: %(reason)s"),
|
LOG.warning("VNF Creation failed: %(reason)s",
|
||||||
{'reason': error_reason})
|
{'reason': error_reason})
|
||||||
raise vnfm.VNFCreateWaitFailed(reason=error_reason)
|
raise vnfm.VNFCreateWaitFailed(reason=error_reason)
|
||||||
|
|
||||||
@ -165,7 +165,7 @@ class OpenStack(abstract_driver.DeviceAbstractDriver,
|
|||||||
raise vnfm.VNFCreateWaitFailed(reason=error_reason)
|
raise vnfm.VNFCreateWaitFailed(reason=error_reason)
|
||||||
|
|
||||||
def _find_mgmt_ips(outputs):
|
def _find_mgmt_ips(outputs):
|
||||||
LOG.debug(_('outputs %s'), outputs)
|
LOG.debug('outputs %s', outputs)
|
||||||
mgmt_ips = dict((output['output_key'][len(OUTPUT_PREFIX):],
|
mgmt_ips = dict((output['output_key'][len(OUTPUT_PREFIX):],
|
||||||
output['output_value'])
|
output['output_value'])
|
||||||
for output in outputs
|
for output in outputs
|
||||||
@ -246,10 +246,10 @@ class OpenStack(abstract_driver.DeviceAbstractDriver,
|
|||||||
except heatException.HTTPNotFound:
|
except heatException.HTTPNotFound:
|
||||||
return
|
return
|
||||||
except Exception:
|
except Exception:
|
||||||
LOG.warning(_("VNF Instance cleanup may not have "
|
LOG.warning("VNF Instance cleanup may not have "
|
||||||
"happened because Heat API request failed "
|
"happened because Heat API request failed "
|
||||||
"while waiting for the stack %(stack)s to be "
|
"while waiting for the stack %(stack)s to be "
|
||||||
"deleted"), {'stack': vnf_id})
|
"deleted", {'stack': vnf_id})
|
||||||
# Just like create wait, ignore the exception to
|
# Just like create wait, ignore the exception to
|
||||||
# avoid temporary connection error.
|
# avoid temporary connection error.
|
||||||
status = stack.stack_status
|
status = stack.stack_status
|
||||||
|
@ -95,7 +95,7 @@ class TOSCAToHOT(object):
|
|||||||
self.vnfd_yaml = self.attributes.pop('vnfd', None)
|
self.vnfd_yaml = self.attributes.pop('vnfd', None)
|
||||||
if self.vnfd_yaml is None:
|
if self.vnfd_yaml is None:
|
||||||
# TODO(kangaraj-manickam) raise user level exception
|
# TODO(kangaraj-manickam) raise user level exception
|
||||||
LOG.info(_("VNFD is not provided, so no vnf is created !!"))
|
LOG.info("VNFD is not provided, so no vnf is created !!")
|
||||||
return
|
return
|
||||||
LOG.debug('vnfd_yaml %s', self.vnfd_yaml)
|
LOG.debug('vnfd_yaml %s', self.vnfd_yaml)
|
||||||
|
|
||||||
|
@ -70,16 +70,15 @@ class Keystone(object):
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def create_key_dir(path):
|
def create_key_dir(path):
|
||||||
if not os.access(path, os.F_OK):
|
if not os.access(path, os.F_OK):
|
||||||
LOG.info(_(
|
LOG.info('[fernet_tokens] key_repository does not appear to '
|
||||||
'[fernet_tokens] key_repository does not appear to exist; '
|
'exist; attempting to create it')
|
||||||
'attempting to create it'))
|
|
||||||
try:
|
try:
|
||||||
os.makedirs(path, 0o700)
|
os.makedirs(path, 0o700)
|
||||||
except OSError:
|
except OSError:
|
||||||
LOG.error(_(
|
LOG.error(
|
||||||
'Failed to create [fernet_tokens] key_repository: either'
|
'Failed to create [fernet_tokens] key_repository: either'
|
||||||
'it already exists or you don\'t have sufficient'
|
'it already exists or you don\'t have sufficient'
|
||||||
'permissions to create it'))
|
'permissions to create it')
|
||||||
|
|
||||||
def create_fernet_key(self):
|
def create_fernet_key(self):
|
||||||
fernet_key = fernet.Fernet.generate_key()
|
fernet_key = fernet.Fernet.generate_key()
|
||||||
|
@ -33,9 +33,9 @@ class DeviceMgmtNoop(abstract_driver.DeviceMGMTAbstractDriver):
|
|||||||
return 'Tacker VNFMgmt Noop Driver'
|
return 'Tacker VNFMgmt Noop Driver'
|
||||||
|
|
||||||
def mgmt_url(self, plugin, context, vnf):
|
def mgmt_url(self, plugin, context, vnf):
|
||||||
LOG.debug(_('mgmt_url %s'), vnf)
|
LOG.debug('mgmt_url %s', vnf)
|
||||||
return 'noop-mgmt-url'
|
return 'noop-mgmt-url'
|
||||||
|
|
||||||
def mgmt_call(self, plugin, context, vnf, kwargs):
|
def mgmt_call(self, plugin, context, vnf, kwargs):
|
||||||
LOG.debug(_('mgmt_call %(vnf)s %(kwargs)s'),
|
LOG.debug('mgmt_call %(vnf)s %(kwargs)s',
|
||||||
{'vnf': vnf, 'kwargs': kwargs})
|
{'vnf': vnf, 'kwargs': kwargs})
|
||||||
|
@ -49,7 +49,7 @@ class DeviceMgmtOpenWRT(abstract_driver.DeviceMGMTAbstractDriver):
|
|||||||
return 'Tacker VNFMgmt OpenWRT Driver'
|
return 'Tacker VNFMgmt OpenWRT Driver'
|
||||||
|
|
||||||
def mgmt_url(self, plugin, context, vnf):
|
def mgmt_url(self, plugin, context, vnf):
|
||||||
LOG.debug(_('mgmt_url %s'), vnf)
|
LOG.debug('mgmt_url %s', vnf)
|
||||||
return vnf.get('mgmt_url', '')
|
return vnf.get('mgmt_url', '')
|
||||||
|
|
||||||
@log.log
|
@log.log
|
||||||
@ -58,16 +58,16 @@ class DeviceMgmtOpenWRT(abstract_driver.DeviceMGMTAbstractDriver):
|
|||||||
password = cfg.CONF.openwrt.password
|
password = cfg.CONF.openwrt.password
|
||||||
try:
|
try:
|
||||||
cmd = "uci import %s; /etc/init.d/%s restart" % (service, service)
|
cmd = "uci import %s; /etc/init.d/%s restart" % (service, service)
|
||||||
LOG.debug(_('execute command: %(cmd)s on mgmt_ip_address '
|
LOG.debug('execute command: %(cmd)s on mgmt_ip_address '
|
||||||
'%(mgmt_ip)s'),
|
'%(mgmt_ip)s',
|
||||||
{'cmd': cmd,
|
{'cmd': cmd,
|
||||||
'mgmt_ip': mgmt_ip_address})
|
'mgmt_ip': mgmt_ip_address})
|
||||||
commander = cmd_executer.RemoteCommandExecutor(
|
commander = cmd_executer.RemoteCommandExecutor(
|
||||||
user, password, mgmt_ip_address)
|
user, password, mgmt_ip_address)
|
||||||
commander.execute_command(cmd, input_data=config)
|
commander.execute_command(cmd, input_data=config)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
LOG.error(_("While executing command on remote "
|
LOG.error("While executing command on remote "
|
||||||
"%(mgmt_ip)s: %(exception)s"),
|
"%(mgmt_ip)s: %(exception)s",
|
||||||
{'mgmt_ip': mgmt_ip_address,
|
{'mgmt_ip': mgmt_ip_address,
|
||||||
'exception': ex})
|
'exception': ex})
|
||||||
raise exceptions.MgmtDriverException()
|
raise exceptions.MgmtDriverException()
|
||||||
@ -96,8 +96,8 @@ class DeviceMgmtOpenWRT(abstract_driver.DeviceMGMTAbstractDriver):
|
|||||||
continue
|
continue
|
||||||
mgmt_ip_address = mgmt_url.get(vdu, '')
|
mgmt_ip_address = mgmt_url.get(vdu, '')
|
||||||
if not mgmt_ip_address:
|
if not mgmt_ip_address:
|
||||||
LOG.warning(_('tried to configure unknown mgmt '
|
LOG.warning('tried to configure unknown mgmt '
|
||||||
'address on VNF %(vnf)s VDU %(vdu)s'),
|
'address on VNF %(vnf)s VDU %(vdu)s',
|
||||||
{'vnf': vnf.get('name'),
|
{'vnf': vnf.get('name'),
|
||||||
'vdu': vdu})
|
'vdu': vdu})
|
||||||
continue
|
continue
|
||||||
|
@ -60,7 +60,7 @@ class VNFMonitorCeilometer(
|
|||||||
# -name/action-name?key=8785'
|
# -name/action-name?key=8785'
|
||||||
host = cfg.CONF.ceilometer.host
|
host = cfg.CONF.ceilometer.host
|
||||||
port = cfg.CONF.ceilometer.port
|
port = cfg.CONF.ceilometer.port
|
||||||
LOG.info(_("Tacker in heat listening on %(host)s:%(port)s"),
|
LOG.info("Tacker in heat listening on %(host)s:%(port)s",
|
||||||
{'host': host,
|
{'host': host,
|
||||||
'port': port})
|
'port': port})
|
||||||
origin = "http://%(host)s:%(port)s/v1.0/vnfs" % {
|
origin = "http://%(host)s:%(port)s/v1.0/vnfs" % {
|
||||||
|
@ -48,7 +48,7 @@ class VNFMonitorHTTPPing(abstract_driver.VNFMonitorAbstractDriver):
|
|||||||
return 'Tacker HTTP Ping Driver for VNF'
|
return 'Tacker HTTP Ping Driver for VNF'
|
||||||
|
|
||||||
def monitor_url(self, plugin, context, vnf):
|
def monitor_url(self, plugin, context, vnf):
|
||||||
LOG.debug(_('monitor_url %s'), vnf)
|
LOG.debug('monitor_url %s', vnf)
|
||||||
return vnf.get('monitor_url', '')
|
return vnf.get('monitor_url', '')
|
||||||
|
|
||||||
def _is_pingable(self, mgmt_ip='', retry=5, timeout=5, port=80, **kwargs):
|
def _is_pingable(self, mgmt_ip='', retry=5, timeout=5, port=80, **kwargs):
|
||||||
|
@ -47,7 +47,7 @@ class VNFMonitorPing(abstract_driver.VNFMonitorAbstractDriver):
|
|||||||
return 'Tacker VNFMonitor Ping Driver'
|
return 'Tacker VNFMonitor Ping Driver'
|
||||||
|
|
||||||
def monitor_url(self, plugin, context, vnf):
|
def monitor_url(self, plugin, context, vnf):
|
||||||
LOG.debug(_('monitor_url %s'), vnf)
|
LOG.debug('monitor_url %s', vnf)
|
||||||
return vnf.get('monitor_url', '')
|
return vnf.get('monitor_url', '')
|
||||||
|
|
||||||
def _is_pingable(self, mgmt_ip="", count=5, timeout=1, interval='0.2',
|
def _is_pingable(self, mgmt_ip="", count=5, timeout=1, interval='0.2',
|
||||||
|
@ -161,11 +161,11 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
raise exceptions.Invalid('Not a valid template: '
|
raise exceptions.Invalid('Not a valid template: '
|
||||||
'tosca_definitions_version is missing.')
|
'tosca_definitions_version is missing.')
|
||||||
|
|
||||||
LOG.debug(_('vnfd %s'), vnfd_data)
|
LOG.debug('vnfd %s', vnfd_data)
|
||||||
|
|
||||||
service_types = vnfd_data.get('service_types')
|
service_types = vnfd_data.get('service_types')
|
||||||
if not attributes.is_attr_set(service_types):
|
if not attributes.is_attr_set(service_types):
|
||||||
LOG.debug(_('service type must be specified'))
|
LOG.debug('service type must be specified')
|
||||||
raise vnfm.ServiceTypesNotSpecified()
|
raise vnfm.ServiceTypesNotSpecified()
|
||||||
for service_type in service_types:
|
for service_type in service_types:
|
||||||
# TODO(yamahata):
|
# TODO(yamahata):
|
||||||
@ -189,7 +189,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
return
|
return
|
||||||
|
|
||||||
inner_vnfd_dict = yaml.safe_load(vnfd_yaml)
|
inner_vnfd_dict = yaml.safe_load(vnfd_yaml)
|
||||||
LOG.debug(_('vnfd_dict: %s'), inner_vnfd_dict)
|
LOG.debug('vnfd_dict: %s', inner_vnfd_dict)
|
||||||
|
|
||||||
# Prepend the tacker_defs.yaml import file with the full
|
# Prepend the tacker_defs.yaml import file with the full
|
||||||
# path to the file
|
# path to the file
|
||||||
@ -199,7 +199,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
tosca = ToscaTemplate(a_file=False,
|
tosca = ToscaTemplate(a_file=False,
|
||||||
yaml_dict_tpl=inner_vnfd_dict)
|
yaml_dict_tpl=inner_vnfd_dict)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
LOG.exception(_("tosca-parser error: %s"), str(e))
|
LOG.exception("tosca-parser error: %s", str(e))
|
||||||
raise vnfm.ToscaParserFailed(error_msg_details=str(e))
|
raise vnfm.ToscaParserFailed(error_msg_details=str(e))
|
||||||
|
|
||||||
if ('description' not in vnfd_dict or
|
if ('description' not in vnfd_dict or
|
||||||
@ -214,7 +214,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
|
|
||||||
vnfd_dict['mgmt_driver'] = toscautils.get_mgmt_driver(
|
vnfd_dict['mgmt_driver'] = toscautils.get_mgmt_driver(
|
||||||
tosca)
|
tosca)
|
||||||
LOG.debug(_('vnfd %s'), vnfd)
|
LOG.debug('vnfd %s', vnfd)
|
||||||
|
|
||||||
def add_vnf_to_monitor(self, context, vnf_dict):
|
def add_vnf_to_monitor(self, context, vnf_dict):
|
||||||
dev_attrs = vnf_dict['attributes']
|
dev_attrs = vnf_dict['attributes']
|
||||||
@ -305,7 +305,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
try:
|
try:
|
||||||
self.mgmt_call(context, vnf_dict, kwargs)
|
self.mgmt_call(context, vnf_dict, kwargs)
|
||||||
except exceptions.MgmtDriverException:
|
except exceptions.MgmtDriverException:
|
||||||
LOG.error(_('VNF configuration failed'))
|
LOG.error('VNF configuration failed')
|
||||||
new_status = constants.ERROR
|
new_status = constants.ERROR
|
||||||
self.set_vnf_error_status_reason(context, vnf_id,
|
self.set_vnf_error_status_reason(context, vnf_id,
|
||||||
'Unable to configure VDU')
|
'Unable to configure VDU')
|
||||||
@ -325,7 +325,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
vnf_dict = self._create_vnf_pre(
|
vnf_dict = self._create_vnf_pre(
|
||||||
context, vnf) if not vnf.get('id') else vnf
|
context, vnf) if not vnf.get('id') else vnf
|
||||||
vnf_id = vnf_dict['id']
|
vnf_id = vnf_dict['id']
|
||||||
LOG.debug(_('vnf_dict %s'), vnf_dict)
|
LOG.debug('vnf_dict %s', vnf_dict)
|
||||||
self.mgmt_create_pre(context, vnf_dict)
|
self.mgmt_create_pre(context, vnf_dict)
|
||||||
self.add_alarm_url_to_vnf(context, vnf_dict)
|
self.add_alarm_url_to_vnf(context, vnf_dict)
|
||||||
try:
|
try:
|
||||||
@ -381,8 +381,8 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
self._report_deprecated_yaml_str()
|
self._report_deprecated_yaml_str()
|
||||||
infra_driver, vim_auth = self._get_infra_driver(context, vnf_info)
|
infra_driver, vim_auth = self._get_infra_driver(context, vnf_info)
|
||||||
if infra_driver not in self._vnf_manager:
|
if infra_driver not in self._vnf_manager:
|
||||||
LOG.debug(_('unknown vim driver '
|
LOG.debug('unknown vim driver '
|
||||||
'%(infra_driver)s in %(drivers)s'),
|
'%(infra_driver)s in %(drivers)s',
|
||||||
{'infra_driver': infra_driver,
|
{'infra_driver': infra_driver,
|
||||||
'drivers': cfg.CONF.tacker.infra_driver})
|
'drivers': cfg.CONF.tacker.infra_driver})
|
||||||
raise vnfm.InvalidInfraDriver(vim_name=infra_driver)
|
raise vnfm.InvalidInfraDriver(vim_name=infra_driver)
|
||||||
@ -422,7 +422,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
region_name=region_name)
|
region_name=region_name)
|
||||||
self.mgmt_call(context, vnf_dict, kwargs)
|
self.mgmt_call(context, vnf_dict, kwargs)
|
||||||
except exceptions.MgmtDriverException as e:
|
except exceptions.MgmtDriverException as e:
|
||||||
LOG.error(_('VNF configuration failed'))
|
LOG.error('VNF configuration failed')
|
||||||
new_status = constants.ERROR
|
new_status = constants.ERROR
|
||||||
self._vnf_monitor.delete_hosting_vnf(vnf_dict['id'])
|
self._vnf_monitor.delete_hosting_vnf(vnf_dict['id'])
|
||||||
self.set_vnf_error_status_reason(context, vnf_dict['id'],
|
self.set_vnf_error_status_reason(context, vnf_dict['id'],
|
||||||
@ -489,7 +489,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
e = e_
|
e = e_
|
||||||
vnf_dict['status'] = constants.ERROR
|
vnf_dict['status'] = constants.ERROR
|
||||||
vnf_dict['error_reason'] = six.text_type(e)
|
vnf_dict['error_reason'] = six.text_type(e)
|
||||||
LOG.exception(_('_delete_vnf_wait'))
|
LOG.exception('_delete_vnf_wait')
|
||||||
self.set_vnf_error_status_reason(context, vnf_dict['id'],
|
self.set_vnf_error_status_reason(context, vnf_dict['id'],
|
||||||
vnf_dict['error_reason'])
|
vnf_dict['error_reason'])
|
||||||
|
|
||||||
@ -553,7 +553,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
policy=policy['id']
|
policy=policy['id']
|
||||||
)
|
)
|
||||||
|
|
||||||
LOG.debug(_("Policy %s is validated successfully"), policy['id'])
|
LOG.debug("Policy %s is validated successfully", policy['id'])
|
||||||
|
|
||||||
def _get_status():
|
def _get_status():
|
||||||
if policy['action'] == constants.ACTION_SCALE_IN:
|
if policy['action'] == constants.ACTION_SCALE_IN:
|
||||||
@ -570,7 +570,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
policy,
|
policy,
|
||||||
[constants.ACTIVE],
|
[constants.ACTIVE],
|
||||||
status)
|
status)
|
||||||
LOG.debug(_("Policy %(policy)s vnf is at %(status)s"),
|
LOG.debug("Policy %(policy)s vnf is at %(status)s",
|
||||||
{'policy': policy['id'],
|
{'policy': policy['id'],
|
||||||
'status': status})
|
'status': status})
|
||||||
return result
|
return result
|
||||||
@ -583,7 +583,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
[status],
|
[status],
|
||||||
new_status,
|
new_status,
|
||||||
mgmt_url)
|
mgmt_url)
|
||||||
LOG.debug(_("Policy %(policy)s vnf is at %(status)s"),
|
LOG.debug("Policy %(policy)s vnf is at %(status)s",
|
||||||
{'policy': policy['id'],
|
{'policy': policy['id'],
|
||||||
'status': new_status})
|
'status': new_status})
|
||||||
return result
|
return result
|
||||||
@ -600,11 +600,11 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
policy=policy,
|
policy=policy,
|
||||||
region_name=region_name
|
region_name=region_name
|
||||||
)
|
)
|
||||||
LOG.debug(_("Policy %s action is started successfully"),
|
LOG.debug("Policy %s action is started successfully",
|
||||||
policy['id'])
|
policy['id'])
|
||||||
return last_event_id
|
return last_event_id
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
LOG.error(_("Policy %s action is failed to start"),
|
LOG.error("Policy %s action is failed to start",
|
||||||
policy)
|
policy)
|
||||||
with excutils.save_and_reraise_exception():
|
with excutils.save_and_reraise_exception():
|
||||||
vnf['status'] = constants.ERROR
|
vnf['status'] = constants.ERROR
|
||||||
@ -617,7 +617,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
# wait
|
# wait
|
||||||
def _vnf_policy_action_wait():
|
def _vnf_policy_action_wait():
|
||||||
try:
|
try:
|
||||||
LOG.debug(_("Policy %s action is in progress"),
|
LOG.debug("Policy %s action is in progress",
|
||||||
policy['id'])
|
policy['id'])
|
||||||
mgmt_url = self._vnf_manager.invoke(
|
mgmt_url = self._vnf_manager.invoke(
|
||||||
infra_driver,
|
infra_driver,
|
||||||
@ -629,12 +629,12 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
region_name=region_name,
|
region_name=region_name,
|
||||||
last_event_id=last_event_id
|
last_event_id=last_event_id
|
||||||
)
|
)
|
||||||
LOG.debug(_("Policy %s action is completed successfully"),
|
LOG.debug("Policy %s action is completed successfully",
|
||||||
policy['id'])
|
policy['id'])
|
||||||
_handle_vnf_scaling_post(constants.ACTIVE, mgmt_url)
|
_handle_vnf_scaling_post(constants.ACTIVE, mgmt_url)
|
||||||
# TODO(kanagaraj-manickam): Add support for config and mgmt
|
# TODO(kanagaraj-manickam): Add support for config and mgmt
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
LOG.error(_("Policy %s action is failed to complete"),
|
LOG.error("Policy %s action is failed to complete",
|
||||||
policy['id'])
|
policy['id'])
|
||||||
with excutils.save_and_reraise_exception():
|
with excutils.save_and_reraise_exception():
|
||||||
self.set_vnf_error_status_reason(
|
self.set_vnf_error_status_reason(
|
||||||
@ -750,7 +750,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
if not policy_:
|
if not policy_:
|
||||||
if action not in constants.DEFAULT_ALARM_ACTIONS:
|
if action not in constants.DEFAULT_ALARM_ACTIONS:
|
||||||
policy_ = self.get_vnf_policy(context, action, vnf_id)
|
policy_ = self.get_vnf_policy(context, action, vnf_id)
|
||||||
LOG.debug(_("Trigger %s is validated successfully"), trigger)
|
LOG.debug("Trigger %s is validated successfully", trigger)
|
||||||
return policy_, action_
|
return policy_, action_
|
||||||
# validate url
|
# validate url
|
||||||
|
|
||||||
@ -781,7 +781,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
vnf_dict = trigger['vnf']
|
vnf_dict = trigger['vnf']
|
||||||
if trigger['action_name'] in constants.DEFAULT_ALARM_ACTIONS:
|
if trigger['action_name'] in constants.DEFAULT_ALARM_ACTIONS:
|
||||||
action = trigger['action_name']
|
action = trigger['action_name']
|
||||||
LOG.debug(_('vnf for monitoring: %s'), vnf_dict)
|
LOG.debug('vnf for monitoring: %s', vnf_dict)
|
||||||
self._vnf_action.invoke(
|
self._vnf_action.invoke(
|
||||||
action, 'execute_action', plugin=self, context=context,
|
action, 'execute_action', plugin=self, context=context,
|
||||||
vnf_dict=vnf_dict, args={})
|
vnf_dict=vnf_dict, args={})
|
||||||
@ -791,8 +791,8 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
|
|||||||
bckend_policy_type = bckend_policy['type']
|
bckend_policy_type = bckend_policy['type']
|
||||||
if bckend_policy_type == constants.POLICY_SCALING:
|
if bckend_policy_type == constants.POLICY_SCALING:
|
||||||
if vnf_dict['status'] != constants.ACTIVE:
|
if vnf_dict['status'] != constants.ACTIVE:
|
||||||
LOG.info(_("Scaling Policy action skipped due to status:"
|
LOG.info("Scaling Policy action skipped due to status:"
|
||||||
" %(status)s for vnf: %(vnfid)s"),
|
" %(status)s for vnf: %(vnfid)s",
|
||||||
{"status": vnf_dict['status'],
|
{"status": vnf_dict['status'],
|
||||||
"vnfid": vnf_dict['id']})
|
"vnfid": vnf_dict['id']})
|
||||||
return
|
return
|
||||||
|
@ -44,7 +44,7 @@ class VNFActionLogOnly(abstract_action.AbstractPolicyAction):
|
|||||||
|
|
||||||
def execute_action(self, plugin, context, vnf_dict, args):
|
def execute_action(self, plugin, context, vnf_dict, args):
|
||||||
vnf_id = vnf_dict['id']
|
vnf_id = vnf_dict['id']
|
||||||
LOG.error(_('vnf %s dead'), vnf_id)
|
LOG.error('vnf %s dead', vnf_id)
|
||||||
_log_monitor_events(context,
|
_log_monitor_events(context,
|
||||||
vnf_dict,
|
vnf_dict,
|
||||||
"ActionLogOnly invoked")
|
"ActionLogOnly invoked")
|
||||||
@ -69,4 +69,4 @@ class VNFActionLogAndKill(abstract_action.AbstractPolicyAction):
|
|||||||
if vnf_dict['attributes'].get('monitoring_policy'):
|
if vnf_dict['attributes'].get('monitoring_policy'):
|
||||||
plugin._vnf_monitor.mark_dead(vnf_dict['id'])
|
plugin._vnf_monitor.mark_dead(vnf_dict['id'])
|
||||||
plugin.delete_vnf(context, vnf_id)
|
plugin.delete_vnf(context, vnf_id)
|
||||||
LOG.error(_('vnf %s dead'), vnf_id)
|
LOG.error('vnf %s dead', vnf_id)
|
||||||
|
@ -46,14 +46,14 @@ class VNFActionRespawn(abstract_action.AbstractPolicyAction):
|
|||||||
|
|
||||||
def execute_action(self, plugin, context, vnf_dict, args):
|
def execute_action(self, plugin, context, vnf_dict, args):
|
||||||
vnf_id = vnf_dict['id']
|
vnf_id = vnf_dict['id']
|
||||||
LOG.info(_('vnf %s is dead and needs to be respawned'), vnf_id)
|
LOG.info('vnf %s is dead and needs to be respawned', vnf_id)
|
||||||
attributes = vnf_dict['attributes']
|
attributes = vnf_dict['attributes']
|
||||||
vim_id = vnf_dict['vim_id']
|
vim_id = vnf_dict['vim_id']
|
||||||
|
|
||||||
def _update_failure_count():
|
def _update_failure_count():
|
||||||
failure_count = int(attributes.get('failure_count', '0')) + 1
|
failure_count = int(attributes.get('failure_count', '0')) + 1
|
||||||
failure_count_str = str(failure_count)
|
failure_count_str = str(failure_count)
|
||||||
LOG.debug(_("vnf %(vnf_id)s failure count %(failure_count)s"),
|
LOG.debug("vnf %(vnf_id)s failure count %(failure_count)s",
|
||||||
{'vnf_id': vnf_id, 'failure_count': failure_count_str})
|
{'vnf_id': vnf_id, 'failure_count': failure_count_str})
|
||||||
attributes['failure_count'] = failure_count_str
|
attributes['failure_count'] = failure_count_str
|
||||||
attributes['dead_instance_id_' + failure_count_str] = vnf_dict[
|
attributes['dead_instance_id_' + failure_count_str] = vnf_dict[
|
||||||
@ -69,13 +69,13 @@ class VNFActionRespawn(abstract_action.AbstractPolicyAction):
|
|||||||
heatclient = hc.HeatClient(auth_attr=vim_auth,
|
heatclient = hc.HeatClient(auth_attr=vim_auth,
|
||||||
region_name=region_name)
|
region_name=region_name)
|
||||||
heatclient.delete(vnf_dict['instance_id'])
|
heatclient.delete(vnf_dict['instance_id'])
|
||||||
LOG.debug(_("Heat stack %s delete initiated"), vnf_dict[
|
LOG.debug("Heat stack %s delete initiated",
|
||||||
'instance_id'])
|
vnf_dict['instance_id'])
|
||||||
_log_monitor_events(context, vnf_dict, "ActionRespawnHeat invoked")
|
_log_monitor_events(context, vnf_dict, "ActionRespawnHeat invoked")
|
||||||
|
|
||||||
def _respawn_vnf():
|
def _respawn_vnf():
|
||||||
update_vnf_dict = plugin.create_vnf_sync(context, vnf_dict)
|
update_vnf_dict = plugin.create_vnf_sync(context, vnf_dict)
|
||||||
LOG.info(_('respawned new vnf %s'), update_vnf_dict['id'])
|
LOG.info('respawned new vnf %s', update_vnf_dict['id'])
|
||||||
plugin.config_vnf(context, update_vnf_dict)
|
plugin.config_vnf(context, update_vnf_dict)
|
||||||
return update_vnf_dict
|
return update_vnf_dict
|
||||||
|
|
||||||
@ -87,8 +87,8 @@ class VNFActionRespawn(abstract_action.AbstractPolicyAction):
|
|||||||
_delete_heat_stack(vim_res['vim_auth'])
|
_delete_heat_stack(vim_res['vim_auth'])
|
||||||
updated_vnf = _respawn_vnf()
|
updated_vnf = _respawn_vnf()
|
||||||
plugin.add_vnf_to_monitor(context, updated_vnf)
|
plugin.add_vnf_to_monitor(context, updated_vnf)
|
||||||
LOG.debug(_("VNF %s added to monitor thread"), updated_vnf[
|
LOG.debug("VNF %s added to monitor thread",
|
||||||
'id'])
|
updated_vnf['id'])
|
||||||
if vnf_dict['attributes'].get('alarming_policy'):
|
if vnf_dict['attributes'].get('alarming_policy'):
|
||||||
_delete_heat_stack(vim_res['vim_auth'])
|
_delete_heat_stack(vim_res['vim_auth'])
|
||||||
vnf_dict['attributes'].pop('alarming_policy')
|
vnf_dict['attributes'].pop('alarming_policy')
|
||||||
|
@ -39,8 +39,8 @@ class VimClient(object):
|
|||||||
constants.NFVO)
|
constants.NFVO)
|
||||||
|
|
||||||
if not vim_id:
|
if not vim_id:
|
||||||
LOG.debug(_('VIM id not provided. Attempting to find default '
|
LOG.debug('VIM id not provided. Attempting to find default '
|
||||||
'VIM information'))
|
'VIM information')
|
||||||
try:
|
try:
|
||||||
vim_info = nfvo_plugin.get_default_vim(context)
|
vim_info = nfvo_plugin.get_default_vim(context)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
@ -52,7 +52,7 @@ class VimClient(object):
|
|||||||
mask_password=False)
|
mask_password=False)
|
||||||
except Exception:
|
except Exception:
|
||||||
raise nfvo.VimNotFoundException(vim_id=vim_id)
|
raise nfvo.VimNotFoundException(vim_id=vim_id)
|
||||||
LOG.debug(_('VIM info found for vim id %s'), vim_id)
|
LOG.debug('VIM info found for vim id %s', vim_id)
|
||||||
if region_name and not self.region_valid(vim_info['placement_attr']
|
if region_name and not self.region_valid(vim_info['placement_attr']
|
||||||
['regions'], region_name):
|
['regions'], region_name):
|
||||||
raise nfvo.VimRegionNotFoundException(region_name=region_name)
|
raise nfvo.VimRegionNotFoundException(region_name=region_name)
|
||||||
@ -101,7 +101,7 @@ class VimClient(object):
|
|||||||
|
|
||||||
f = fernet.Fernet(vim_key)
|
f = fernet.Fernet(vim_key)
|
||||||
if not f:
|
if not f:
|
||||||
LOG.warning(_('Unable to decode VIM auth'))
|
LOG.warning('Unable to decode VIM auth')
|
||||||
raise nfvo.VimNotFoundException(
|
raise nfvo.VimNotFoundException(
|
||||||
'Unable to decode VIM auth key')
|
'Unable to decode VIM auth key')
|
||||||
return f.decrypt(cred)
|
return f.decrypt(cred)
|
||||||
@ -109,7 +109,7 @@ class VimClient(object):
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def _find_vim_key(vim_id):
|
def _find_vim_key(vim_id):
|
||||||
key_file = os.path.join(CONF.vim_keys.openstack, vim_id)
|
key_file = os.path.join(CONF.vim_keys.openstack, vim_id)
|
||||||
LOG.debug(_('Attempting to open key file for vim id %s'), vim_id)
|
LOG.debug('Attempting to open key file for vim id %s', vim_id)
|
||||||
with open(key_file, 'r') as f:
|
with open(key_file, 'r') as f:
|
||||||
return f.read()
|
return f.read()
|
||||||
LOG.warning(_('VIM id invalid or key not found for %s'), vim_id)
|
LOG.warning('VIM id invalid or key not found for %s', vim_id)
|
||||||
|
@ -145,7 +145,7 @@ class Server(object):
|
|||||||
family = info[0]
|
family = info[0]
|
||||||
bind_addr = info[-1]
|
bind_addr = info[-1]
|
||||||
except Exception:
|
except Exception:
|
||||||
LOG.exception(_("Unable to listen on %(host)s:%(port)s"),
|
LOG.exception("Unable to listen on %(host)s:%(port)s",
|
||||||
{'host': host, 'port': port})
|
{'host': host, 'port': port})
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
@ -355,7 +355,7 @@ class Request(webob.Request):
|
|||||||
def get_content_type(self):
|
def get_content_type(self):
|
||||||
allowed_types = ("application/json")
|
allowed_types = ("application/json")
|
||||||
if "Content-Type" not in self.headers:
|
if "Content-Type" not in self.headers:
|
||||||
LOG.debug(_("Missing Content-Type"))
|
LOG.debug("Missing Content-Type")
|
||||||
return None
|
return None
|
||||||
_type = self.content_type
|
_type = self.content_type
|
||||||
if _type in allowed_types:
|
if _type in allowed_types:
|
||||||
@ -533,23 +533,23 @@ class RequestDeserializer(object):
|
|||||||
try:
|
try:
|
||||||
content_type = request.best_match_content_type()
|
content_type = request.best_match_content_type()
|
||||||
except exception.InvalidContentType:
|
except exception.InvalidContentType:
|
||||||
LOG.debug(_("Unrecognized Content-Type provided in request"))
|
LOG.debug("Unrecognized Content-Type provided in request")
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
if content_type is None:
|
if content_type is None:
|
||||||
LOG.debug(_("No Content-Type provided in request"))
|
LOG.debug("No Content-Type provided in request")
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
if not len(request.body) > 0:
|
if not len(request.body) > 0:
|
||||||
LOG.debug(_("Empty body provided in request"))
|
LOG.debug("Empty body provided in request")
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
deserializer = self.get_body_deserializer(content_type)
|
deserializer = self.get_body_deserializer(content_type)
|
||||||
except exception.InvalidContentType:
|
except exception.InvalidContentType:
|
||||||
with excutils.save_and_reraise_exception():
|
with excutils.save_and_reraise_exception():
|
||||||
LOG.debug(_("Unable to deserialize body as provided "
|
LOG.debug("Unable to deserialize body as provided "
|
||||||
"Content-Type"))
|
"Content-Type")
|
||||||
|
|
||||||
return deserializer.deserialize(request.body, action)
|
return deserializer.deserialize(request.body, action)
|
||||||
|
|
||||||
@ -780,28 +780,28 @@ class Resource(Application):
|
|||||||
def __call__(self, request):
|
def __call__(self, request):
|
||||||
"""WSGI method that controls (de)serialization and method dispatch."""
|
"""WSGI method that controls (de)serialization and method dispatch."""
|
||||||
|
|
||||||
LOG.info(_("%(method)s %(url)s"), {"method": request.method,
|
LOG.info("%(method)s %(url)s", {"method": request.method,
|
||||||
"url": request.url})
|
"url": request.url})
|
||||||
|
|
||||||
try:
|
try:
|
||||||
action, args, accept = self.deserializer.deserialize(request)
|
action, args, accept = self.deserializer.deserialize(request)
|
||||||
except exception.InvalidContentType:
|
except exception.InvalidContentType:
|
||||||
msg = _("Unsupported Content-Type")
|
LOG.exception("InvalidContentType: Unsupported Content-Type")
|
||||||
LOG.exception(_("InvalidContentType: %s"), msg)
|
return Fault(webob.exc.HTTPBadRequest(
|
||||||
return Fault(webob.exc.HTTPBadRequest(explanation=msg))
|
explanation=_("Unsupported Content-Type")))
|
||||||
except exception.MalformedRequestBody:
|
except exception.MalformedRequestBody:
|
||||||
msg = _("Malformed request body")
|
LOG.exception("MalformedRequestBody: Malformed request body")
|
||||||
LOG.exception(_("MalformedRequestBody: %s"), msg)
|
return Fault(webob.exc.HTTPBadRequest(
|
||||||
return Fault(webob.exc.HTTPBadRequest(explanation=msg))
|
explanation=_("Malformed request body")))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
action_result = self.dispatch(request, action, args)
|
action_result = self.dispatch(request, action, args)
|
||||||
except webob.exc.HTTPException as ex:
|
except webob.exc.HTTPException as ex:
|
||||||
LOG.info(_("HTTP exception thrown: %s"), ex)
|
LOG.info("HTTP exception thrown: %s", ex)
|
||||||
action_result = Fault(ex,
|
action_result = Fault(ex,
|
||||||
self._fault_body_function)
|
self._fault_body_function)
|
||||||
except Exception:
|
except Exception:
|
||||||
LOG.exception(_("Internal error"))
|
LOG.exception("Internal error")
|
||||||
# Do not include the traceback to avoid returning it to clients.
|
# Do not include the traceback to avoid returning it to clients.
|
||||||
action_result = Fault(webob.exc.HTTPServerError(),
|
action_result = Fault(webob.exc.HTTPServerError(),
|
||||||
self._fault_body_function)
|
self._fault_body_function)
|
||||||
|
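For reference, a minimal standalone sketch of the logging convention this change applies across the tree: messages passed to oslo.log stay plain strings with deferred %-interpolation and no _() wrapper, while user-facing exception text (reasons, explanations) may still go through _(). The _ import path and the exception class below are illustrative assumptions, not part of the commit.

from oslo_log import log as logging

# Assumed i18n import path, following the usual OpenStack project layout;
# only user-facing exception text goes through _(), never log messages.
from tacker._i18n import _

LOG = logging.getLogger(__name__)


class StackCreateWaitFailed(Exception):
    """Illustrative stand-in for a user-facing Tacker exception."""


def wait_for_stack(heatclient, stack_id, retries=60):
    while retries > 0:
        try:
            stack = heatclient.get(stack_id)
        except Exception:
            # Plain string, no _(); interpolation is deferred to the logger.
            LOG.warning("Heat API request failed while waiting for the "
                        "stack %(stack)s to be created", {'stack': stack_id})
        else:
            LOG.debug('stack status: %s', stack.stack_status)
            if stack.stack_status == 'CREATE_COMPLETE':
                return stack
        retries -= 1
    # Exception text shown to users keeps the translation marker.
    error_reason = _('Stack %s did not reach CREATE_COMPLETE') % stack_id
    LOG.warning("VNF Creation failed: %(reason)s", {'reason': error_reason})
    raise StackCreateWaitFailed(error_reason)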