Syncing with Mitaka dependencies

Change-Id: Ice1c6555d7e008fff0cb174f1ed6d6d4a3f152e3
Sumit Naiksatam 2016-02-05 20:15:26 -08:00
parent 8e606f5a6a
commit c614996bcf
50 changed files with 983 additions and 847 deletions


@ -12,6 +12,8 @@
import contextlib
from neutron._i18n import _
from neutron._i18n import _LE
from neutron import context as n_ctx
from oslo_config import cfg
from oslo_log import log as logging
@ -19,6 +21,7 @@ from oslo_utils import importutils
from stevedore import driver
LOG = logging.getLogger(__name__)
cfg.CONF.import_group('keystone_authtoken', 'keystonemiddleware.auth_token')
@contextlib.contextmanager
@ -49,8 +52,8 @@ def load_plugin(namespace, plugin):
try:
plugin_class = importutils.import_class(plugin)
except ImportError as e2:
LOG.exception(_("Error loading plugin by name, %s"), e1)
LOG.exception(_("Error loading plugin by class, %s"), e2)
LOG.exception(_LE("Error loading plugin by name, %s"), e1)
LOG.exception(_LE("Error loading plugin by class, %s"), e2)
raise ImportError(_("Plugin not found."))
return plugin_class()
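Editor's note: the hunk above only switches the loader's error logging to the lazy _LE marker. For context, a minimal standalone sketch of the two-stage load it wraps, trying a stevedore entry-point name first and falling back to a dotted class path, assuming only oslo.utils and stevedore are installed; the function name mirrors the hunk but the body is illustrative, not the module's actual code:

import logging

from oslo_utils import importutils
from stevedore import driver

LOG = logging.getLogger(__name__)


def load_plugin(namespace, plugin):
    # First treat 'plugin' as a stevedore entry-point name in 'namespace'.
    try:
        manager = driver.DriverManager(namespace, plugin, invoke_on_load=True)
        return manager.driver
    except RuntimeError as e1:
        # Fall back to importing 'plugin' as a dotted class path.
        try:
            plugin_class = importutils.import_class(plugin)
        except ImportError as e2:
            LOG.exception("Error loading plugin by name, %s", e1)
            LOG.exception("Error loading plugin by class, %s", e2)
            raise ImportError("Plugin not found.")
        return plugin_class()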


@ -29,7 +29,8 @@ class API(object):
obj_method = getattr(neutron, action)
return obj_method(resource_id)[resource]
def _list_resources(self, context, resource, filters={}):
def _list_resources(self, context, resource, filters=None):
filters = filters or {}
resources = resource + 's'
action = 'list_' + resources
neutron = client.get_client(context)
@ -54,7 +55,8 @@ class API(object):
def show_network(self, context, net_id):
return self._show_resource(context, 'network', net_id)
def list_networks(self, context, filters={}):
def list_networks(self, context, filters=None):
filters = filters or {}
return self._list_resources(context, 'network', filters)
def update_network(self, context, net_id, network):
@ -69,7 +71,8 @@ class API(object):
def show_subnet(self, context, subnet_id):
return self._show_resource(context, 'subnet', subnet_id)
def list_subnets(self, context, filters={}):
def list_subnets(self, context, filters=None):
filters = filters or {}
return self._list_resources(context, 'subnet', filters)
def update_subnet(self, context, subnet_id, subnet):
@ -84,7 +87,8 @@ class API(object):
def show_port(self, context, port_id):
return self._show_resource(context, 'port', port_id)
def list_ports(self, context, filters={}):
def list_ports(self, context, filters=None):
filters = filters or {}
return self._list_resources(context, 'port', filters)
def update_port(self, context, port_id, port):
@ -99,7 +103,8 @@ class API(object):
def show_security_group(self, context, sg_id):
return self._show_resource(context, 'security_group', sg_id)
def list_security_groups(self, context, filters={}):
def list_security_groups(self, context, filters=None):
filters = filters or {}
return self._list_resources(context, 'security_group', filters)
def update_security_group(self, context, sg_id, sg):
@ -114,7 +119,8 @@ class API(object):
def show_security_group_rule(self, context, rule_id):
return self._show_resource(context, 'security_group_rule', rule_id)
def list_security_group_rules(self, context, filters={}):
def list_security_group_rules(self, context, filters=None):
filters = filters or {}
return self._list_resources(context, 'security_group_rule', filters)
# REVISIT(yi): update_security_group_rule not supported in neutron yet
@ -133,7 +139,8 @@ class API(object):
def show_router(self, context, router_id):
return self._show_resource(context, 'router', router_id)
def list_routers(self, context, filters={}):
def list_routers(self, context, filters=None):
filters = filters or {}
return self._list_resources(context, 'router', filters)
def update_router(self, context, router_id, router):
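Editor's note: the filters={} to filters=None change above, repeated for every list_* helper, avoids Python's shared-mutable-default pitfall: a dict built in the signature is created once and silently reused across calls. A minimal standalone sketch of the pattern with an illustrative helper name:

def list_resources(resource, filters=None):
    # Build the dict inside the call so each invocation gets its own;
    # a default of {} in the signature would be shared and could leak
    # filter keys from one caller into the next.
    filters = filters or {}
    filters.setdefault('tenant_id', [])
    return '%ss' % resource, filters


# Each call sees a fresh dict, even when no filters are passed:
print(list_resources('network'))                  # ('networks', {'tenant_id': []})
print(list_resources('port', {'name': ['web']}))  # filter dict stays caller-local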


@ -11,6 +11,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron._i18n import _LE
from neutron._i18n import _LW
from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api
from neutron.common import constants as const
from neutron.common import exceptions as n_exc
@ -50,7 +52,7 @@ class LocalAPI(object):
plugins = manager.NeutronManager.get_service_plugins()
l3_plugin = plugins.get(pconst.L3_ROUTER_NAT)
if not l3_plugin:
LOG.error(_("No L3 router service plugin found."))
LOG.error(_LE("No L3 router service plugin found."))
raise exc.GroupPolicyDeploymentError()
return l3_plugin
@ -61,7 +63,7 @@ class LocalAPI(object):
plugins = manager.NeutronManager.get_service_plugins()
group_policy_plugin = plugins.get(pconst.GROUP_POLICY)
if not group_policy_plugin:
LOG.error(_("No GroupPolicy service plugin found."))
LOG.error(_LE("No GroupPolicy service plugin found."))
raise exc.GroupPolicyDeploymentError()
return group_policy_plugin
@ -72,7 +74,7 @@ class LocalAPI(object):
plugins = manager.NeutronManager.get_service_plugins()
servicechain_plugin = plugins.get(pconst.SERVICECHAIN)
if not servicechain_plugin:
LOG.error(_("No Servicechain service plugin found."))
LOG.error(_LE("No Servicechain service plugin found."))
raise exc.GroupPolicyDeploymentError()
return servicechain_plugin
@ -101,9 +103,8 @@ class LocalAPI(object):
{resource: obj})
# REVISIT(rkukura): Do create.end notification?
if cfg.CONF.dhcp_agent_notification:
self._dhcp_agent_notifier.notify(context,
{resource: obj},
resource + '.create.end')
self._dhcp_agent_notifier.notify(
context, {resource: obj}, resource + '.create.end')
return obj
def _update_resource(self, plugin, context, resource, resource_id, attrs,
@ -186,7 +187,7 @@ class LocalAPI(object):
self._delete_resource(self._core_plugin,
plugin_context, 'port', port_id)
except n_exc.PortNotFound:
LOG.warning(_('Port %s already deleted'), port_id)
LOG.warning(_LW('Port %s already deleted'), port_id)
def _get_subnet(self, plugin_context, subnet_id):
return self._get_resource(self._core_plugin, plugin_context, 'subnet',
@ -210,7 +211,7 @@ class LocalAPI(object):
self._delete_resource(self._core_plugin, plugin_context, 'subnet',
subnet_id)
except n_exc.SubnetNotFound:
LOG.warning(_('Subnet %s already deleted'), subnet_id)
LOG.warning(_LW('Subnet %s already deleted'), subnet_id)
def _get_network(self, plugin_context, network_id):
return self._get_resource(self._core_plugin, plugin_context, 'network',
@ -230,7 +231,7 @@ class LocalAPI(object):
self._delete_resource(self._core_plugin, plugin_context,
'network', network_id)
except n_exc.NetworkNotFound:
LOG.warning(_('Network %s already deleted'), network_id)
LOG.warning(_LW('Network %s already deleted'), network_id)
def _get_router(self, plugin_context, router_id):
return self._get_resource(self._l3_plugin, plugin_context, 'router',
@ -259,7 +260,7 @@ class LocalAPI(object):
self._l3_plugin.remove_router_interface(plugin_context, router_id,
interface_info)
except l3.RouterInterfaceNotFoundForSubnet:
LOG.warning(_('Router interface already deleted for subnet %s'),
LOG.warning(_LW('Router interface already deleted for subnet %s'),
interface_info)
def _add_router_gw_interface(self, plugin_context, router_id, gw_info):
@ -278,7 +279,7 @@ class LocalAPI(object):
self._delete_resource(self._l3_plugin, plugin_context, 'router',
router_id)
except l3.RouterNotFound:
LOG.warning(_('Router %s already deleted'), router_id)
LOG.warning(_LW('Router %s already deleted'), router_id)
def _get_sg(self, plugin_context, sg_id):
return self._get_resource(
@ -302,7 +303,7 @@ class LocalAPI(object):
self._delete_resource(self._core_plugin, plugin_context,
'security_group', sg_id)
except ext_sg.SecurityGroupNotFound:
LOG.warning(_('Security Group %s already deleted'), sg_id)
LOG.warning(_LW('Security Group %s already deleted'), sg_id)
def _get_sg_rule(self, plugin_context, sg_rule_id):
return self._get_resource(
@ -319,7 +320,7 @@ class LocalAPI(object):
return self._create_resource(self._core_plugin, plugin_context,
'security_group_rule', attrs)
except ext_sg.SecurityGroupRuleExists as ex:
LOG.warning(_('Security Group already exists %s'), ex.message)
LOG.warning(_LW('Security Group already exists %s'), ex.message)
return
def _update_sg_rule(self, plugin_context, sg_rule_id, attrs):
@ -332,7 +333,7 @@ class LocalAPI(object):
self._delete_resource(self._core_plugin, plugin_context,
'security_group_rule', sg_rule_id)
except ext_sg.SecurityGroupRuleNotFound:
LOG.warning(_('Security Group Rule %s already deleted'),
LOG.warning(_LW('Security Group Rule %s already deleted'),
sg_rule_id)
def _get_fip(self, plugin_context, fip_id):
@ -357,7 +358,7 @@ class LocalAPI(object):
self._delete_resource(self._l3_plugin, plugin_context,
'floatingip', fip_id)
except l3.FloatingIPNotFound:
LOG.warning(_('Floating IP %s Already deleted'), fip_id)
LOG.warning(_LW('Floating IP %s Already deleted'), fip_id)
def _get_l2_policy(self, plugin_context, l2p_id):
return self._get_resource(self._group_policy_plugin, plugin_context,
@ -381,7 +382,7 @@ class LocalAPI(object):
self._delete_resource(self._group_policy_plugin,
plugin_context, 'l2_policy', l2p_id, False)
except gp_ext.L2PolicyNotFound:
LOG.warning(_('L2 Policy %s already deleted'), l2p_id)
LOG.warning(_LW('L2 Policy %s already deleted'), l2p_id)
def _get_l3_policy(self, plugin_context, l3p_id):
return self._get_resource(self._group_policy_plugin, plugin_context,
@ -405,7 +406,7 @@ class LocalAPI(object):
self._delete_resource(self._group_policy_plugin,
plugin_context, 'l3_policy', l3p_id, False)
except gp_ext.L3PolicyNotFound:
LOG.warning(_('L3 Policy %s already deleted'), l3p_id)
LOG.warning(_LW('L3 Policy %s already deleted'), l3p_id)
def _get_external_segment(self, plugin_context, es_id):
return self._get_resource(self._group_policy_plugin, plugin_context,
@ -429,7 +430,7 @@ class LocalAPI(object):
self._delete_resource(self._group_policy_plugin, plugin_context,
'external_segment', es_id, False)
except gp_ext.ExternalSegmentNotFound:
LOG.warning(_('External Segment %s already deleted'), es_id)
LOG.warning(_LW('External Segment %s already deleted'), es_id)
def _get_external_policy(self, plugin_context, ep_id):
return self._get_resource(self._group_policy_plugin, plugin_context,
@ -453,7 +454,7 @@ class LocalAPI(object):
self._delete_resource(self._group_policy_plugin, plugin_context,
'external_policy', ep_id, False)
except gp_ext.ExternalPolicyNotFound:
LOG.warning(_('External Policy %s already deleted'), ep_id)
LOG.warning(_LW('External Policy %s already deleted'), ep_id)
def _get_policy_rule_set(self, plugin_context, prs_id):
return self._get_resource(self._group_policy_plugin, plugin_context,
@ -477,7 +478,7 @@ class LocalAPI(object):
self._delete_resource(self._group_policy_plugin, plugin_context,
'policy_rule_set', prs_id, False)
except gp_ext.PolicyRuleSetNotFound:
LOG.warning(_('Policy Rule Set %s already deleted'), prs_id)
LOG.warning(_LW('Policy Rule Set %s already deleted'), prs_id)
def _get_servicechain_instance(self, plugin_context, sci_id):
return self._get_resource(self._servicechain_plugin, plugin_context,
@ -502,7 +503,7 @@ class LocalAPI(object):
self._delete_resource(self._servicechain_plugin, plugin_context,
'servicechain_instance', sci_id, False)
except sc_ext.ServiceChainInstanceNotFound:
LOG.warning(_("servicechain %s already deleted"), sci_id)
LOG.warning(_LW("servicechain %s already deleted"), sci_id)
def _get_servicechain_spec(self, plugin_context, scs_id):
return self._get_resource(self._servicechain_plugin, plugin_context,
@ -526,7 +527,7 @@ class LocalAPI(object):
self._delete_resource(self._servicechain_plugin, plugin_context,
'servicechain_spec', scs_id)
except sc_ext.ServiceChainSpecNotFound:
LOG.warning(_("servicechain spec %s already deleted"), scs_id)
LOG.warning(_LW("servicechain spec %s already deleted"), scs_id)
def _get_policy_target(self, plugin_context, pt_id):
return self._get_resource(self._group_policy_plugin, plugin_context,
@ -550,7 +551,7 @@ class LocalAPI(object):
self._delete_resource(self._group_policy_plugin, plugin_context,
'policy_target', pt_id, False)
except gp_ext.PolicyTargetNotFound:
LOG.warning(_('Policy Rule Set %s already deleted'), pt_id)
LOG.warning(_LW('Policy Rule Set %s already deleted'), pt_id)
def _get_policy_target_group(self, plugin_context, ptg_id):
return self._get_resource(self._group_policy_plugin, plugin_context,
@ -575,4 +576,4 @@ class LocalAPI(object):
self._delete_resource(self._group_policy_plugin, plugin_context,
'policy_target_group', ptg_id)
except sc_ext.ServiceChainSpecNotFound:
LOG.warning(_("Policy Target Group %s already deleted"), ptg_id)
LOG.warning(_LW("Policy Target Group %s already deleted"), ptg_id)
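Editor's note: this file swaps the generic _() wrapper for the level-specific lazy-translation markers from neutron._i18n: _LE with LOG.error/LOG.exception, _LW with LOG.warning, _LI with LOG.info, while debug messages stay as untranslated plain strings. A minimal sketch of the convention, assuming a Mitaka-era neutron tree is importable; the helper function is illustrative:

from neutron._i18n import _LE, _LI, _LW
from neutron.common import exceptions as n_exc
from oslo_log import log as logging

LOG = logging.getLogger(__name__)


def delete_port_safely(core_plugin, context, port_id):
    # The marker matches the log level it is used with; message arguments
    # are passed separately so interpolation stays lazy, and debug
    # messages are never translated.
    LOG.debug("Deleting port %s", port_id)
    try:
        core_plugin.delete_port(context, port_id)
    except n_exc.PortNotFound:
        LOG.warning(_LW('Port %s already deleted'), port_id)
        return
    except Exception:
        LOG.exception(_LE('Failed to delete port %s'), port_id)
        raise
    LOG.info(_LI('Port %s deleted'), port_id)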


@ -12,11 +12,11 @@
import netaddr
from neutron.api.v2 import attributes as attr
from neutron.common import log
from neutron import context
from neutron.db import common_db_mixin
from neutron.db import model_base
from neutron.db import models_v2
from oslo_log import helpers as log
from oslo_log import log as logging
from oslo_utils import uuidutils
import sqlalchemy as sa
@ -1134,7 +1134,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
raise gpolicy.SubnetPrefixLengthExceedsIpPool(
ip_pool=ip_pool, subnet_size=new_prefix_length)
@log.log
@log.log_method_call
def create_policy_target(self, context, policy_target):
pt = policy_target['policy_target']
tenant_id = self._get_tenant_id_for_create(context, pt)
@ -1147,7 +1147,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
context.session.add(pt_db)
return self._make_policy_target_dict(pt_db)
@log.log
@log.log_method_call
def update_policy_target(self, context, policy_target_id, policy_target):
pt = policy_target['policy_target']
with context.session.begin(subtransactions=True):
@ -1155,18 +1155,18 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
pt_db.update(pt)
return self._make_policy_target_dict(pt_db)
@log.log
@log.log_method_call
def delete_policy_target(self, context, policy_target_id):
with context.session.begin(subtransactions=True):
pt_db = self._get_policy_target(context, policy_target_id)
context.session.delete(pt_db)
@log.log
@log.log_method_call
def get_policy_target(self, context, policy_target_id, fields=None):
pt = self._get_policy_target(context, policy_target_id)
return self._make_policy_target_dict(pt, fields)
@log.log
@log.log_method_call
def get_policy_targets(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -1179,12 +1179,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_policy_targets_count(self, context, filters=None):
return self._get_collection_count(context, PolicyTarget,
filters=filters)
@log.log
@log.log_method_call
def create_policy_target_group(self, context, policy_target_group):
ptg = policy_target_group['policy_target_group']
tenant_id = self._get_tenant_id_for_create(context, ptg)
@ -1202,7 +1202,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
self._process_policy_rule_sets_for_ptg(context, ptg_db, ptg)
return self._make_policy_target_group_dict(ptg_db)
@log.log
@log.log_method_call
def update_policy_target_group(self, context, policy_target_group_id,
policy_target_group):
ptg = policy_target_group['policy_target_group']
@ -1213,7 +1213,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
ptg_db.update(ptg)
return self._make_policy_target_group_dict(ptg_db)
@log.log
@log.log_method_call
def delete_policy_target_group(self, context, policy_target_group_id):
with context.session.begin(subtransactions=True):
ptg_db = self._get_policy_target_group(
@ -1231,13 +1231,13 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
# other resources.
context.session.delete(ptg_db)
@log.log
@log.log_method_call
def get_policy_target_group(self, context, policy_target_group_id,
fields=None):
ptg = self._get_policy_target_group(context, policy_target_group_id)
return self._make_policy_target_group_dict(ptg, fields)
@log.log
@log.log_method_call
def get_policy_target_groups(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -1250,12 +1250,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_policy_target_groups_count(self, context, filters=None):
return self._get_collection_count(context, PolicyTargetGroup,
filters=filters)
@log.log
@log.log_method_call
def create_l2_policy(self, context, l2_policy):
l2p = l2_policy['l2_policy']
tenant_id = self._get_tenant_id_for_create(context, l2p)
@ -1270,7 +1270,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
context.session.add(l2p_db)
return self._make_l2_policy_dict(l2p_db)
@log.log
@log.log_method_call
def update_l2_policy(self, context, l2_policy_id, l2_policy):
l2p = l2_policy['l2_policy']
with context.session.begin(subtransactions=True):
@ -1278,7 +1278,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
l2p_db.update(l2p)
return self._make_l2_policy_dict(l2p_db)
@log.log
@log.log_method_call
def delete_l2_policy(self, context, l2_policy_id):
with context.session.begin(subtransactions=True):
l2p_db = self._get_l2_policy(context, l2_policy_id)
@ -1286,12 +1286,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
raise gpolicy.L2PolicyInUse(l2_policy_id=l2_policy_id)
context.session.delete(l2p_db)
@log.log
@log.log_method_call
def get_l2_policy(self, context, l2_policy_id, fields=None):
l2p = self._get_l2_policy(context, l2_policy_id)
return self._make_l2_policy_dict(l2p, fields)
@log.log
@log.log_method_call
def get_l2_policies(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -1304,12 +1304,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_l2_policies_count(self, context, filters=None):
return self._get_collection_count(context, L2Policy,
filters=filters)
@log.log
@log.log_method_call
def create_l3_policy(self, context, l3_policy):
l3p = l3_policy['l3_policy']
tenant_id = self._get_tenant_id_for_create(context, l3p)
@ -1332,7 +1332,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
context.session.add(l3p_db)
return self._make_l3_policy_dict(l3p_db)
@log.log
@log.log_method_call
def update_l3_policy(self, context, l3_policy_id, l3_policy):
l3p = l3_policy['l3_policy']
with context.session.begin(subtransactions=True):
@ -1348,7 +1348,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
l3p_db.update(l3p)
return self._make_l3_policy_dict(l3p_db)
@log.log
@log.log_method_call
def delete_l3_policy(self, context, l3_policy_id):
with context.session.begin(subtransactions=True):
l3p_db = self._get_l3_policy(context, l3_policy_id)
@ -1356,12 +1356,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
raise gpolicy.L3PolicyInUse(l3_policy_id=l3_policy_id)
context.session.delete(l3p_db)
@log.log
@log.log_method_call
def get_l3_policy(self, context, l3_policy_id, fields=None):
l3p = self._get_l3_policy(context, l3_policy_id)
return self._make_l3_policy_dict(l3p, fields)
@log.log
@log.log_method_call
def get_l3_policies(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -1374,12 +1374,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_l3_policies_count(self, context, filters=None):
return self._get_collection_count(context, L3Policy,
filters=filters)
@log.log
@log.log_method_call
def create_network_service_policy(self, context, network_service_policy):
nsp = network_service_policy['network_service_policy']
tenant_id = self._get_tenant_id_for_create(context, nsp)
@ -1394,7 +1394,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
context, nsp_db, nsp)
return self._make_network_service_policy_dict(nsp_db)
@log.log
@log.log_method_call
def update_network_service_policy(
self, context, network_service_policy_id, network_service_policy):
nsp = network_service_policy['network_service_policy']
@ -1407,7 +1407,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
nsp_db.update(nsp)
return self._make_network_service_policy_dict(nsp_db)
@log.log
@log.log_method_call
def delete_network_service_policy(
self, context, network_service_policy_id):
with context.session.begin(subtransactions=True):
@ -1418,14 +1418,14 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
network_service_policy_id=network_service_policy_id)
context.session.delete(nsp_db)
@log.log
@log.log_method_call
def get_network_service_policy(
self, context, network_service_policy_id, fields=None):
nsp = self._get_network_service_policy(
context, network_service_policy_id)
return self._make_network_service_policy_dict(nsp, fields)
@log.log
@log.log_method_call
def get_network_service_policies(
self, context, filters=None, fields=None, sorts=None, limit=None,
marker=None, page_reverse=False):
@ -1438,12 +1438,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_network_service_policies_count(self, context, filters=None):
return self._get_collection_count(context, NetworkServicePolicy,
filters=filters)
@log.log
@log.log_method_call
def create_policy_classifier(self, context, policy_classifier):
pc = policy_classifier['policy_classifier']
tenant_id = self._get_tenant_id_for_create(context, pc)
@ -1462,7 +1462,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
context.session.add(pc_db)
return self._make_policy_classifier_dict(pc_db)
@log.log
@log.log_method_call
def update_policy_classifier(self, context, policy_classifier_id,
policy_classifier):
pc = policy_classifier['policy_classifier']
@ -1478,7 +1478,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
pc_db.update(pc)
return self._make_policy_classifier_dict(pc_db)
@log.log
@log.log_method_call
def delete_policy_classifier(self, context, policy_classifier_id):
with context.session.begin(subtransactions=True):
pc_db = self._get_policy_classifier(context, policy_classifier_id)
@ -1489,13 +1489,13 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
policy_classifier_id=policy_classifier_id)
context.session.delete(pc_db)
@log.log
@log.log_method_call
def get_policy_classifier(self, context, policy_classifier_id,
fields=None):
pc = self._get_policy_classifier(context, policy_classifier_id)
return self._make_policy_classifier_dict(pc, fields)
@log.log
@log.log_method_call
def get_policy_classifiers(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -1508,12 +1508,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_policy_classifiers_count(self, context, filters=None):
return self._get_collection_count(context, PolicyClassifier,
filters=filters)
@log.log
@log.log_method_call
def create_policy_action(self, context, policy_action):
pa = policy_action['policy_action']
tenant_id = self._get_tenant_id_for_create(context, pa)
@ -1528,7 +1528,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
context.session.add(pa_db)
return self._make_policy_action_dict(pa_db)
@log.log
@log.log_method_call
def update_policy_action(self, context, policy_action_id, policy_action):
pa = policy_action['policy_action']
with context.session.begin(subtransactions=True):
@ -1536,7 +1536,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
pa_db.update(pa)
return self._make_policy_action_dict(pa_db)
@log.log
@log.log_method_call
def delete_policy_action(self, context, policy_action_id):
with context.session.begin(subtransactions=True):
pa_db = self._get_policy_action(context, policy_action_id)
@ -1546,12 +1546,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
policy_action_id=policy_action_id)
context.session.delete(pa_db)
@log.log
@log.log_method_call
def get_policy_action(self, context, id, fields=None):
pa = self._get_policy_action(context, id)
return self._make_policy_action_dict(pa, fields)
@log.log
@log.log_method_call
def get_policy_actions(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -1564,12 +1564,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_policy_actions_count(self, context, filters=None):
return self._get_collection_count(context, PolicyAction,
filters=filters)
@log.log
@log.log_method_call
def create_policy_rule(self, context, policy_rule):
pr = policy_rule['policy_rule']
tenant_id = self._get_tenant_id_for_create(context, pr)
@ -1585,7 +1585,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
pr['policy_actions'])
return self._make_policy_rule_dict(pr_db)
@log.log
@log.log_method_call
def update_policy_rule(self, context, policy_rule_id, policy_rule):
pr = policy_rule['policy_rule']
with context.session.begin(subtransactions=True):
@ -1597,7 +1597,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
pr_db.update(pr)
return self._make_policy_rule_dict(pr_db)
@log.log
@log.log_method_call
def delete_policy_rule(self, context, policy_rule_id):
with context.session.begin(subtransactions=True):
pr_db = self._get_policy_rule(context, policy_rule_id)
@ -1607,12 +1607,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
raise gpolicy.PolicyRuleInUse(policy_rule_id=policy_rule_id)
context.session.delete(pr_db)
@log.log
@log.log_method_call
def get_policy_rule(self, context, policy_rule_id, fields=None):
pr = self._get_policy_rule(context, policy_rule_id)
return self._make_policy_rule_dict(pr, fields)
@log.log
@log.log_method_call
def get_policy_rules(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -1625,12 +1625,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_policy_rules_count(self, context, filters=None):
return self._get_collection_count(context, PolicyRule,
filters=filters)
@log.log
@log.log_method_call
def create_policy_rule_set(self, context, policy_rule_set):
prs = policy_rule_set['policy_rule_set']
tenant_id = self._get_tenant_id_for_create(context, prs)
@ -1647,7 +1647,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
context, prs_db, prs['child_policy_rule_sets'])
return self._make_policy_rule_set_dict(prs_db)
@log.log
@log.log_method_call
def update_policy_rule_set(self, context, policy_rule_set_id,
policy_rule_set):
prs = policy_rule_set['policy_rule_set']
@ -1664,7 +1664,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
prs_db.update(prs)
return self._make_policy_rule_set_dict(prs_db)
@log.log
@log.log_method_call
def delete_policy_rule_set(self, context, policy_rule_set_id):
with context.session.begin(subtransactions=True):
prs_db = self._get_policy_rule_set(context, policy_rule_set_id)
@ -1683,12 +1683,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
policy_rule_set_id)
context.session.delete(prs_db)
@log.log
@log.log_method_call
def get_policy_rule_set(self, context, policy_rule_set_id, fields=None):
prs = self._get_policy_rule_set(context, policy_rule_set_id)
return self._make_policy_rule_set_dict(prs, fields)
@log.log
@log.log_method_call
def get_policy_rule_sets(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -1701,12 +1701,12 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_policy_rule_sets_count(self, context, filters=None):
return self._get_collection_count(context, PolicyRuleSet,
filters=filters)
@log.log
@log.log_method_call
def create_external_policy(self, context, external_policy):
ep = external_policy['external_policy']
tenant_id = self._get_tenant_id_for_create(context, ep)
@ -1722,7 +1722,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
self._process_policy_rule_sets_for_ep(context, ep_db, ep)
return self._make_external_policy_dict(ep_db)
@log.log
@log.log_method_call
def update_external_policy(self, context, external_policy_id,
external_policy):
ep = external_policy['external_policy']
@ -1737,7 +1737,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
ep_db.update(ep)
return self._make_external_policy_dict(ep_db)
@log.log
@log.log_method_call
def get_external_policies(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -1750,25 +1750,25 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_external_policies_count(self, context, filters=None):
return self._get_collection_count(context, ExternalPolicy,
filters=filters)
@log.log
@log.log_method_call
def get_external_policy(self, context, external_policy_id, fields=None):
ep = self._get_external_policy(
context, external_policy_id)
return self._make_external_policy_dict(ep, fields)
@log.log
@log.log_method_call
def delete_external_policy(self, context, external_policy_id):
with context.session.begin(subtransactions=True):
ep_db = self._get_external_policy(
context, external_policy_id)
context.session.delete(ep_db)
@log.log
@log.log_method_call
def create_external_segment(self, context, external_segment):
es = external_segment['external_segment']
tenant_id = self._get_tenant_id_for_create(context, es)
@ -1784,7 +1784,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
self._process_segment_ers(context, es_db, es)
return self._make_external_segment_dict(es_db)
@log.log
@log.log_method_call
def update_external_segment(self, context, external_segment_id,
external_segment):
es = external_segment['external_segment']
@ -1797,7 +1797,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
es_db.update(es)
return self._make_external_segment_dict(es_db)
@log.log
@log.log_method_call
def get_external_segments(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -1810,25 +1810,25 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_external_segments_count(self, context, filters=None):
return self._get_collection_count(context, ExternalSegment,
filters=filters)
@log.log
@log.log_method_call
def get_external_segment(self, context, external_segment_id, fields=None):
es = self._get_external_segment(
context, external_segment_id)
return self._make_external_segment_dict(es, fields)
@log.log
@log.log_method_call
def delete_external_segment(self, context, external_segment_id):
with context.session.begin(subtransactions=True):
es_db = self._get_external_segment(
context, external_segment_id)
context.session.delete(es_db)
@log.log
@log.log_method_call
def create_nat_pool(self, context, nat_pool):
np = nat_pool['nat_pool']
tenant_id = self._get_tenant_id_for_create(context, np)
@ -1842,7 +1842,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
context.session.add(np_db)
return self._make_nat_pool_dict(np_db)
@log.log
@log.log_method_call
def update_nat_pool(self, context, nat_pool_id, nat_pool):
np = nat_pool['nat_pool']
with context.session.begin(subtransactions=True):
@ -1851,7 +1851,7 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
np_db.update(np)
return self._make_nat_pool_dict(np_db)
@log.log
@log.log_method_call
def get_nat_pools(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -1864,16 +1864,16 @@ class GroupPolicyDbPlugin(gpolicy.GroupPolicyPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_nat_pools_count(self, context, filters=None):
return self._get_collection_count(context, NATPool, filters=filters)
@log.log
@log.log_method_call
def get_nat_pool(self, context, nat_pool_id, fields=None):
np = self._get_nat_pool(context, nat_pool_id)
return self._make_nat_pool_dict(np, fields)
@log.log
@log.log_method_call
def delete_nat_pool(self, context, nat_pool_id):
with context.session.begin(subtransactions=True):
np_db = self._get_nat_pool(context, nat_pool_id)
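Editor's note: throughout this file the @log.log decorator from neutron.common.log becomes @log.log_method_call from oslo_log's helpers module, imported under the same alias; it logs the method name and arguments at debug level on every call. A minimal sketch of the replacement pattern; the class and method are illustrative:

from oslo_log import helpers as log


class ExampleDbPlugin(object):

    @log.log_method_call
    def get_policy_targets_count(self, context, filters=None):
        # log_method_call logs the call and its arguments at debug level
        # before the body runs; the body itself is unchanged.
        filters = filters or {}
        return len(filters)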


@ -10,8 +10,8 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import log
from neutron.db import model_base
from oslo_log import helpers as log
from oslo_log import log as logging
from oslo_utils import uuidutils
import sqlalchemy as sa
@ -251,7 +251,7 @@ class GroupPolicyMappingDbPlugin(gpdb.GroupPolicyDbPlugin):
attribute='allowed_address_pairs',
reason='read only attribute')
@log.log
@log.log_method_call
def create_policy_target(self, context, policy_target):
pt = policy_target['policy_target']
tenant_id = self._get_tenant_id_for_create(context, pt)
@ -269,12 +269,12 @@ class GroupPolicyMappingDbPlugin(gpdb.GroupPolicyDbPlugin):
return self._make_policy_target_dict(
pt_db, port_attributes=pt.get('port_attributes', {}))
@log.log
@log.log_method_call
def get_policy_targets_count(self, context, filters=None):
return self._get_collection_count(context, PolicyTargetMapping,
filters=filters)
@log.log
@log.log_method_call
def get_policy_targets(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -287,7 +287,7 @@ class GroupPolicyMappingDbPlugin(gpdb.GroupPolicyDbPlugin):
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def create_policy_target_group(self, context, policy_target_group):
ptg = policy_target_group['policy_target_group']
tenant_id = self._get_tenant_id_for_create(context, ptg)
@ -312,7 +312,7 @@ class GroupPolicyMappingDbPlugin(gpdb.GroupPolicyDbPlugin):
self._process_policy_rule_sets_for_ptg(context, ptg_db, ptg)
return self._make_policy_target_group_dict(ptg_db)
@log.log
@log.log_method_call
def update_policy_target_group(self, context, policy_target_group_id,
policy_target_group):
ptg = policy_target_group['policy_target_group']
@ -343,7 +343,7 @@ class GroupPolicyMappingDbPlugin(gpdb.GroupPolicyDbPlugin):
ptg_db.update(ptg)
return self._make_policy_target_group_dict(ptg_db)
@log.log
@log.log_method_call
def create_l2_policy(self, context, l2_policy):
l2p = l2_policy['l2_policy']
tenant_id = self._get_tenant_id_for_create(context, l2p)
@ -360,7 +360,7 @@ class GroupPolicyMappingDbPlugin(gpdb.GroupPolicyDbPlugin):
context.session.add(l2p_db)
return self._make_l2_policy_dict(l2p_db)
@log.log
@log.log_method_call
def get_l2_policies(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -373,12 +373,12 @@ class GroupPolicyMappingDbPlugin(gpdb.GroupPolicyDbPlugin):
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_l2_policies_count(self, context, filters=None):
return self._get_collection_count(context, L2PolicyMapping,
filters=filters)
@log.log
@log.log_method_call
def create_l3_policy(self, context, l3_policy):
l3p = l3_policy['l3_policy']
self.validate_ip_pool(l3p.get('ip_pool', None), l3p['ip_version'])
@ -409,7 +409,7 @@ class GroupPolicyMappingDbPlugin(gpdb.GroupPolicyDbPlugin):
context.session.add(l3p_db)
return self._make_l3_policy_dict(l3p_db)
@log.log
@log.log_method_call
def update_l3_policy(self, context, l3_policy_id, l3_policy):
l3p = l3_policy['l3_policy']
with context.session.begin(subtransactions=True):
@ -443,7 +443,7 @@ class GroupPolicyMappingDbPlugin(gpdb.GroupPolicyDbPlugin):
l3p_db.update(l3p)
return self._make_l3_policy_dict(l3p_db)
@log.log
@log.log_method_call
def create_external_segment(self, context, external_segment):
es = external_segment['external_segment']
tenant_id = self._get_tenant_id_for_create(context, es)
@ -460,7 +460,7 @@ class GroupPolicyMappingDbPlugin(gpdb.GroupPolicyDbPlugin):
self._process_segment_ers(context, es_db, es)
return self._make_external_segment_dict(es_db)
@log.log
@log.log_method_call
def get_external_segments(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -477,7 +477,7 @@ class GroupPolicyMappingDbPlugin(gpdb.GroupPolicyDbPlugin):
return self._get_collection_count(context, ExternalSegmentMapping,
filters=filters)
@log.log
@log.log_method_call
def create_nat_pool(self, context, nat_pool):
np = nat_pool['nat_pool']
tenant_id = self._get_tenant_id_for_create(context, np)
@ -492,7 +492,7 @@ class GroupPolicyMappingDbPlugin(gpdb.GroupPolicyDbPlugin):
context.session.add(np_db)
return self._make_nat_pool_dict(np_db)
@log.log
@log.log_method_call
def get_nat_pools(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):


@ -12,12 +12,13 @@
import ast
from neutron.common import log
from neutron._i18n import _LE
from neutron.db import common_db_mixin
from neutron.db import model_base
from neutron.db import models_v2
from neutron import manager
from neutron.plugins.common import constants as pconst
from oslo_log import helpers as log
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import uuidutils
@ -168,7 +169,7 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
plugins = manager.NeutronManager.get_service_plugins()
grouppolicy_plugin = plugins.get(pconst.GROUP_POLICY)
if not grouppolicy_plugin:
LOG.error(_("No Grouppolicy service plugin found."))
LOG.error(_LE("No Grouppolicy service plugin found."))
raise s_exc.ServiceChainDeploymentError()
return grouppolicy_plugin
@ -255,7 +256,7 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
if service_type not in schain.sc_supported_type:
raise schain.ServiceTypeNotSupported(sc_service_type=service_type)
@log.log
@log.log_method_call
def create_servicechain_node(self, context, servicechain_node):
node = servicechain_node['servicechain_node']
tenant_id = self._get_tenant_id_for_create(context, node)
@ -269,7 +270,7 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
context.session.add(node_db)
return self._make_sc_node_dict(node_db)
@log.log
@log.log_method_call
def update_servicechain_node(self, context, servicechain_node_id,
servicechain_node, set_params=False):
node = servicechain_node['servicechain_node']
@ -287,7 +288,7 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
set_params=set_params)
return self._make_sc_node_dict(node_db)
@log.log
@log.log_method_call
def delete_servicechain_node(self, context, servicechain_node_id):
with context.session.begin(subtransactions=True):
node_db = self._get_servicechain_node(context,
@ -297,13 +298,13 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
node_id=servicechain_node_id)
context.session.delete(node_db)
@log.log
@log.log_method_call
def get_servicechain_node(self, context, servicechain_node_id,
fields=None):
node = self._get_servicechain_node(context, servicechain_node_id)
return self._make_sc_node_dict(node, fields)
@log.log
@log.log_method_call
def get_servicechain_nodes(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -316,7 +317,7 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_servicechain_nodes_count(self, context, filters=None):
return self._get_collection_count(context, ServiceChainNode,
filters=filters)
@ -419,7 +420,7 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
result.append(sci)
return result
@log.log
@log.log_method_call
def create_servicechain_spec(self, context, servicechain_spec,
set_params=True):
spec = servicechain_spec['servicechain_spec']
@ -435,7 +436,7 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
context.session.add(spec_db)
return self._make_sc_spec_dict(spec_db)
@log.log
@log.log_method_call
def update_servicechain_spec(self, context, spec_id,
servicechain_spec, set_params=True):
spec = servicechain_spec['servicechain_spec']
@ -447,7 +448,7 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
spec_db.update(spec)
return self._make_sc_spec_dict(spec_db)
@log.log
@log.log_method_call
def delete_servicechain_spec(self, context, spec_id):
policy_actions = self._grouppolicy_plugin.get_policy_actions(
context, filters={"action_value": [spec_id]})
@ -460,13 +461,13 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
raise schain.ServiceChainSpecInUse(spec_id=spec_id)
context.session.delete(spec_db)
@log.log
@log.log_method_call
def get_servicechain_spec(self, context, spec_id,
fields=None):
spec = self._get_servicechain_spec(context, spec_id)
return self._make_sc_spec_dict(spec, fields)
@log.log
@log.log_method_call
def get_servicechain_specs(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -479,12 +480,12 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_servicechain_specs_count(self, context, filters=None):
return self._get_collection_count(context, ServiceChainSpec,
filters=filters)
@log.log
@log.log_method_call
def create_servicechain_instance(self, context, servicechain_instance):
instance = servicechain_instance['servicechain_instance']
tenant_id = self._get_tenant_id_for_create(context, instance)
@ -514,7 +515,7 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
context.session.add(instance_db)
return self._make_sc_instance_dict(instance_db)
@log.log
@log.log_method_call
def update_servicechain_instance(self, context, servicechain_instance_id,
servicechain_instance):
instance = servicechain_instance['servicechain_instance']
@ -526,19 +527,19 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
instance_db.update(instance)
return self._make_sc_instance_dict(instance_db)
@log.log
@log.log_method_call
def delete_servicechain_instance(self, context, servicechain_instance_id):
with context.session.begin(subtransactions=True):
instance_db = self._get_servicechain_instance(
context, servicechain_instance_id)
context.session.delete(instance_db)
@log.log
@log.log_method_call
def get_servicechain_instance(self, context, sc_instance_id, fields=None):
instance_db = self._get_servicechain_instance(context, sc_instance_id)
return self._make_sc_instance_dict(instance_db, fields)
@log.log
@log.log_method_call
def get_servicechain_instances(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
@ -551,17 +552,17 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
marker_obj=marker_obj,
page_reverse=page_reverse)
@log.log
@log.log_method_call
def get_servicechain_instances_count(self, context, filters=None):
return self._get_collection_count(context, ServiceChainInstance,
filters=filters)
@log.log
@log.log_method_call
def get_service_profiles_count(self, context, filters=None):
return self._get_collection_count(context, ServiceProfile,
filters=filters)
@log.log
@log.log_method_call
def create_service_profile(self, context, service_profile):
profile = service_profile['service_profile']
tenant_id = self._get_tenant_id_for_create(context, profile)
@ -577,7 +578,7 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
context.session.add(profile_db)
return self._make_service_profile_dict(profile_db)
@log.log
@log.log_method_call
def update_service_profile(self, context, service_profile_id,
service_profile):
profile = service_profile['service_profile']
@ -587,7 +588,7 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
profile_db.update(profile)
return self._make_service_profile_dict(profile_db)
@log.log
@log.log_method_call
def delete_service_profile(self, context, service_profile_id):
with context.session.begin(subtransactions=True):
profile_db = self._get_service_profile(context,
@ -597,13 +598,13 @@ class ServiceChainDbPlugin(schain.ServiceChainPluginBase,
profile_id=service_profile_id)
context.session.delete(profile_db)
@log.log
@log.log_method_call
def get_service_profile(self, context, service_profile_id, fields=None):
profile_db = self._get_service_profile(
context, service_profile_id)
return self._make_service_profile_dict(profile_db, fields)
@log.log
@log.log_method_call
def get_service_profiles(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):


@ -10,6 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron.api import extensions
from neutron.api.v2 import attributes as attr
from gbpservice.neutron.extensions import group_policy as gp
@ -52,7 +53,7 @@ EXTENDED_ATTRIBUTES_2_0 = {
}
class Group_policy_mapping(object):
class Group_policy_mapping(extensions.ExtensionDescriptor):
@classmethod
def get_name(cls):
@ -79,3 +80,7 @@ class Group_policy_mapping(object):
return EXTENDED_ATTRIBUTES_2_0
else:
return {}
@classmethod
def get_plugin_interface(cls):
return gp.GroupPolicyPluginBase
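Editor's note: the hunk above turns Group_policy_mapping into a proper extension descriptor: it now derives from extensions.ExtensionDescriptor and advertises the plugin interface it extends via get_plugin_interface(), which lets the extension framework check the loaded plugin against that interface. A minimal sketch of the shape of such a descriptor, assuming a Mitaka-era neutron; the names and interface class are illustrative and the exact set of required classmethods varies by release:

from neutron.api import extensions


class ExamplePluginBase(object):
    """Interface the extension advertises; a real one declares an
    abstract method per API operation."""


class Example_mapping(extensions.ExtensionDescriptor):

    @classmethod
    def get_name(cls):
        return "Example Mapping"

    @classmethod
    def get_alias(cls):
        return "example-mapping"

    @classmethod
    def get_description(cls):
        return "Extends existing resources with example attributes."

    @classmethod
    def get_updated(cls):
        return "2016-02-05T10:00:00-00:00"

    @classmethod
    def get_extended_resources(cls, version):
        # The attribute map is only exposed for the API version it targets.
        extended_attributes_2_0 = {}  # real extensions list attributes here
        if version == "2.0":
            return extended_attributes_2_0
        return {}

    @classmethod
    def get_plugin_interface(cls):
        # Lets the extension framework verify that the configured plugin
        # actually implements the advertised interface.
        return ExamplePluginBase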


@ -10,9 +10,13 @@
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from neutron.callbacks import events
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import constants as l3_constants
from neutron.db import l3_db
from neutron.db import securitygroups_db
from neutron import manager
# Monkey patch create floatingip to allow subnet_id to be specified.
@ -81,7 +85,7 @@ l3_db.L3_NAT_dbonly_mixin.create_floatingip = create_floatingip
# REVISIT(ivar): Monkey patch to allow explicit router_id to be set in Neutron
# for Floating Ip creation (for internal calls only). Once we split the server,
# this could be part of a GBP Neutron L3 driver.
def get_assoc_data(self, context, fip, floating_network_id):
def _get_assoc_data(self, context, fip, floating_network_id):
(internal_port, internal_subnet_id,
internal_ip_address) = self._internal_fip_assoc_data(context, fip)
if fip.get('router_id'):
@ -95,7 +99,40 @@ def get_assoc_data(self, context, fip, floating_network_id):
return fip['port_id'], internal_ip_address, router_id
l3_db.L3_NAT_dbonly_mixin.get_assoc_data = get_assoc_data
def _update_fip_assoc(self, context, fip, floatingip_db, external_port):
previous_router_id = floatingip_db.router_id
port_id, internal_ip_address, router_id = (
self._check_and_get_fip_assoc(context, fip, floatingip_db))
floatingip_db.update({'fixed_ip_address': internal_ip_address,
'fixed_port_id': port_id,
'router_id': router_id,
'last_known_router_id': previous_router_id})
next_hop = None
if router_id:
router = self._get_router(context.elevated(), router_id)
gw_port = router.gw_port
if gw_port:
for fixed_ip in gw_port.fixed_ips:
addr = netaddr.IPAddress(fixed_ip.ip_address)
if addr.version == l3_constants.IP_VERSION_4:
next_hop = fixed_ip.ip_address
break
args = {'fixed_ip_address': internal_ip_address,
'fixed_port_id': port_id,
'router_id': router_id,
'last_known_router_id': previous_router_id,
'floating_ip_address': floatingip_db.floating_ip_address,
'floating_network_id': floatingip_db.floating_network_id,
'next_hop': next_hop,
'context': context}
registry.notify(resources.FLOATING_IP,
events.AFTER_UPDATE,
self._update_fip_assoc,
**args)
l3_db.L3_NAT_dbonly_mixin._get_assoc_data = _get_assoc_data
l3_db.L3_NAT_dbonly_mixin._update_fip_assoc = _update_fip_assoc
# REVISIT(ivar): Neutron adds a tenant filter on SG lookup for a given port,
@ -130,9 +167,3 @@ def _get_security_groups_on_port(self, context, port):
securitygroups_db.SecurityGroupDbMixin._get_security_groups_on_port = (
_get_security_groups_on_port)
def _load_flavors_manager(self):
pass
manager.NeutronManager._load_flavors_manager = _load_flavors_manager
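Editor's note: the new _update_fip_assoc above ends by publishing a FLOATING_IP AFTER_UPDATE event through the neutron callbacks registry, so out-of-tree code can react to floating-IP associations without further monkey patching. A minimal sketch of the subscribe/notify pair, assuming the Mitaka-era neutron.callbacks API; the callback and payload keys are illustrative (the real payload is the args dict built in the patch):

from neutron.callbacks import events
from neutron.callbacks import registry
from neutron.callbacks import resources


def log_fip_update(resource, event, trigger, **kwargs):
    # Subscribers receive the resource type, the event, the notifying
    # object and whatever keyword payload notify() was given.
    print(resource, event, kwargs.get('router_id'), kwargs.get('next_hop'))


# Register interest in the (resource, event) pair ...
registry.subscribe(log_fip_update, resources.FLOATING_IP, events.AFTER_UPDATE)

# ... which the patched _update_fip_assoc then fires roughly like this:
registry.notify(resources.FLOATING_IP, events.AFTER_UPDATE, None,
                router_id='router-uuid', next_hop='10.0.0.1')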


@ -49,7 +49,7 @@ def delete_network(self, context, id):
attempt = 0
while True:
attempt += 1
LOG.info(_("Attempt %(attempt)s to delete network %(net)s"),
LOG.info(i18n._LI("Attempt %(attempt)s to delete network %(net)s"),
{'attempt': attempt, 'net': id})
if attempt > 100:
raise InfiniteLoopError()
@ -150,7 +150,7 @@ def delete_subnet(self, context, id):
attempt = 0
while True:
attempt += 1
LOG.info(_("Attempt %(attempt)s to delete subnet %(subnet)s"),
LOG.info(i18n._LI("Attempt %(attempt)s to delete subnet %(subnet)s"),
{'attempt': attempt, 'subnet': id})
if attempt > 100:
raise InfiniteLoopError()


@ -48,8 +48,8 @@ class APICMechanismGBPDriver(mech_agent.AgentMechanismDriverBase):
network_type = segment[api.NETWORK_TYPE]
if network_type == ofcst.TYPE_OPFLEX:
opflex_mappings = agent['configurations'].get('opflex_networks')
LOG.debug(_("Checking segment: %(segment)s "
"for mappings: %(mappings)s "),
LOG.debug("Checking segment: %(segment)s "
"for mappings: %(mappings)s ",
{'segment': segment, 'mappings': opflex_mappings})
return ((opflex_mappings is None) or
(segment[api.PHYSICAL_NETWORK] in opflex_mappings))


@ -12,10 +12,11 @@
from keystoneclient import exceptions as k_exceptions
from keystoneclient.v2_0 import client as k_client
from neutron.common import log
from neutron._i18n import _LE
from neutron.db import model_base
from neutron.db import models_v2
from oslo_config import cfg
from oslo_log import helpers as log
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import excutils
@ -80,7 +81,7 @@ class ChainMappingDriver(api.PolicyDriver, local_api.LocalAPI,
policy resources to various service chain constructs.
"""
@log.log
@log.log_method_call
def initialize(self):
self._cached_agent_notifier = None
self.chain_owner = ChainMappingDriver.chain_tenant_id(reraise=True)
@ -97,10 +98,10 @@ class ChainMappingDriver(api.PolicyDriver, local_api.LocalAPI,
return tenant.id
except k_exceptions.NotFound:
with excutils.save_and_reraise_exception(reraise=reraise):
LOG.error(_('No tenant with name %s exists.'), tenant)
LOG.error(_LE('No tenant with name %s exists.'), tenant)
except k_exceptions.NoUniqueMatch:
with excutils.save_and_reraise_exception(reraise=reraise):
LOG.error(_('Multiple tenants matches found for %s'),
LOG.error(_LE('Multiple tenants matches found for %s'),
tenant)
@staticmethod
@ -119,7 +120,7 @@ class ChainMappingDriver(api.PolicyDriver, local_api.LocalAPI,
return k_client.Client(username=user, password=pwd,
auth_url=auth_url)
@log.log
@log.log_method_call
def create_policy_target_postcommit(self, context):
if not context._plugin._is_service_target(context._plugin_context,
context.current['id']):
@ -134,12 +135,12 @@ class ChainMappingDriver(api.PolicyDriver, local_api.LocalAPI,
chain_context, context.current,
mapping.servicechain_instance_id)
@log.log
@log.log_method_call
def delete_policy_target_precommit(self, context):
context._is_service_target = context._plugin._is_service_target(
context._plugin_context, context.current['id'])
@log.log
@log.log_method_call
def delete_policy_target_postcommit(self, context):
if not context._is_service_target:
mappings = self._get_ptg_servicechain_mapping(
@ -153,11 +154,11 @@ class ChainMappingDriver(api.PolicyDriver, local_api.LocalAPI,
chain_context, context.current,
mapping.servicechain_instance_id)
@log.log
@log.log_method_call
def create_policy_target_group_precommit(self, context):
self._validate_ptg_prss(context, context.current)
@log.log
@log.log_method_call
def create_policy_target_group_postcommit(self, context):
if (context.current['provided_policy_rule_sets'] and not
context.current.get('proxied_group_id')):
@ -166,12 +167,12 @@ class ChainMappingDriver(api.PolicyDriver, local_api.LocalAPI,
providing_ptg=context.current)
self._handle_prs_added(context)
@log.log
@log.log_method_call
def update_policy_target_group_precommit(self, context):
self._validate_ptg_prss(context, context.current)
self._stash_ptg_modified_chains(context)
@log.log
@log.log_method_call
def update_policy_target_group_postcommit(self, context):
#Update service chain instance when any ruleset is changed
orig = context.original
@ -193,19 +194,19 @@ class ChainMappingDriver(api.PolicyDriver, local_api.LocalAPI,
providing_ptg=context.current)
self._handle_prs_updated(context)
@log.log
@log.log_method_call
def delete_policy_target_group_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_target_group_postcommit(self, context):
self._handle_prs_removed(context)
@log.log
@log.log_method_call
def update_policy_classifier_postcommit(self, context):
self._handle_classifier_update_notification(context)
@log.log
@log.log_method_call
def create_policy_action_precommit(self, context):
spec_id = context.current['action_value']
if spec_id:
@ -215,15 +216,15 @@ class ChainMappingDriver(api.PolicyDriver, local_api.LocalAPI,
if not spec.get('shared', False):
self._reject_shared(context.current, 'policy_action')
@log.log
@log.log_method_call
def update_policy_action_postcommit(self, context):
self._handle_redirect_spec_id_update(context)
@log.log
@log.log_method_call
def create_policy_rule_precommit(self, context):
self._reject_multiple_redirects_in_rule(context)
@log.log
@log.log_method_call
def update_policy_rule_precommit(self, context):
self._reject_multiple_redirects_in_rule(context)
old_redirect = self._get_redirect_action(context, context.original)
@ -237,7 +238,7 @@ class ChainMappingDriver(api.PolicyDriver, local_api.LocalAPI,
# Make sure the PRS can have a new redirect action
self._validate_new_prs_redirect(context, prs)
@log.log
@log.log_method_call
def update_policy_rule_postcommit(self, context):
old_classifier_id = context.original['policy_classifier_id']
new_classifier_id = context.current['policy_classifier_id']
@ -260,17 +261,17 @@ class ChainMappingDriver(api.PolicyDriver, local_api.LocalAPI,
if old_redirect_policy_actions or new_redirect_policy_actions:
self._handle_redirect_action(context, policy_rule_sets)
@log.log
@log.log_method_call
def create_policy_rule_set_precommit(self, context):
self._reject_multiple_redirects_in_prs(context)
@log.log
@log.log_method_call
def create_policy_rule_set_postcommit(self, context):
if context.current['child_policy_rule_sets']:
self._handle_redirect_action(
context, context.current['child_policy_rule_sets'])
@log.log
@log.log_method_call
def update_policy_rule_set_precommit(self, context):
self._reject_multiple_redirects_in_prs(context)
# If a redirect action is added (from 0 to one) we have to validate
@ -283,7 +284,7 @@ class ChainMappingDriver(api.PolicyDriver, local_api.LocalAPI,
if new_red_count > old_red_count:
self._validate_new_prs_redirect(context, context.current)
@log.log
@log.log_method_call
def update_policy_rule_set_postcommit(self, context):
# Handle any Redirects from the current Policy Rule Set
self._handle_redirect_action(context, [context.current['id']])
@ -297,21 +298,21 @@ class ChainMappingDriver(api.PolicyDriver, local_api.LocalAPI,
self._handle_redirect_action(
context, context.current['child_policy_rule_sets'])
@log.log
@log.log_method_call
def delete_policy_rule_set_postcommit(self, context):
if context.current['child_policy_rule_sets']:
self._handle_redirect_action(
context, context.current['child_policy_rule_sets'])
@log.log
@log.log_method_call
def create_external_policy_postcommit(self, context):
self._handle_prs_added(context)
@log.log
@log.log_method_call
def update_external_policy_postcommit(self, context):
self._handle_prs_updated(context)
@log.log
@log.log_method_call
def delete_external_policy_postcommit(self, context):
self._handle_prs_removed(context)
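Editor's note: besides the decorator rename, this driver's tenant lookup logs its errors with _LE inside excutils.save_and_reraise_exception, which records the message and then re-raises the original exception unless reraise=False was requested. A minimal sketch of that pattern, assuming keystoneclient v2.0 and oslo.utils; the helper is illustrative and uses plain strings where the real code uses _LE:

from keystoneclient import exceptions as k_exceptions
from oslo_log import log as logging
from oslo_utils import excutils

LOG = logging.getLogger(__name__)


def chain_tenant_id(keystone, name, reraise=False):
    # Log inside the context manager; on exit the original exception is
    # re-raised only when reraise=True.
    try:
        return keystone.tenants.find(name=name).id
    except k_exceptions.NotFound:
        with excutils.save_and_reraise_exception(reraise=reraise):
            LOG.error('No tenant with name %s exists.', name)
    except k_exceptions.NoUniqueMatch:
        with excutils.save_and_reraise_exception(reraise=reraise):
            LOG.error('Multiple tenants matches found for %s', name)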


@ -18,6 +18,9 @@ from apic_ml2.neutron.plugins.ml2.drivers.cisco.apic import apic_model
from apic_ml2.neutron.plugins.ml2.drivers.cisco.apic import config
from apicapi import apic_manager
from keystoneclient.v2_0 import client as keyclient
from neutron._i18n import _LE
from neutron._i18n import _LI
from neutron._i18n import _LW
from neutron.agent.linux import dhcp
from neutron.api.v2 import attributes
from neutron.common import constants as n_constants
@ -246,7 +249,7 @@ class ApicMappingDriver(api.ResourceMappingDriver,
port = self._get_port(context, new_id)
ip_owner_info['port'] = port['id']
except n_exc.PortNotFound:
LOG.warning(_("Proxied port %s could not be found"),
LOG.warning(_LW("Proxied port %s could not be found"),
new_id)
return super(ApicMappingDriver, self).update_ip_owner(ip_owner_info)
@ -264,7 +267,7 @@ class ApicMappingDriver(api.ResourceMappingDriver,
port_context = self._core_plugin.get_bound_port_context(
context, port_id, kwargs['host'])
if not port_context:
LOG.warning(_("Device %(device)s requested by agent "
LOG.warning(_LW("Device %(device)s requested by agent "
"%(agent_id)s not found in database"),
{'device': port_id,
'agent_id': kwargs.get('agent_id')})
@ -284,7 +287,7 @@ class ApicMappingDriver(api.ResourceMappingDriver,
ptg, pt = self._port_id_to_ptg(context, port['id'])
switched = True
except n_exc.PortNotFound:
LOG.warning(_("Proxied port %s could not be found"),
LOG.warning(_LW("Proxied port %s could not be found"),
new_id)
l2p = self._network_id_to_l2p(context, port['network_id'])
@ -381,14 +384,14 @@ class ApicMappingDriver(api.ResourceMappingDriver,
host_snat_ips)
ptg = proxied
else:
LOG.info(_("Active master has changed for PT %s"),
LOG.info(_LI("Active master has changed for PT %s"),
pt['id'])
# There's no master mac even if a cluster_id is set.
# Active chain head must have changed in a concurrent
# operation, get out of here
pass
except Exception as e:
LOG.error(_("An exception has occurred while retrieving device "
LOG.error(_LE("An exception has occurred while retrieving device "
"gbp details for %(device)s with error %(error)s"),
{'device': kwargs.get('device'), 'error': e.message})
details = {'device': kwargs.get('device')}
@ -401,7 +404,7 @@ class ApicMappingDriver(api.ResourceMappingDriver,
filters={'name': [HOST_SNAT_POOL],
'network_id': [network['id']]})
if not snat_subnets:
LOG.info(_("Subnet for host-SNAT-pool could not be found "
LOG.info(_LI("Subnet for host-SNAT-pool could not be found "
"for external network %(net_id)s. SNAT will not "
"function on this network"), {'net_id': network['id']})
return {}
@ -428,7 +431,7 @@ class ApicMappingDriver(api.ResourceMappingDriver,
if port and port['fixed_ips'][0]:
snat_ip = port['fixed_ips'][0]['ip_address']
else:
LOG.warning(_("SNAT-port creation failed for subnet "
LOG.warning(_LW("SNAT-port creation failed for subnet "
"%(subnet_id)s on external network "
"%(net_id)s. SNAT will not function on"
"host %(host)s for this network"),
@ -875,7 +878,7 @@ class ApicMappingDriver(api.ResourceMappingDriver,
# parent method the notification will not be done
self._notify_port_update(context._plugin_context, port['id'])
except n_exc.PortNotFound:
LOG.warning(_("Port %s is missing") % context.current['port_id'])
LOG.warning(_LW("Port %s is missing") % context.current['port_id'])
return
def delete_policy_target_group_precommit(self, context):
@ -1344,7 +1347,7 @@ class ApicMappingDriver(api.ResourceMappingDriver,
def process_port_deleted(self, context, port):
# do nothing for floating-ip ports
if port['device_owner'] == n_constants.DEVICE_OWNER_FLOATINGIP:
LOG.debug(_("Ignoring floating-ip port %s") % port['id'])
LOG.debug("Ignoring floating-ip port %s", port['id'])
return
try:
self.gbp_plugin.delete_policy_target(
@ -1478,8 +1481,8 @@ class ApicMappingDriver(api.ResourceMappingDriver,
is_shadow = bool(l3policy_obj)
ext_info = self.apic_manager.ext_net_dict.get(es['name'])
if not ext_info:
LOG.warning(_("External Segment %s is not managed by APIC "
"mapping driver.") % es['id'])
LOG.warning(_LW("External Segment %s is not managed by APIC "
"mapping driver.") % es['id'])
return
pre_existing = (False if is_shadow else self._is_pre_existing(es))
pfx = self._get_shadow_prefix(plugin_context, is_shadow, l3policy_obj)
@ -2518,7 +2521,7 @@ class ApicMappingDriver(api.ResourceMappingDriver,
interface_info)
except n_exc.BadRequest:
self._delete_port(plugin_context, port['id'])
LOG.exception(_("Adding subnet to router with "
LOG.exception(_LE("Adding subnet to router with "
"explicit port failed"))
def _detach_router_from_subnets(self, plugin_context, router_id, sn_ids):
@ -2560,8 +2563,8 @@ class ApicMappingDriver(api.ResourceMappingDriver,
if not (l3out_info.get('vrf_name') and
l3out_info.get('vrf_tenant')):
LOG.warning(
_("External routed network %s doesn't have private "
"network set") % es['name'])
_LW("External routed network %s doesn't have private "
"network set") % es['name'])
return
es_tenant = l3out_info['l3out_tenant']
nat_vrf_name = self.name_mapper.name_mapper.pre_existing(
@ -2596,7 +2599,7 @@ class ApicMappingDriver(api.ResourceMappingDriver,
self.apic_manager.ensure_subnet_created_on_apic(nat_epg_tenant,
nat_bd_name, gw + '/' + plen, transaction=trs)
if not es['subnet_id']:
LOG.warning(_("No associated subnet found for"
LOG.warning(_LW("No associated subnet found for"
"external segment %(es_id)s. SNAT "
"will not function for this network"),
{'es_id': es['id']})
@ -2620,11 +2623,12 @@ class ApicMappingDriver(api.ResourceMappingDriver,
attributes.ATTR_NOT_SPECIFIED,
'dns_nameservers': attributes.ATTR_NOT_SPECIFIED,
'host_routes':
attributes.ATTR_NOT_SPECIFIED}
attributes.ATTR_NOT_SPECIFIED,
'tenant_id': es['tenant_id']}
subnet = self._create_subnet(context._plugin_context,
attrs)
if not subnet:
LOG.warning(_("Subnet %(pool) creation failed for "
LOG.warning(_LW("Subnet %(pool) creation failed for "
"external network %(net_id)s. SNAT "
"will not function for this network"),
{'pool': HOST_SNAT_POOL,
@ -2656,8 +2660,8 @@ class ApicMappingDriver(api.ResourceMappingDriver,
if not (l3out_info.get('vrf_name') and
l3out_info.get('vrf_tenant')):
LOG.warning(
_("External routed network %s doesn't have private "
"network set") % es['name'])
_LW("External routed network %s doesn't have private "
"network set") % es['name'])
return
es_tenant = l3out_info['l3out_tenant']
nat_vrf_name = self.name_mapper.name_mapper.pre_existing(
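
Note: the apic_mapping changes above follow the oslo.i18n log-marker convention: plain _() stays on user-facing exception messages, log calls switch to the level-specific markers from neutron._i18n, and debug messages drop translation entirely (as in the process_port_deleted change). A small sketch of the convention, with an illustrative helper name:

    from neutron._i18n import _
    from neutron._i18n import _LI
    from neutron._i18n import _LW
    from oslo_log import log as logging

    LOG = logging.getLogger(__name__)


    def find_port(port_id, ports):
        LOG.info(_LI("Looking up port %s"), port_id)         # LOG.info    -> _LI
        if port_id not in ports:
            LOG.warning(_LW("Port %s is missing"), port_id)  # LOG.warning -> _LW
            raise KeyError(_("Port not found."))             # exceptions keep _()
        return ports[port_id]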

View File

@ -10,8 +10,8 @@
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import auth as ks_auth
from keystoneclient import session as ks_session
from keystoneauth1 import loading as ks_loading
from neutron._i18n import _LW
from neutron.notifiers import nova as n_nova
from novaclient import client as nclient
from novaclient import exceptions as nova_exceptions
@ -21,40 +21,24 @@ from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class NovaClient:
class NovaClient(object):
def __init__(self):
auth = ks_auth.load_from_conf_options(cfg.CONF, 'nova')
endpoint_override = None
if not auth:
if cfg.CONF.nova_admin_tenant_id:
endpoint_override = "%s/%s" % (cfg.CONF.nova_url,
cfg.CONF.nova_admin_tenant_id)
auth = n_nova.DefaultAuthPlugin(
auth_url=cfg.CONF.nova_admin_auth_url,
username=cfg.CONF.nova_admin_username,
password=cfg.CONF.nova_admin_password,
tenant_id=cfg.CONF.nova_admin_tenant_id,
tenant_name=cfg.CONF.nova_admin_tenant_name,
endpoint_override=endpoint_override)
session = ks_session.Session.load_from_conf_options(
auth = ks_loading.load_auth_from_conf_options(cfg.CONF, 'nova')
session = ks_loading.load_session_from_conf_options(
cfg.CONF, 'nova', auth=auth)
novaclient_cls = nclient.get_client_class(n_nova.NOVA_API_VERSION)
self.nclient = novaclient_cls(
session=session,
region_name=cfg.CONF.nova.region_name)
self.nclient = nclient.Client(
n_nova.NOVA_API_VERSION, session=session,
region_name=cfg.CONF.nova.region_name,
endpoint_type=cfg.CONF.nova.endpoint_type)
def get_server(self, server_id):
try:
return self.client.servers.get(server_id)
except nova_exceptions.NotFound:
LOG.warning(_("Nova returned NotFound for server: %s"),
LOG.warning(_LW("Nova returned NotFound for server: %s"),
server_id)
except Exception as e:
LOG.exception(e)
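
Note: the NovaClient rewrite above drops the deprecated keystoneclient loaders (and the nova_admin_* fallback path) in favour of keystoneauth1.loading, and builds the client with nclient.Client(version, ...) instead of the removed get_client_class(). A condensed sketch of the new construction path, assuming the [nova] option group that neutron's nova notifier registers (the function name is illustrative):

    from keystoneauth1 import loading as ks_loading
    from neutron.notifiers import nova as n_nova
    from novaclient import client as nclient
    from oslo_config import cfg


    def build_nova_client(conf=cfg.CONF):
        # The [nova] auth and session options are the ones neutron already
        # registers for its own nova notifier; no extra options are needed.
        auth = ks_loading.load_auth_from_conf_options(conf, 'nova')
        session = ks_loading.load_session_from_conf_options(conf, 'nova',
                                                            auth=auth)
        return nclient.Client(n_nova.NOVA_API_VERSION,
                              session=session,
                              region_name=conf.nova.region_name,
                              endpoint_type=conf.nova.endpoint_type)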

View File

@ -10,7 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import log
from oslo_log import helpers as log
from gbpservice.neutron.services.grouppolicy import (
group_policy_driver_api as api)
@ -18,222 +18,222 @@ from gbpservice.neutron.services.grouppolicy import (
class NoopDriver(api.PolicyDriver):
@log.log
@log.log_method_call
def initialize(self):
pass
@log.log
@log.log_method_call
def create_policy_target_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_target_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_target_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_target_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_target_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_target_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_target_group_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_target_group_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_target_group_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_target_group_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_target_group_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_target_group_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_l2_policy_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_l2_policy_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_l2_policy_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_l2_policy_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_l2_policy_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_l2_policy_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_l3_policy_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_l3_policy_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_l3_policy_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_l3_policy_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_l3_policy_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_l3_policy_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_network_service_policy_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_network_service_policy_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_network_service_policy_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_network_service_policy_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_network_service_policy_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_network_service_policy_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_classifier_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_classifier_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_classifier_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_classifier_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_classifier_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_classifier_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_action_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_action_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_action_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_action_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_action_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_action_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_rule_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_rule_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_rule_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_rule_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_rule_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_rule_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_rule_set_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_rule_set_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_rule_set_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_rule_set_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_rule_set_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_rule_set_postcommit(self, context):
pass

View File

@ -10,9 +10,11 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import log
from neutron._i18n import _LI
from neutron._i18n import _LW
from neutron.db import model_base
from oslo_config import cfg
from oslo_log import helpers as log
from oslo_log import log as logging
from oslo_utils import excutils
import sqlalchemy as sa
@ -85,7 +87,7 @@ class ImplicitPolicyDriver(api.PolicyDriver, local_api.LocalAPI):
when the default value of None is specified.
"""
@log.log
@log.log_method_call
def initialize(self):
gpip = cfg.CONF.group_policy_implicit_policy
gpconf = cfg.CONF.group_policy
@ -101,12 +103,12 @@ class ImplicitPolicyDriver(api.PolicyDriver, local_api.LocalAPI):
gpproxy.default_proxy_subnet_prefix_length)
self._default_es_name = gpip.default_external_segment_name
@log.log
@log.log_method_call
def create_policy_target_group_postcommit(self, context):
if not context.current['l2_policy_id']:
self._use_implicit_l2_policy(context)
@log.log
@log.log_method_call
def update_policy_target_group_postcommit(self, context):
old_l2p_id = context.original['l2_policy_id']
new_l2p_id = context.current['l2_policy_id']
@ -115,17 +117,17 @@ class ImplicitPolicyDriver(api.PolicyDriver, local_api.LocalAPI):
if not new_l2p_id:
self._use_implicit_l2_policy(context)
@log.log
@log.log_method_call
def delete_policy_target_group_postcommit(self, context):
l2p_id = context.current['l2_policy_id']
self._cleanup_l2_policy(context, l2p_id)
@log.log
@log.log_method_call
def create_l2_policy_postcommit(self, context):
if not context.current['l3_policy_id']:
self._use_implicit_l3_policy(context)
@log.log
@log.log_method_call
def update_l2_policy_postcommit(self, context):
old_l3p_id = context.original['l3_policy_id']
new_l3p_id = context.current['l3_policy_id']
@ -134,12 +136,12 @@ class ImplicitPolicyDriver(api.PolicyDriver, local_api.LocalAPI):
if not new_l3p_id:
self._use_implicit_l3_policy(context)
@log.log
@log.log_method_call
def delete_l2_policy_postcommit(self, context):
l3p_id = context.current['l3_policy_id']
self._cleanup_l3_policy(context, l3p_id)
@log.log
@log.log_method_call
def create_external_segment_precommit(self, context):
# REVISIT(ivar): find a better way to retrieve the default ES
if self._default_es_name == context.current['name']:
@ -150,16 +152,16 @@ class ImplicitPolicyDriver(api.PolicyDriver, local_api.LocalAPI):
raise exc.DefaultExternalSegmentAlreadyExists(
es_name=self._default_es_name)
@log.log
@log.log_method_call
def create_external_policy_postcommit(self, context):
if not context.current['external_segments']:
self._use_implicit_external_segment(context)
@log.log
@log.log_method_call
def update_external_policy_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_l3_policy_precommit(self, context):
if self._default_l3p_name == context.current['name']:
LOG.debug("Creating default L3 policy: %s", context.current)
@ -173,12 +175,12 @@ class ImplicitPolicyDriver(api.PolicyDriver, local_api.LocalAPI):
raise exc.DefaultL3PolicyAlreadyExists(
l3p_name=self._default_l3p_name)
@log.log
@log.log_method_call
def create_l3_policy_postcommit(self, context):
if not context.current['external_segments']:
self._use_implicit_external_segment(context)
@log.log
@log.log_method_call
def update_l3_policy_postcommit(self, context):
pass
@ -207,8 +209,9 @@ class ImplicitPolicyDriver(api.PolicyDriver, local_api.LocalAPI):
try:
self._delete_l2_policy(context._plugin_context, l2p_id)
except gbp_ext.L2PolicyInUse:
LOG.info(_("Cannot delete implicit L2 Policy %s because it's "
"in use."), l2p_id)
LOG.info(_LI(
"Cannot delete implicit L2 Policy %s because it's "
"in use."), l2p_id)
def _use_implicit_l3_policy(self, context):
tenant_id = context.current['tenant_id']
@ -243,16 +246,17 @@ class ImplicitPolicyDriver(api.PolicyDriver, local_api.LocalAPI):
filter)
l3p = l3ps and l3ps[0]
if not l3p:
LOG.warning(_("Caught DefaultL3PolicyAlreadyExists, "
"but default L3 policy not concurrently "
"created for tenant %s"), tenant_id)
LOG.warning(_LW(
"Caught DefaultL3PolicyAlreadyExists, "
"but default L3 policy not concurrently "
"created for tenant %s"), tenant_id)
ctxt.reraise = True
except exc.OverlappingIPPoolsInSameTenantNotAllowed:
with excutils.save_and_reraise_exception():
LOG.info(_("Caught "
"OverlappingIPPoolsinSameTenantNotAllowed "
"during creation of default L3 policy for "
"tenant %s"), tenant_id)
LOG.info(_LI("Caught "
"OverlappingIPPoolsinSameTenantNotAllowed "
"during creation of default L3 policy for "
"tenant %s"), tenant_id)
context.current['l3_policy_id'] = l3p['id']
context.set_l3_policy_id(l3p['id'])

View File

@ -13,6 +13,7 @@
import requests
from requests import auth
from neutron._i18n import _LI
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
@ -52,13 +53,13 @@ class OdlManager(object):
def __init__(self):
LOG.info(_("Configured ODL username: %s"),
LOG.info(_LI("Configured ODL username: %s"),
cfg.CONF.odl_driver.odl_username)
LOG.info(_("Configured ODL password: %s"),
LOG.info(_LI("Configured ODL password: %s"),
cfg.CONF.odl_driver.odl_password)
LOG.info(_("Configured ODL host: %s"),
LOG.info(_LI("Configured ODL host: %s"),
cfg.CONF.odl_driver.odl_host)
LOG.info(_("Configured ODL port: %s"),
LOG.info(_LI("Configured ODL port: %s"),
cfg.CONF.odl_driver.odl_port)
self._username = cfg.CONF.odl_driver.odl_username

View File

@ -13,16 +13,18 @@
import netaddr
import operator
from neutron._i18n import _LE
from neutron._i18n import _LW
from neutron.api.v2 import attributes
from neutron.common import constants as const
from neutron.common import exceptions as n_exc
from neutron.common import log
from neutron import context as n_context
from neutron.db import model_base
from neutron.db import models_v2
from neutron.extensions import l3 as ext_l3
from neutron.extensions import securitygroup as ext_sg
from oslo_config import cfg
from oslo_log import helpers as log
from oslo_log import log as logging
import sqlalchemy as sa
@ -116,7 +118,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
policy resources to various other neutron resources.
"""
@log.log
@log.log_method_call
def initialize(self):
self._cached_agent_notifier = None
@ -197,7 +199,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
router_id=router_id,
tenant_id=context.current['tenant_id'])
@log.log
@log.log_method_call
def create_policy_target_precommit(self, context):
self._validate_cluster_id(context)
if not context.current['policy_target_group_id']:
@ -222,7 +224,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
if pts:
exc.OnlyOneGroupDefaultGatewayAllowed(group_id=group_id)
@log.log
@log.log_method_call
def create_policy_target_postcommit(self, context):
if not context.current['port_id']:
self._use_implicit_port(context)
@ -281,9 +283,9 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
context, l2_policy_id)
fip_ids = []
if not external_segments:
LOG.error(_("Network Service Policy to allocate Floating IP "
"could not be applied because l3policy does "
"not have an attached external segment"))
LOG.error(_LE("Network Service Policy to allocate Floating IP "
"could not be applied because l3policy does "
"not have an attached external segment"))
return fip_ids
tenant_id = context.current['tenant_id']
@ -325,7 +327,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
no_subnet_pools = []
break
except n_exc.IpAddressGenerationFailure as ex:
LOG.warning(_("Floating allocation failed: %s"),
LOG.warning(_LW("Floating allocation failed: %s"),
ex.message)
for nat_pool in no_subnet_pools:
# Use old allocation method
@ -334,32 +336,32 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
context._plugin_context, tenant_id, ext_net_id, fixed_port)
break
except n_exc.IpAddressGenerationFailure as ex:
LOG.warning(_("Floating allocation failed: %s"),
LOG.warning(_LW("Floating allocation failed: %s"),
ex.message)
return fip_id
@log.log
@log.log_method_call
def update_policy_target_precommit(self, context):
self._validate_cluster_id(context)
if (context.current['policy_target_group_id'] !=
context.original['policy_target_group_id']):
raise exc.PolicyTargetGroupUpdateOfPolicyTargetNotSupported()
@log.log
@log.log_method_call
def update_policy_target_postcommit(self, context):
if context.current['cluster_id'] != context.original['cluster_id']:
self._update_cluster_membership(
context, new_cluster_id=context.current['cluster_id'],
old_cluster_id=context.original['cluster_id'])
@log.log
@log.log_method_call
def delete_policy_target_precommit(self, context):
self._validate_pt_in_use_by_cluster(context)
context.fips = self._get_pt_floating_ip_mapping(
context._plugin_context.session,
context.current['id'])
@log.log
@log.log_method_call
def delete_policy_target_postcommit(self, context):
sg_list = self._generate_list_of_sg_from_ptg(
context, context.current['policy_target_group_id'])
@ -373,14 +375,14 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
self._unset_proxy_gateway_routes(context, context.current)
self._cleanup_port(context._plugin_context, port_id)
@log.log
@log.log_method_call
def create_policy_target_group_precommit(self, context):
self._reject_cross_tenant_ptg_l2p(context)
self._validate_ptg_subnets(context)
self._validate_nat_pool_for_nsp(context)
self._validate_proxy_ptg(context)
@log.log
@log.log_method_call
def create_policy_target_group_postcommit(self, context):
# REVISIT(ivar) this validates the PTG L2P after the IPD creates it
# (which happens in the postcommit phase)
@ -454,9 +456,10 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
filters={'name': [
gpip.default_external_segment_name]}))
if not external_segments:
LOG.error(_("Network Service Policy to allocate Floating "
"IP could not be associated because l3policy "
"does not have an attached external segment"))
LOG.error(_LE(
"Network Service Policy to allocate Floating "
"IP could not be associated because l3policy "
"does not have an attached external segment"))
raise exc.NSPRequiresES()
for es in external_segments:
if not es['nat_pools']:
@ -478,9 +481,9 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
free_ip = self._get_last_free_ip(context._plugin_context,
context.current['subnets'])
if not free_ip:
LOG.error(_("Reserving IP Addresses failed for Network "
"Service Policy. No more IP Addresses on "
"subnet"))
LOG.error(_LE("Reserving IP Addresses failed for Network "
"Service Policy. No more IP Addresses on "
"subnet"))
return
# TODO(Magesh):Fetch subnet from PTG to which NSP is attached
self._remove_ip_from_allocation_pool(
@ -550,7 +553,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
self._delete_pt_floating_ip_mapping(
context._plugin_context.session, pt)
@log.log
@log.log_method_call
def update_policy_target_group_precommit(self, context):
if set(context.original['subnets']) - set(context.current['subnets']):
raise exc.PolicyTargetGroupSubnetRemovalNotSupported()
@ -561,7 +564,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
context.original['network_service_policy_id']):
self._validate_nat_pool_for_nsp(context)
@log.log
@log.log_method_call
def update_policy_target_group_postcommit(self, context):
# Three conditions where SG association needs to be changed
# (a) list of policy_targets change
@ -639,14 +642,14 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
if new_nsp:
self._handle_network_service_policy(context)
@log.log
@log.log_method_call
def delete_policy_target_group_precommit(self, context):
context.nsp_cleanup_ipaddress = self._get_ptg_policy_ipaddress_mapping(
context._plugin_context.session, context.current['id'])
context.nsp_cleanup_fips = self._get_ptg_policy_fip_mapping(
context._plugin_context.session, context.current['id'])
@log.log
@log.log_method_call
def delete_policy_target_group_postcommit(self, context):
self._cleanup_network_service_policy(context,
context.current,
@ -678,7 +681,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
self._stitch_ptg_to_l3p(context, proxied, l3p, proxied['subnets'])
@log.log
@log.log_method_call
def create_l2_policy_precommit(self, context):
self._reject_cross_tenant_l2p_l3p(context)
self._reject_non_shared_net_on_shared_l2p(context)
@ -686,12 +689,12 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
if not context.current['inject_default_route']:
raise exc.UnsettingInjectDefaultRouteOfL2PolicyNotSupported()
@log.log
@log.log_method_call
def create_l2_policy_postcommit(self, context):
if not context.current['network_id']:
self._use_implicit_network(context)
@log.log
@log.log_method_call
def update_l2_policy_precommit(self, context):
if (context.current['inject_default_route'] !=
context.original['inject_default_route']):
@ -702,20 +705,20 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
self._reject_cross_tenant_l2p_l3p(context)
self._reject_non_shared_net_on_shared_l2p(context)
@log.log
@log.log_method_call
def update_l2_policy_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_l2_policy_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_l2_policy_postcommit(self, context):
network_id = context.current['network_id']
self._cleanup_network(context._plugin_context, network_id)
@log.log
@log.log_method_call
def create_l3_policy_precommit(self, context):
curr = context.current
if len(curr['routers']) > 1:
@ -745,7 +748,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
raise exc.MultipleESPerL3PolicyNotSupported()
self._reject_invalid_router_access(context)
@log.log
@log.log_method_call
def create_l3_policy_postcommit(self, context):
if not context.current['routers']:
self._use_implicit_router(context)
@ -756,7 +759,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
self._set_l3p_external_routes(context)
self._process_new_l3p_ip_pool(context, context.current['ip_pool'])
@log.log
@log.log_method_call
def update_l3_policy_precommit(self, context):
if context.current['routers'] != context.original['routers']:
raise exc.L3PolicyRoutersUpdateNotSupported()
@ -767,7 +770,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
self._reject_invalid_router_access(context)
self._validate_in_use_by_nsp(context)
@log.log
@log.log_method_call
def update_l3_policy_postcommit(self, context):
new, old = context.current, context.original
if new['external_segments'] != old['external_segments']:
@ -786,29 +789,29 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
for x in added))
self._set_l3p_external_routes(context, removed=removed)
@log.log
@log.log_method_call
def delete_l3_policy_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_l3_policy_postcommit(self, context):
for router_id in context.current['routers']:
self._cleanup_router(context._plugin_context, router_id)
self._process_remove_l3p_ip_pool(context, context.current['ip_pool'])
@log.log
@log.log_method_call
def create_policy_classifier_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_classifier_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_classifier_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_classifier_postcommit(self, context):
policy_rules = (context._plugin.get_policy_classifier(
context._plugin_context,
@ -825,51 +828,51 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
self._update_policy_rule_sg_rules(context, pr_sets,
policy_rule, context.original, context.current)
@log.log
@log.log_method_call
def delete_policy_classifier_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_classifier_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_action_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_action_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_action_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_action_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_action_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_policy_action_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_rule_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_policy_rule_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_rule_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_policy_rule_postcommit(self, context):
old_classifier_id = context.original['policy_classifier_id']
new_classifier_id = context.current['policy_classifier_id']
@ -887,7 +890,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
self._apply_policy_rule_set_rules(context, prs,
[context.current])
@log.log
@log.log_method_call
def delete_policy_rule_precommit(self, context):
# REVISIT(ivar): This will be removed once navigability issue is
# solved (bug/1384397)
@ -895,18 +898,18 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
context._plugin._get_policy_rule_policy_rule_sets(
context._plugin_context, context.current['id']))
@log.log
@log.log_method_call
def delete_policy_rule_postcommit(self, context):
for prs in context._plugin.get_policy_rule_sets(
context._plugin_context,
filters={'id': context.current['policy_rule_sets']}):
self._remove_policy_rule_set_rules(context, prs, [context.current])
@log.log
@log.log_method_call
def create_policy_rule_set_precommit(self, context):
self._reject_shared(context.current, 'policy_rule_set')
@log.log
@log.log_method_call
def create_policy_rule_set_postcommit(self, context):
# creating SGs
policy_rule_set_id = context.current['id']
@ -925,11 +928,11 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
self._recompute_policy_rule_sets(
context, context.current['child_policy_rule_sets'])
@log.log
@log.log_method_call
def update_policy_rule_set_precommit(self, context):
self._reject_shared(context.current, 'policy_rule_set')
@log.log
@log.log_method_call
def update_policy_rule_set_postcommit(self, context):
# Update policy_rule_set rules
old_rules = set(context.original['policy_rules'])
@ -949,14 +952,14 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
set(context.current['child_policy_rule_sets']))
self._recompute_policy_rule_sets(context, to_recompute)
@log.log
@log.log_method_call
def delete_policy_rule_set_precommit(self, context):
mapping = self._get_policy_rule_set_sg_mapping(
context._plugin_context.session, context.current['id'])
context._rmd_sg_list_temp = [mapping['provided_sg_id'],
mapping['consumed_sg_id']]
@log.log
@log.log_method_call
def delete_policy_rule_set_postcommit(self, context):
# Disassociate SGs
sg_list = context._rmd_sg_list_temp
@ -971,7 +974,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
for sg in sg_list:
self._delete_sg(context._plugin_context, sg)
@log.log
@log.log_method_call
def create_network_service_policy_precommit(self, context):
self._validate_nsp_parameters(context)
@ -1333,7 +1336,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
context.set_port_id(port_id)
return
except n_exc.IpAddressGenerationFailure as ex:
LOG.warning(_("No more address available in subnet %s"),
LOG.warning(_LW("No more address available in subnet %s"),
subnet['id'])
last = ex
raise last
@ -1343,7 +1346,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
try:
self._delete_port(plugin_context, port_id)
except n_exc.PortNotFound:
LOG.warning(_("Port %s is missing") % port_id)
LOG.warning(_LW("Port %s is missing") % port_id)
def _plug_router_to_external_segment(self, context, es_dict):
es_list = context._plugin.get_external_segments(
@ -1537,7 +1540,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
self._add_router_interface(plugin_context, router_id,
interface_info)
except n_exc.BadRequest:
LOG.exception(_("Adding subnet to router failed"))
LOG.exception(_LE("Adding subnet to router failed"))
raise exc.GroupPolicyInternalError()
def _generate_subnets_from_cidrs(self, context, l2p, l3p, cidrs,
@ -1578,7 +1581,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
context._plugin_context, subnet_id, router_id)
except n_exc.InvalidInput:
# This exception is not expected.
LOG.exception(_("adding subnet to router failed"))
LOG.exception(_LE("adding subnet to router failed"))
for subnet_id in subnet_ids:
self._delete_subnet(context._plugin_context, subnet_id)
raise exc.GroupPolicyInternalError()
@ -1627,7 +1630,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
except n_exc.InvalidInput:
# This exception is not expected.
# TODO(ivar): find a better way to rollback
LOG.exception(_("adding subnet to router failed"))
LOG.exception(_LE("adding subnet to router failed"))
for subnet_id in subnet_ids:
self._delete_subnet(context._plugin_context, subnet_id)
raise exc.GroupPolicyInternalError()
@ -1916,7 +1919,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
pt = context._plugin.get_policy_target(context._plugin_context,
pt_id)
except gp_ext.PolicyTargetNotFound:
LOG.warning(_("PT %s doesn't exist anymore"), pt_id)
LOG.warning(_LW("PT %s doesn't exist anymore"), pt_id)
return
try:
port_id = pt['port_id']
@ -1926,14 +1929,14 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
port[ext_sg.SECURITYGROUPS] = new_sg_list
self._update_port(context._plugin_context, port_id, port)
except n_exc.PortNotFound:
LOG.warning(_("Port %s is missing") % port_id)
LOG.warning(_LW("Port %s is missing") % port_id)
def _disassoc_sgs_from_pt(self, context, pt_id, sg_list):
try:
pt = context._plugin.get_policy_target(context._plugin_context,
pt_id)
except gp_ext.PolicyTargetNotFound:
LOG.warning(_("PT %s doesn't exist anymore"), pt_id)
LOG.warning(_LW("PT %s doesn't exist anymore"), pt_id)
return
port_id = pt['port_id']
self._disassoc_sgs_from_port(context._plugin_context, port_id, sg_list)
@ -1946,7 +1949,7 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
port[ext_sg.SECURITYGROUPS] = new_sg_list
self._update_port(plugin_context, port_id, port)
except n_exc.PortNotFound:
LOG.warning(_("Port %s is missing") % port_id)
LOG.warning(_LW("Port %s is missing") % port_id)
def _generate_list_of_sg_from_ptg(self, context, ptg_id):
ptg = context._plugin.get_policy_target_group(
@ -2540,6 +2543,11 @@ class ResourceMappingDriver(api.PolicyDriver, local_api.LocalAPI,
def _update_cluster_membership(self, context, new_cluster_id=None,
old_cluster_id=None):
# REVISIT: The following private attribute definition should not
# be used since the Neutron plugin contract only requires definition
# of the "supported_extension_aliases" property. This currently works
# since the ML2 plugin also defines the private property
# "_supported_extensions_aliases".
if ("allowed-address-pairs" in
self._core_plugin._supported_extension_aliases):
curr_port = self._get_port(
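
Note: the REVISIT added above flags that _supported_extension_aliases is ML2-private and only happens to exist today. One possible direction consistent with that note, sketched here rather than taken from the commit, is to key off the public property instead:

    def supports_address_pairs(core_plugin):
        # Prefer the public attribute the REVISIT points at; getattr()
        # keeps a defensive default for plugins that do not define it
        # (an assumption, not something this commit changes).
        aliases = getattr(core_plugin, 'supported_extension_aliases', [])
        return 'allowed-address-pairs' in aliases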

View File

@ -11,6 +11,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron._i18n import _LE
from neutron._i18n import _LI
from neutron.common import exceptions as n_exc
from oslo_config import cfg
from oslo_log import log
@ -30,14 +32,14 @@ class ExtensionManager(stevedore.named.NamedExtensionManager):
# the order in which the drivers are called.
self.ordered_ext_drivers = []
LOG.info(_("Configured extension driver names: %s"),
LOG.info(_LI("Configured extension driver names: %s"),
cfg.CONF.group_policy.extension_drivers)
super(ExtensionManager, self).__init__(
'gbpservice.neutron.group_policy.extension_drivers',
cfg.CONF.group_policy.extension_drivers,
invoke_on_load=True,
name_order=True)
LOG.info(_("Loaded extension driver names: %s"), self.names())
LOG.info(_LI("Loaded extension driver names: %s"), self.names())
self._register_drivers()
def _register_drivers(self):
@ -48,13 +50,13 @@ class ExtensionManager(stevedore.named.NamedExtensionManager):
"""
for ext in self:
self.ordered_ext_drivers.append(ext)
LOG.info(_("Registered extension drivers: %s"),
LOG.info(_LI("Registered extension drivers: %s"),
[driver.name for driver in self.ordered_ext_drivers])
def initialize(self):
# Initialize each driver in the list.
for driver in self.ordered_ext_drivers:
LOG.info(_("Initializing extension driver '%s'"), driver.name)
LOG.info(_LI("Initializing extension driver '%s'"), driver.name)
driver.obj.initialize()
def extension_aliases(self):
@ -62,7 +64,7 @@ class ExtensionManager(stevedore.named.NamedExtensionManager):
for driver in self.ordered_ext_drivers:
alias = driver.obj.extension_alias
exts.append(alias)
LOG.info(_("Got %(alias)s extension from driver '%(drv)s'"),
LOG.info(_LI("Got %(alias)s extension from driver '%(drv)s'"),
{'alias': alias, 'drv': driver.name})
return exts
@ -76,7 +78,7 @@ class ExtensionManager(stevedore.named.NamedExtensionManager):
raise
except Exception:
LOG.exception(
_("Extension driver '%(name)s' failed in %(method)s"),
_LE("Extension driver '%(name)s' failed in %(method)s"),
{'name': driver.name, 'method': method_name}
)
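
Note: both this ExtensionManager and the PolicyDriverManager later in the commit are thin wrappers over stevedore's NamedExtensionManager; the ordering behaviour that matters is captured in this small standalone sketch (the function name and commented usage are illustrative):

    from stevedore import named


    def load_ordered_drivers(namespace, names):
        # invoke_on_load=True instantiates each driver immediately;
        # name_order=True preserves the order of `names` (the configured
        # order) instead of stevedore's discovery order.
        manager = named.NamedExtensionManager(namespace,
                                              names,
                                              invoke_on_load=True,
                                              name_order=True)
        return [ext.obj for ext in manager]


    # Hypothetical usage mirroring the managers in this commit:
    # drivers = load_ordered_drivers(
    #     'gbpservice.neutron.group_policy.extension_drivers',
    #     cfg.CONF.group_policy.extension_drivers)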

View File

@ -12,12 +12,14 @@
import netaddr
from neutron._i18n import _LE
from neutron._i18n import _LW
from neutron.api.v2 import attributes as nattr
from neutron.common import log
from neutron import context as n_ctx
from neutron.extensions import portbindings
from neutron import manager as n_manager
from neutron.plugins.common import constants as pconst
from oslo_log import helpers as log
from oslo_log import log as logging
from oslo_utils import excutils
@ -65,7 +67,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
plugins = n_manager.NeutronManager.get_service_plugins()
servicechain_plugin = plugins.get(pconst.SERVICECHAIN)
if not servicechain_plugin:
LOG.error(_("No Servicechain service plugin found."))
LOG.error(_LE("No Servicechain service plugin found."))
raise gp_exc.GroupPolicyDeploymentError()
return servicechain_plugin
@ -328,7 +330,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
else:
return result
@log.log
@log.log_method_call
def create_policy_target(self, context, policy_target):
session = context.session
with session.begin(subtransactions=True):
@ -348,8 +350,8 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("create_policy_target_postcommit "
"failed, deleting policy_target %s"),
LOG.exception(_LE("create_policy_target_postcommit "
"failed, deleting policy_target %s"),
result['id'])
self.delete_policy_target(context, result['id'])
@ -357,7 +359,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
result.pop('port_attributes', None)
return result
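
Note: every create_*() in this plugin follows the rollback idiom shown in the hunk above: run the driver postcommit, and on failure log with _LE, delete the half-created resource, and re-raise through oslo.utils. A generic sketch of that idiom (the helper name and callables are illustrative, not from the commit):

    from neutron._i18n import _LE
    from oslo_log import log as logging
    from oslo_utils import excutils

    LOG = logging.getLogger(__name__)


    def create_with_rollback(create_db, postcommit, delete, context, data):
        result = create_db(context, data)
        try:
            postcommit(context, result)
        except Exception:
            # save_and_reraise_exception() re-raises the original exception
            # after the cleanup below runs, so callers still see the failure.
            with excutils.save_and_reraise_exception():
                LOG.exception(_LE("postcommit failed, deleting resource %s"),
                              result['id'])
                delete(context, result['id'])
        return result
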
@log.log
@log.log_method_call
def update_policy_target(self, context, policy_target_id, policy_target):
session = context.session
with session.begin(subtransactions=True):
@ -381,7 +383,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
return updated_policy_target
@log.log
@log.log_method_call
def delete_policy_target(self, context, policy_target_id):
session = context.session
with session.begin(subtransactions=True):
@ -397,8 +399,8 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
self.policy_driver_manager.delete_policy_target_postcommit(
policy_context)
except Exception:
LOG.exception(_("delete_policy_target_postcommit failed "
"for policy_target %s"),
LOG.exception(_LE("delete_policy_target_postcommit failed "
"for policy_target %s"),
policy_target_id)
def get_policy_target(self, context, policy_target_id, fields=None):
@ -425,7 +427,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
filtered_results.append(filtered)
return [self._fields(result, fields) for result in filtered_results]
@log.log
@log.log_method_call
def create_policy_target_group(self, context, policy_target_group):
session = context.session
with session.begin(subtransactions=True):
@ -446,14 +448,14 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("create_policy_target_group_postcommit "
"failed, deleting policy_target_group %s"),
LOG.exception(_LE("create_policy_target_group_postcommit "
"failed, deleting policy_target_group %s"),
result['id'])
self.delete_policy_target_group(context, result['id'])
return result
@log.log
@log.log_method_call
def update_policy_target_group(self, context, policy_target_group_id,
policy_target_group):
session = context.session
@ -492,7 +494,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
return updated_policy_target_group
@log.log
@log.log_method_call
def delete_policy_target_group(self, context, policy_target_group_id):
session = context.session
with session.begin(subtransactions=True):
@ -523,7 +525,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
self.delete_policy_target_group(
context, policy_target_group['proxy_group_id'])
except gpex.PolicyTargetGroupNotFound:
LOG.warning(_('PTG %s already deleted'),
LOG.warning(_LW('PTG %s already deleted'),
policy_target_group['proxy_group_id'])
with session.begin(subtransactions=True):
@ -541,8 +543,8 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
self.policy_driver_manager.delete_policy_target_group_postcommit(
policy_context)
except Exception:
LOG.exception(_("delete_policy_target_group_postcommit failed "
"for policy_target_group %s"),
LOG.exception(_LE("delete_policy_target_group_postcommit failed "
"for policy_target_group %s"),
policy_target_group_id)
def get_policy_target_group(self, context, policy_target_group_id,
@ -571,7 +573,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
filtered_results.append(filtered)
return [self._fields(result, fields) for result in results]
@log.log
@log.log_method_call
def create_l2_policy(self, context, l2_policy):
session = context.session
with session.begin(subtransactions=True):
@ -589,13 +591,14 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("create_l2_policy_postcommit "
"failed, deleting l2_policy %s"), result['id'])
LOG.exception(_LE("create_l2_policy_postcommit "
"failed, deleting l2_policy %s"),
result['id'])
self.delete_l2_policy(context, result['id'])
return result
@log.log
@log.log_method_call
def update_l2_policy(self, context, l2_policy_id, l2_policy):
session = context.session
with session.begin(subtransactions=True):
@ -617,7 +620,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
return updated_l2_policy
@log.log
@log.log_method_call
def delete_l2_policy(self, context, l2_policy_id):
session = context.session
with session.begin(subtransactions=True):
@ -633,9 +636,8 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
self.policy_driver_manager.delete_l2_policy_postcommit(
policy_context)
except Exception:
LOG.exception(_("delete_l2_policy_postcommit failed "
"for l2_policy %s"),
l2_policy_id)
LOG.exception(_LE("delete_l2_policy_postcommit failed "
"for l2_policy %s"), l2_policy_id)
def get_l2_policy(self, context, l2_policy_id, fields=None):
session = context.session
@ -661,7 +663,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
filtered_results.append(filtered)
return [self._fields(result, fields) for result in results]
@log.log
@log.log_method_call
def create_network_service_policy(self, context, network_service_policy):
session = context.session
with session.begin(subtransactions=True):
@ -683,14 +685,15 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("create_network_service_policy_postcommit "
"failed, deleting network_service_policy %s"),
result['id'])
LOG.exception(_LE(
"create_network_service_policy_postcommit "
"failed, deleting network_service_policy %s"),
result['id'])
self.delete_network_service_policy(context, result['id'])
return result
@log.log
@log.log_method_call
def update_network_service_policy(self, context, network_service_policy_id,
network_service_policy):
session = context.session
@ -718,7 +721,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
return updated_network_service_policy
@log.log
@log.log_method_call
def delete_network_service_policy(
self, context, network_service_policy_id):
session = context.session
@ -736,9 +739,9 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
pdm = self.policy_driver_manager
pdm.delete_network_service_policy_postcommit(policy_context)
except Exception:
LOG.exception(_("delete_network_service_policy_postcommit failed "
"for network_service_policy %s"),
network_service_policy_id)
LOG.exception(_LE(
"delete_network_service_policy_postcommit failed "
"for network_service_policy %s"), network_service_policy_id)
def get_network_service_policy(self, context, network_service_policy_id,
fields=None):
@ -767,7 +770,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
filtered_results.append(filtered)
return [self._fields(result, fields) for result in results]
@log.log
@log.log_method_call
def create_l3_policy(self, context, l3_policy):
session = context.session
with session.begin(subtransactions=True):
@ -787,13 +790,14 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("create_l3_policy_postcommit "
"failed, deleting l3_policy %s"), result['id'])
LOG.exception(_LE("create_l3_policy_postcommit "
"failed, deleting l3_policy %s"),
result['id'])
self.delete_l3_policy(context, result['id'])
return result
@log.log
@log.log_method_call
def update_l3_policy(self, context, l3_policy_id, l3_policy):
session = context.session
with session.begin(subtransactions=True):
@ -817,7 +821,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
return updated_l3_policy
@log.log
@log.log_method_call
def delete_l3_policy(self, context, l3_policy_id, check_unused=False):
session = context.session
with session.begin(subtransactions=True):
@ -837,9 +841,8 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
self.policy_driver_manager.delete_l3_policy_postcommit(
policy_context)
except Exception:
LOG.exception(_("delete_l3_policy_postcommit failed "
"for l3_policy %s"),
l3_policy_id)
LOG.exception(_LE("delete_l3_policy_postcommit failed "
"for l3_policy %s"), l3_policy_id)
return True
def get_l3_policy(self, context, l3_policy_id, fields=None):
@ -866,7 +869,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
filtered_results.append(filtered)
return [self._fields(result, fields) for result in results]
@log.log
@log.log_method_call
def create_policy_classifier(self, context, policy_classifier):
session = context.session
with session.begin(subtransactions=True):
@ -887,14 +890,14 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_(
LOG.exception(_LE(
"policy_driver_manager.create_policy_classifier_postcommit"
" failed, deleting policy_classifier %s"), result['id'])
self.delete_policy_classifier(context, result['id'])
return result
@log.log
@log.log_method_call
def update_policy_classifier(self, context, id, policy_classifier):
session = context.session
with session.begin(subtransactions=True):
@ -918,7 +921,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
return updated_policy_classifier
@log.log
@log.log_method_call
def delete_policy_classifier(self, context, id):
session = context.session
with session.begin(subtransactions=True):
@ -934,9 +937,8 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
self.policy_driver_manager.delete_policy_classifier_postcommit(
policy_context)
except Exception:
LOG.exception(_("delete_policy_classifier_postcommit failed "
"for policy_classifier %s"),
id)
LOG.exception(_LE("delete_policy_classifier_postcommit failed "
"for policy_classifier %s"), id)
def get_policy_classifier(self, context, policy_classifier_id,
fields=None):
@ -964,7 +966,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
filtered_results.append(filtered)
return [self._fields(result, fields) for result in results]
@log.log
@log.log_method_call
def create_policy_action(self, context, policy_action):
session = context.session
with session.begin(subtransactions=True):
@ -985,14 +987,14 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_(
LOG.exception(_LE(
"policy_driver_manager.create_policy_action_postcommit "
"failed, deleting policy_action %s"), result['id'])
self.delete_policy_action(context, result['id'])
return result
@log.log
@log.log_method_call
def update_policy_action(self, context, id, policy_action):
session = context.session
with session.begin(subtransactions=True):
@ -1017,7 +1019,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
return updated_policy_action
@log.log
@log.log_method_call
def delete_policy_action(self, context, id):
session = context.session
with session.begin(subtransactions=True):
@ -1032,9 +1034,8 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
self.policy_driver_manager.delete_policy_action_postcommit(
policy_context)
except Exception:
LOG.exception(_("delete_policy_action_postcommit failed "
"for policy_action %s"),
id)
LOG.exception(_LE("delete_policy_action_postcommit failed "
"for policy_action %s"), id)
def get_policy_action(self, context, policy_action_id, fields=None):
session = context.session
@ -1060,7 +1061,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
filtered_results.append(filtered)
return [self._fields(result, fields) for result in results]
@log.log
@log.log_method_call
def create_policy_rule(self, context, policy_rule):
session = context.session
with session.begin(subtransactions=True):
@ -1080,14 +1081,14 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_(
LOG.exception(_LE(
"policy_driver_manager.create_policy_rule_postcommit"
" failed, deleting policy_rule %s"), result['id'])
self.delete_policy_rule(context, result['id'])
return result
@log.log
@log.log_method_call
def update_policy_rule(self, context, id, policy_rule):
session = context.session
with session.begin(subtransactions=True):
@ -1110,7 +1111,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
return updated_policy_rule
@log.log
@log.log_method_call
def delete_policy_rule(self, context, id):
session = context.session
with session.begin(subtransactions=True):
@ -1126,9 +1127,8 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
self.policy_driver_manager.delete_policy_rule_postcommit(
policy_context)
except Exception:
LOG.exception(_("delete_policy_rule_postcommit failed "
"for policy_rule %s"),
id)
LOG.exception(_LE("delete_policy_rule_postcommit failed "
"for policy_rule %s"), id)
def get_policy_rule(self, context, policy_rule_id, fields=None):
session = context.session
@ -1154,7 +1154,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
filtered_results.append(filtered)
return [self._fields(result, fields) for result in results]
@log.log
@log.log_method_call
def create_policy_rule_set(self, context, policy_rule_set):
session = context.session
with session.begin(subtransactions=True):
@ -1175,14 +1175,14 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_(
LOG.exception(_LE(
"policy_driver_manager.create_policy_rule_set_postcommit "
"failed, deleting policy_rule_set %s"), result['id'])
self.delete_policy_rule_set(context, result['id'])
return result
@log.log
@log.log_method_call
def update_policy_rule_set(self, context, id, policy_rule_set):
session = context.session
with session.begin(subtransactions=True):
@ -1206,7 +1206,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
return updated_policy_rule_set
@log.log
@log.log_method_call
def delete_policy_rule_set(self, context, id):
session = context.session
with session.begin(subtransactions=True):
@ -1221,9 +1221,8 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
self.policy_driver_manager.delete_policy_rule_set_postcommit(
policy_context)
except Exception:
LOG.exception(_("delete_policy_rule_set_postcommit failed "
"for policy_rule_set %s"),
id)
LOG.exception(_LE("delete_policy_rule_set_postcommit failed "
"for policy_rule_set %s"), id)
def get_policy_rule_set(self, context, policy_rule_set_id, fields=None):
session = context.session
@ -1249,7 +1248,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
filtered_results.append(filtered)
return [self._fields(result, fields) for result in results]
@log.log
@log.log_method_call
def create_external_segment(self, context, external_segment):
session = context.session
with session.begin(subtransactions=True):
@ -1273,14 +1272,14 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
create_external_segment_postcommit(policy_context))
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("create_external_segment_postcommit "
"failed, deleting external_segment "
"%s"), result['id'])
LOG.exception(_LE("create_external_segment_postcommit "
"failed, deleting external_segment "
"%s"), result['id'])
self.delete_external_segment(context, result['id'])
return result
@log.log
@log.log_method_call
def update_external_segment(self, context, external_segment_id,
external_segment):
session = context.session
@ -1310,7 +1309,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
return updated_external_segment
@log.log
@log.log_method_call
def delete_external_segment(self, context, external_segment_id):
session = context.session
with session.begin(subtransactions=True):
@ -1328,8 +1327,8 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
(self.policy_driver_manager.
delete_external_segment_postcommit(policy_context))
except Exception:
LOG.exception(_("delete_external_segment_postcommit failed "
"for external_segment %s"),
LOG.exception(_LE("delete_external_segment_postcommit failed "
"for external_segment %s"),
external_segment_id)
return True
@ -1358,7 +1357,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
filtered_results.append(filtered)
return [self._fields(result, fields) for result in results]
@log.log
@log.log_method_call
def create_external_policy(self, context, external_policy):
session = context.session
with session.begin(subtransactions=True):
@ -1379,14 +1378,14 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
create_external_policy_postcommit(policy_context))
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("create_external_policy_postcommit "
"failed, deleting external_policy "
"%s"), result['id'])
LOG.exception(_LE("create_external_policy_postcommit "
"failed, deleting external_policy "
"%s"), result['id'])
self.delete_external_policy(context, result['id'])
return result
@log.log
@log.log_method_call
def update_external_policy(self, context, external_policy_id,
external_policy):
session = context.session
@ -1413,7 +1412,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
policy_context)
return updated_external_policy
@log.log
@log.log_method_call
def delete_external_policy(self, context, external_policy_id,
check_unused=False):
session = context.session
@ -1430,9 +1429,8 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
self.policy_driver_manager.delete_external_policy_postcommit(
policy_context)
except Exception:
LOG.exception(_("delete_external_policy_postcommit failed "
"for external_policy %s"),
external_policy_id)
LOG.exception(_LE("delete_external_policy_postcommit failed "
"for external_policy %s"), external_policy_id)
def get_external_policy(self, context, external_policy_id, fields=None):
session = context.session
@ -1459,7 +1457,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
filtered_results.append(filtered)
return [self._fields(result, fields) for result in results]
@log.log
@log.log_method_call
def create_nat_pool(self, context, nat_pool):
session = context.session
with session.begin(subtransactions=True):
@ -1477,13 +1475,14 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
create_nat_pool_postcommit(policy_context))
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_("create_nat_pool_postcommit failed, deleting "
"nat_pool %s"), result['id'])
LOG.exception(_LE(
"create_nat_pool_postcommit failed, deleting "
"nat_pool %s"), result['id'])
self.delete_nat_pool(context, result['id'])
return result
@log.log
@log.log_method_call
def update_nat_pool(self, context, nat_pool_id, nat_pool):
session = context.session
with session.begin(subtransactions=True):
@ -1504,7 +1503,7 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
self.policy_driver_manager.update_nat_pool_postcommit(policy_context)
return updated_nat_pool
@log.log
@log.log_method_call
def delete_nat_pool(self, context, nat_pool_id, check_unused=False):
session = context.session
with session.begin(subtransactions=True):
@ -1519,8 +1518,8 @@ class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
self.policy_driver_manager.delete_nat_pool_postcommit(
policy_context)
except Exception:
LOG.exception(_("delete_nat_pool_postcommit failed "
"for nat_pool %s"),
LOG.exception(_LE("delete_nat_pool_postcommit failed "
"for nat_pool %s"),
nat_pool_id)
def get_nat_pool(self, context, nat_pool_id, fields=None):

View File

@ -10,6 +10,8 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron._i18n import _LE
from neutron._i18n import _LI
from oslo_config import cfg
from oslo_log import log
import stevedore
@ -62,14 +64,14 @@ class PolicyDriverManager(stevedore.named.NamedExtensionManager):
self.ordered_policy_drivers = []
self.reverse_ordered_policy_drivers = []
LOG.info(_("Configured policy driver names: %s"),
LOG.info(_LI("Configured policy driver names: %s"),
cfg.CONF.group_policy.policy_drivers)
super(PolicyDriverManager,
self).__init__('gbpservice.neutron.group_policy.policy_drivers',
cfg.CONF.group_policy.policy_drivers,
invoke_on_load=True,
name_order=True)
LOG.info(_("Loaded policy driver names: %s"), self.names())
LOG.info(_LI("Loaded policy driver names: %s"), self.names())
self._register_policy_drivers()
def _register_policy_drivers(self):
@ -83,7 +85,7 @@ class PolicyDriverManager(stevedore.named.NamedExtensionManager):
self.ordered_policy_drivers.append(ext)
self.reverse_ordered_policy_drivers = self.ordered_policy_drivers[::-1]
LOG.info(_("Registered policy drivers: %s"),
LOG.info(_LI("Registered policy drivers: %s"),
[driver.name for driver in self.ordered_policy_drivers])
def initialize(self):
@ -93,7 +95,7 @@ class PolicyDriverManager(stevedore.named.NamedExtensionManager):
# set it to True such that the drivers can override it.
self.native_bulk_support = False
for driver in self.ordered_policy_drivers:
LOG.info(_("Initializing policy driver '%s'"), driver.name)
LOG.info(_LI("Initializing policy driver '%s'"), driver.name)
driver.obj.initialize()
self.native_bulk_support &= getattr(driver.obj,
'native_bulk_support', True)
@ -122,7 +124,7 @@ class PolicyDriverManager(stevedore.named.NamedExtensionManager):
except Exception:
# This is an internal failure.
LOG.exception(
_("Policy driver '%(name)s' failed in %(method)s"),
_LE("Policy driver '%(name)s' failed in %(method)s"),
{'name': driver.name, 'method': method_name}
)
error = True

View File

@ -10,6 +10,8 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron._i18n import _LE
from neutron._i18n import _LI
from oslo_config import cfg
from oslo_log import log
import stevedore
@ -37,14 +39,14 @@ class DriverManager(stevedore.named.NamedExtensionManager):
# the order in which the drivers are called.
self.ordered_drivers = []
LOG.info(_("Configured servicechain driver names: %s"),
LOG.info(_LI("Configured servicechain driver names: %s"),
cfg.CONF.servicechain.servicechain_drivers)
super(DriverManager,
self).__init__(
'gbpservice.neutron.servicechain.servicechain_drivers',
cfg.CONF.servicechain.servicechain_drivers,
invoke_on_load=True, name_order=True)
LOG.info(_("Loaded servicechain driver names: %s"), self.names())
LOG.info(_LI("Loaded servicechain driver names: %s"), self.names())
self._register_drivers()
def _register_drivers(self):
@ -56,14 +58,14 @@ class DriverManager(stevedore.named.NamedExtensionManager):
for ext in self:
self.drivers[ext.name] = ext
self.ordered_drivers.append(ext)
LOG.info(_("Registered servicechain drivers: %s"),
LOG.info(_LI("Registered servicechain drivers: %s"),
[driver.name for driver in self.ordered_drivers])
def initialize(self):
# ServiceChain bulk operations requires each driver to support them
self.native_bulk_support = True
for driver in self.ordered_drivers:
LOG.info(_("Initializing servicechain driver '%s'"), driver.name)
LOG.info(_LI("Initializing servicechain driver '%s'"), driver.name)
driver.obj.initialize()
self.native_bulk_support &= getattr(driver.obj,
'native_bulk_support', True)
@ -88,7 +90,7 @@ class DriverManager(stevedore.named.NamedExtensionManager):
except Exception:
# This is an internal failure.
LOG.exception(
_("ServiceChain driver '%(name)s' failed in %(method)s"),
_LE("ServiceChain driver '%(name)s' failed in %(method)s"),
{'name': driver.name, 'method': method_name}
)
error = True

View File

@ -10,107 +10,107 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import log
from oslo_log import helpers as log
class NoopDriver(object):
@log.log
@log.log_method_call
def initialize(self):
pass
@log.log
@log.log_method_call
def create_servicechain_node_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_servicechain_node_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_servicechain_node_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_servicechain_node_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_servicechain_node_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_servicechain_node_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_servicechain_spec_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_servicechain_spec_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_servicechain_spec_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_servicechain_spec_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_servicechain_spec_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_servicechain_spec_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_servicechain_instance_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_servicechain_instance_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_servicechain_instance_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_servicechain_instance_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_servicechain_instance_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_servicechain_instance_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_service_profile_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_service_profile_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_service_profile_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_service_profile_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_service_profile_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_service_profile_postcommit(self, context):
pass

View File

@ -16,11 +16,13 @@ import time
from heatclient import client as heat_client
from heatclient import exc as heat_exc
from keystoneclient.v2_0 import client as keyclient
from neutron.common import log
from neutron._i18n import _LE
from neutron._i18n import _LW
from neutron.db import model_base
from neutron import manager
from neutron.plugins.common import constants as pconst
from oslo_config import cfg
from oslo_log import helpers as log
from oslo_log import log as logging
from oslo_serialization import jsonutils
import sqlalchemy as sa
@ -29,6 +31,7 @@ from gbpservice.neutron.services.servicechain.common import exceptions as exc
LOG = logging.getLogger(__name__)
cfg.CONF.import_group('keystone_authtoken', 'keystonemiddleware.auth_token')
service_chain_opts = [
cfg.IntOpt('stack_delete_retries',
@ -69,24 +72,24 @@ class ServiceChainInstanceStack(model_base.BASEV2):
class SimpleChainDriver(object):
@log.log
@log.log_method_call
def initialize(self):
pass
@log.log
@log.log_method_call
def create_servicechain_node_precommit(self, context):
if context.current['service_profile_id'] is None:
if context.current['service_type'] not in sc_supported_type:
raise exc.InvalidServiceTypeForReferenceDriver()
elif context.current['service_type']:
LOG.warning(_('Both service_profile_id and service_type are'
'specified, service_type will be ignored.'))
                LOG.warning(_LW('Both service_profile_id and service_type are '
                                'specified, service_type will be ignored.'))
@log.log
@log.log_method_call
def create_servicechain_node_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_servicechain_node_precommit(self, context):
if (context.original['config'] != context.current['config']):
filters = {'servicechain_spec': context.original[
@ -96,31 +99,31 @@ class SimpleChainDriver(object):
if sc_instances:
raise exc.NodeUpdateNotSupported()
@log.log
@log.log_method_call
def update_servicechain_node_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_servicechain_node_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_servicechain_node_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_servicechain_spec_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_servicechain_spec_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_servicechain_spec_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_servicechain_spec_postcommit(self, context):
if context.original['nodes'] != context.current['nodes']:
filters = {'servicechain_spec': [context.original['id']]}
@ -131,19 +134,19 @@ class SimpleChainDriver(object):
sc_instance,
context._sc_spec)
@log.log
@log.log_method_call
def delete_servicechain_spec_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_servicechain_spec_postcommit(self, context):
pass
@log.log
@log.log_method_call
def create_servicechain_instance_precommit(self, context):
pass
@log.log
@log.log_method_call
def create_servicechain_instance_postcommit(self, context):
sc_instance = context.current
sc_spec_ids = sc_instance.get('servicechain_specs')
@ -154,11 +157,11 @@ class SimpleChainDriver(object):
self._create_servicechain_instance_stacks(context, sc_node_ids,
sc_instance, sc_spec)
@log.log
@log.log_method_call
def update_servicechain_instance_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_servicechain_instance_postcommit(self, context):
original_spec_ids = context.original.get('servicechain_specs')
new_spec_ids = context.current.get('servicechain_specs')
@ -169,37 +172,37 @@ class SimpleChainDriver(object):
self._update_servicechain_instance(context, context.current,
newspec)
@log.log
@log.log_method_call
def delete_servicechain_instance_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_servicechain_instance_postcommit(self, context):
self._delete_servicechain_instance_stacks(context._plugin_context,
context.current['id'])
@log.log
@log.log_method_call
def create_service_profile_precommit(self, context):
if context.current['service_type'] not in sc_supported_type:
raise exc.InvalidServiceTypeForReferenceDriver()
@log.log
@log.log_method_call
def create_service_profile_postcommit(self, context):
pass
@log.log
@log.log_method_call
def update_service_profile_precommit(self, context):
pass
@log.log
@log.log_method_call
def update_service_profile_postcommit(self, context):
pass
@log.log
@log.log_method_call
def delete_service_profile_precommit(self, context):
pass
@log.log
@log.log_method_call
def delete_service_profile_postcommit(self, context):
pass
@ -242,7 +245,7 @@ class SimpleChainDriver(object):
stack_template = sc_node.get('config')
# TODO(magesh):Raise an exception ??
if not stack_template:
LOG.error(_("Service Config is not defined for the service"
LOG.error(_LE("Service Config is not defined for the service"
" chain Node"))
return
stack_template = jsonutils.loads(stack_template)
@ -335,22 +338,24 @@ class SimpleChainDriver(object):
elif stack.stack_status == 'DELETE_FAILED':
heatclient.delete(stack_id)
except Exception:
LOG.exception(_("Service Chain Instance cleanup may not have "
"happened because Heat API request failed "
"while waiting for the stack %(stack)s to be "
"deleted"), {'stack': stack_id})
LOG.exception(_LE(
"Service Chain Instance cleanup may not have "
"happened because Heat API request failed "
"while waiting for the stack %(stack)s to be "
"deleted"), {'stack': stack_id})
return
else:
time.sleep(STACK_DELETE_RETRY_WAIT)
stack_delete_retries = stack_delete_retries - 1
if stack_delete_retries == 0:
LOG.warning(_("Resource cleanup for service chain instance"
" is not completed within %(wait)s seconds"
" as deletion of Stack %(stack)s is not"
" completed"),
{'wait': (STACK_DELETE_RETRIES *
STACK_DELETE_RETRY_WAIT),
'stack': stack_id})
LOG.warning(_LW(
"Resource cleanup for service chain instance"
" is not completed within %(wait)s seconds"
" as deletion of Stack %(stack)s is not"
" completed"),
{'wait': (STACK_DELETE_RETRIES *
STACK_DELETE_RETRY_WAIT),
'stack': stack_id})
return
else:
continue
@ -409,12 +414,12 @@ class SimpleChainDriver(object):
plugins = manager.NeutronManager.get_service_plugins()
grouppolicy_plugin = plugins.get(pconst.GROUP_POLICY)
if not grouppolicy_plugin:
LOG.error(_("No Grouppolicy service plugin found."))
LOG.error(_LE("No Grouppolicy service plugin found."))
raise exc.ServiceChainDeploymentError()
return grouppolicy_plugin
class HeatClient:
class HeatClient(object):
def __init__(self, context, password=None):
api_version = "1"
@ -447,8 +452,9 @@ class HeatClient:
try:
self.stacks.delete(stack_id)
except heat_exc.HTTPNotFound:
LOG.warning(_("Stack %(stack)s created by service chain driver is "
"not found at cleanup"), {'stack': stack_id})
LOG.warning(_LW(
"Stack %(stack)s created by service chain driver is "
"not found at cleanup"), {'stack': stack_id})
def get(self, stack_id):
return self.stacks.get(stack_id)

View File

@ -10,8 +10,9 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import log
from neutron._i18n import _LE
from neutron.plugins.common import constants as pconst
from oslo_log import helpers as log
from oslo_log import log as logging
from oslo_utils import excutils
@ -41,7 +42,7 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
super(ServiceChainPlugin, self).__init__()
self.driver_manager.initialize()
@log.log
@log.log_method_call
def create_servicechain_node(self, context, servicechain_node):
session = context.session
with session.begin(subtransactions=True):
@ -58,14 +59,14 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
sc_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_("driver_manager.create_servicechain_postcommit "
"failed, deleting servicechain_node %s"),
LOG.error(_LE("driver_manager.create_servicechain_postcommit "
"failed, deleting servicechain_node %s"),
result['id'])
self.delete_servicechain_node(context, result['id'])
return result
@log.log
@log.log_method_call
def update_servicechain_node(self, context, servicechain_node_id,
servicechain_node):
session = context.session
@ -89,7 +90,7 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
return updated_sc_node
@log.log
@log.log_method_call
def delete_servicechain_node(self, context, servicechain_node_id):
session = context.session
with session.begin(subtransactions=True):
@ -106,11 +107,11 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
self.driver_manager.delete_servicechain_node_postcommit(
sc_context)
except Exception:
LOG.exception(_("delete_servicechain_node_postcommit failed "
LOG.exception(_LE("delete_servicechain_node_postcommit failed "
"for servicechain_node %s"),
servicechain_node_id)
@log.log
@log.log_method_call
def create_servicechain_spec(self, context, servicechain_spec):
session = context.session
with session.begin(subtransactions=True):
@ -126,14 +127,14 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
self.driver_manager.create_servicechain_spec_postcommit(sc_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_("driver_manager.create_servicechain_postcommit "
"failed, deleting servicechain_spec %s"),
LOG.error(_LE("driver_manager.create_servicechain_postcommit "
"failed, deleting servicechain_spec %s"),
result['id'])
self.delete_servicechain_spec(context, result['id'])
return result
@log.log
@log.log_method_call
def update_servicechain_spec(self, context, servicechain_spec_id,
servicechain_spec):
session = context.session
@ -156,7 +157,7 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
return updated_sc_spec
@log.log
@log.log_method_call
def delete_servicechain_spec(self, context, servicechain_spec_id):
session = context.session
with session.begin(subtransactions=True):
@ -172,11 +173,11 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
try:
self.driver_manager.delete_servicechain_spec_postcommit(sc_context)
except Exception:
LOG.exception(_("delete_servicechain_spec_postcommit failed "
LOG.exception(_LE("delete_servicechain_spec_postcommit failed "
"for servicechain_spec %s"),
servicechain_spec_id)
@log.log
@log.log_method_call
def create_servicechain_instance(self, context, servicechain_instance):
session = context.session
with session.begin(subtransactions=True):
@ -193,7 +194,7 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
sc_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_(
LOG.error(_LE(
"driver_manager.create_servicechain_instance_postcommit "
"failed, deleting servicechain_instance %s"),
result['id'])
@ -201,7 +202,7 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
return result
@log.log
@log.log_method_call
def update_servicechain_instance(self, context,
servicechain_instance_id,
servicechain_instance):
@ -223,7 +224,7 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
sc_context)
return updated_sc_instance
@log.log
@log.log_method_call
def delete_servicechain_instance(self, context, servicechain_instance_id):
session = context.session
with session.begin(subtransactions=True):
@ -241,11 +242,11 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
self.driver_manager.delete_servicechain_instance_postcommit(
sc_context)
except Exception:
LOG.exception(_("delete_servicechain_instance_postcommit failed "
LOG.exception(_LE("delete_servicechain_instance_postcommit failed "
"for servicechain_instance %s"),
servicechain_instance_id)
@log.log
@log.log_method_call
def create_service_profile(self, context, service_profile):
session = context.session
with session.begin(subtransactions=True):
@ -263,15 +264,14 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
sc_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_(
LOG.error(_LE(
"driver_manager.create_service_profile_postcommit "
"failed, deleting service_profile %s"),
result['id'])
"failed, deleting service_profile %s"), result['id'])
self.delete_service_profile(context, result['id'])
return result
@log.log
@log.log_method_call
def update_service_profile(self, context, service_profile_id,
service_profile):
session = context.session
@ -293,7 +293,7 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
sc_context)
return updated_profile
@log.log
@log.log_method_call
def delete_service_profile(self, context, service_profile_id):
session = context.session
with session.begin(subtransactions=True):
@ -310,6 +310,6 @@ class ServiceChainPlugin(servicechain_db.ServiceChainDbPlugin,
self.driver_manager.delete_service_profile_postcommit(
sc_context)
except Exception:
LOG.exception(_("delete_service_profile_postcommit failed "
LOG.exception(_LE("delete_service_profile_postcommit failed "
"for service_profile %s"),
service_profile_id)

View File

@ -10,6 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron._i18n import _LI
from neutron.common import exceptions as n_exc
from oslo_config import cfg
from oslo_log import log as logging
@ -32,12 +33,13 @@ class NodeDriverManager(stevedore.named.NamedExtensionManager):
# Ordered list of node drivers.
self.ordered_drivers = []
names = cfg.CONF.node_composition_plugin.node_drivers
LOG.info(_("Configured service chain node driver names: %s"), names)
LOG.info(_LI("Configured service chain node driver names: %s"), names)
super(NodeDriverManager,
self).__init__(
'gbpservice.neutron.servicechain.ncp_drivers', names,
invoke_on_load=True, name_order=True)
LOG.info(_("Loaded service chain node driver names: %s"), self.names())
LOG.info(_LI(
"Loaded service chain node driver names: %s"), self.names())
self._register_drivers()
def _register_drivers(self):
@ -45,14 +47,14 @@ class NodeDriverManager(stevedore.named.NamedExtensionManager):
for ext in self:
self.drivers[ext.name] = ext
self.ordered_drivers.append(ext)
LOG.info(_("Registered service chain node drivers: %s"),
LOG.info(_LI("Registered service chain node drivers: %s"),
[driver.name for driver in self.ordered_drivers])
def initialize(self):
"""Initialize all the service chain node drivers."""
self.native_bulk_support = True
for driver in self.ordered_drivers:
LOG.info(_("Initializing service chain node drivers '%s'"),
LOG.info(_LI("Initializing service chain node drivers '%s'"),
driver.name)
driver.obj.initialize(driver.name)
self.native_bulk_support &= getattr(driver.obj,

View File

@ -10,7 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import log
from oslo_log import helpers as log
from gbpservice.neutron.services.servicechain.plugins.ncp import driver_base
@ -19,52 +19,52 @@ class NoopNodeDriver(driver_base.NodeDriverBase):
initialized = False
@log.log
@log.log_method_call
def initialize(self, name):
self.initialized = True
self._name = name
@log.log
@log.log_method_call
def get_plumbing_info(self, context):
pass
@log.log
@log.log_method_call
def validate_create(self, context):
pass
@log.log
@log.log_method_call
def validate_update(self, context):
pass
@log.log
@log.log_method_call
def create(self, context):
pass
@log.log
@log.log_method_call
def delete(self, context):
pass
@log.log
@log.log_method_call
def update(self, context):
pass
@log.log
@log.log_method_call
def update_policy_target_added(self, context, policy_target):
pass
@log.log
@log.log_method_call
def update_policy_target_removed(self, context, policy_target):
pass
@log.log
@log.log_method_call
def update_node_consumer_ptg_added(self, context, policy_target_group):
pass
@log.log
@log.log_method_call
def update_node_consumer_ptg_removed(self, context, policy_target_group):
pass
@log.log
@log.log_method_call
def notify_chain_parameters_updated(self, context):
pass

View File

@ -12,10 +12,11 @@
import time
from neutron.common import log
from neutron._i18n import _LE
from neutron.db import model_base
from neutron.plugins.common import constants as pconst
from oslo_config import cfg
from oslo_log import helpers as log
from oslo_log import log as logging
from oslo_serialization import jsonutils
import sqlalchemy as sa
@ -116,16 +117,16 @@ class HeatNodeDriver(driver_base.NodeDriverBase):
'OS::Neutron::Firewall',
'OS::Neutron::FirewallPolicy']}
@log.log
@log.log_method_call
def initialize(self, name):
self.initialized = True
self._name = name
@log.log
@log.log_method_call
def get_plumbing_info(self, context):
pass
@log.log
@log.log_method_call
def validate_create(self, context):
if context.current_profile is None:
raise ServiceProfileRequired()
@ -138,7 +139,7 @@ class HeatNodeDriver(driver_base.NodeDriverBase):
self._validate_service_config(context.current_node['config'],
service_type)
@log.log
@log.log_method_call
def validate_update(self, context):
if not context.original_node: # PT create/delete notifications
return
@ -179,7 +180,7 @@ class HeatNodeDriver(driver_base.NodeDriverBase):
raise HeatResourceMissing(resource=resource_name,
servicetype=service_type)
@log.log
@log.log_method_call
def create(self, context):
heatclient = self._get_heat_client(context.plugin_context)
@ -197,7 +198,7 @@ class HeatNodeDriver(driver_base.NodeDriverBase):
context.plugin_session, context.current_node['id'],
context.instance['id'], stack['stack']['id'])
@log.log
@log.log_method_call
def delete(self, context):
stack_ids = self._get_node_instance_stacks(context.plugin_session,
context.current_node['id'],
@ -213,7 +214,7 @@ class HeatNodeDriver(driver_base.NodeDriverBase):
context.current_node['id'],
context.instance['id'])
@log.log
@log.log_method_call
def update(self, context):
heatclient = self._get_heat_client(context.plugin_context)
@ -227,25 +228,25 @@ class HeatNodeDriver(driver_base.NodeDriverBase):
heatclient, stack.stack_id, 'update')
heatclient.update(stack.stack_id, stack_template, stack_params)
@log.log
@log.log_method_call
def update_policy_target_added(self, context, policy_target):
if context.current_profile['service_type'] == pconst.LOADBALANCER:
self.update(context)
@log.log
@log.log_method_call
def update_policy_target_removed(self, context, policy_target):
if context.current_profile['service_type'] == pconst.LOADBALANCER:
self.update(context)
@log.log
@log.log_method_call
def update_node_consumer_ptg_added(self, context, policy_target_group):
pass
@log.log
@log.log_method_call
def update_node_consumer_ptg_removed(self, context, policy_target_group):
pass
@log.log
@log.log_method_call
def notify_chain_parameters_updated(self, context):
self.update(context)
@ -326,15 +327,15 @@ class HeatNodeDriver(driver_base.NodeDriverBase):
'DELETE_IN_PROGRESS']:
return
except Exception:
LOG.exception(_("Retrieving the stack %(stack)s failed."),
LOG.exception(_LE("Retrieving the stack %(stack)s failed."),
{'stack': stack_id})
return
else:
time.sleep(STACK_ACTION_RETRY_WAIT)
time_waited = time_waited + STACK_ACTION_RETRY_WAIT
if time_waited >= STACK_ACTION_WAIT_TIME:
LOG.error(_("Stack %(action)s not completed within "
"%(wait)s seconds"),
LOG.error(_LE("Stack %(action)s not completed within "
"%(wait)s seconds"),
{'action': action,
'wait': STACK_ACTION_WAIT_TIME,
'stack': stack_id})

View File

@ -12,13 +12,14 @@
from heatclient import client as heat_client
from heatclient import exc as heat_exc
from neutron._i18n import _LW
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class HeatClient:
class HeatClient(object):
def __init__(self, context, heat_uri, password=None,
auth_token=None):
@ -55,8 +56,9 @@ class HeatClient:
try:
self.stacks.delete(stack_id)
except heat_exc.HTTPNotFound:
LOG.warning(_("Stack %(stack)s created by service chain driver is "
"not found at cleanup"), {'stack': stack_id})
LOG.warning(_LW(
"Stack %(stack)s created by service chain driver is "
"not found at cleanup"), {'stack': stack_id})
def get(self, stack_id):
return self.stacks.get(stack_id)

View File

@ -12,6 +12,7 @@
from keystoneclient import exceptions as k_exceptions
from keystoneclient.v2_0 import client as keyclient
from neutron._i18n import _LE
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
@ -76,10 +77,10 @@ class AdminOwnedResourcesApicTSCP(tscp.TrafficStitchingPlumber):
return tenant.id
except k_exceptions.NotFound:
with excutils.save_and_reraise_exception(reraise=True):
LOG.error(_('No tenant with name %s exists.'), tenant)
LOG.error(_LE('No tenant with name %s exists.'), tenant)
except k_exceptions.NoUniqueMatch:
with excutils.save_and_reraise_exception(reraise=True):
LOG.error(_('Multiple tenants matches found for %s'), tenant)
LOG.error(_LE('Multiple tenants matches found for %s'), tenant)
def _get_resource_owner_context(self, context):
resource_owner_context = context.elevated()

View File

@ -10,7 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import log
from oslo_log import helpers as log
from gbpservice.neutron.services.servicechain.plugins.ncp import plumber_base
@ -19,14 +19,14 @@ class NoopPlumber(plumber_base.NodePlumberBase):
initialized = False
@log.log
@log.log_method_call
def initialize(self):
self.initialized = True
@log.log
@log.log_method_call
def plug_services(self, context, deployment):
self._sort_deployment(deployment)
@log.log
@log.log_method_call
def unplug_services(self, context, deployment):
self._sort_deployment(deployment)

View File

@ -10,6 +10,9 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron._i18n import _LE
from neutron._i18n import _LI
from neutron._i18n import _LW
from neutron.api.v2 import attributes as attr
from neutron import manager
from oslo_config import cfg
@ -44,8 +47,8 @@ class TrafficStitchingPlumber(plumber_base.NodePlumberBase):
# Verify that proxy_group extension is loaded
if pg_ext.PROXY_GROUP not in cfg.CONF.group_policy.extension_drivers:
LOG.error(_("proxy_group GBP driver extension is mandatory for "
"traffic stitching plumber."))
LOG.error(_LE("proxy_group GBP driver extension is mandatory for "
"traffic stitching plumber."))
raise exc.GroupPolicyDeploymentError()
@property
@ -81,7 +84,7 @@ class TrafficStitchingPlumber(plumber_base.NodePlumberBase):
management, 'management')
# Create proper PTs based on the service type
jump_ptg = None
LOG.info(_("Plumbing service of type '%s'"),
LOG.info(_LI("Plumbing service of type '%s'"),
info['plumbing_type'])
if info['plumbing_type'] == common.PLUMBING_TYPE_ENDPOINT:
# No stitching needed, only provider side PT is created.
@ -119,7 +122,7 @@ class TrafficStitchingPlumber(plumber_base.NodePlumberBase):
context, part_context, info['consumer'],
jump_ptg, 'consumer')
else:
LOG.warning(_("Unsupported plumbing type %s"),
LOG.warning(_LW("Unsupported plumbing type %s"),
info['plumbing_type'])
# Replace current "provider" with jump ptg if needed
provider = jump_ptg or provider

View File

@ -10,9 +10,11 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import log
from neutron._i18n import _LE
from neutron._i18n import _LI
from neutron.plugins.common import constants as pconst
from oslo_config import cfg
from oslo_log import helpers as log
from oslo_log import log as logging
from oslo_utils import excutils
@ -49,9 +51,9 @@ class NodeCompositionPlugin(servicechain_db.ServiceChainDbPlugin,
self.plumber = utils.load_plugin(
PLUMBER_NAMESPACE, plumber_klass)
self.plumber.initialize()
LOG.info(_("Initialized node plumber '%s'"), plumber_klass)
LOG.info(_LI("Initialized node plumber '%s'"), plumber_klass)
@log.log
@log.log_method_call
def create_servicechain_instance(self, context, servicechain_instance):
"""Instance created.
@ -75,14 +77,14 @@ class NodeCompositionPlugin(servicechain_db.ServiceChainDbPlugin,
except Exception:
# Some node could not be deployed
with excutils.save_and_reraise_exception():
LOG.error(_("Node deployment failed, "
"deleting servicechain_instance %s"),
LOG.error(_LE("Node deployment failed, "
"deleting servicechain_instance %s"),
instance['id'])
self.delete_servicechain_instance(context, instance['id'])
return instance
@log.log
@log.log_method_call
def update_servicechain_instance(self, context, servicechain_instance_id,
servicechain_instance):
"""Instance updated.
@ -122,7 +124,7 @@ class NodeCompositionPlugin(servicechain_db.ServiceChainDbPlugin,
self._update_servicechain_nodes(context, updaters)
return updated_instance
@log.log
@log.log_method_call
def delete_servicechain_instance(self, context, servicechain_instance_id):
"""Instance deleted.
@ -141,7 +143,7 @@ class NodeCompositionPlugin(servicechain_db.ServiceChainDbPlugin,
super(NodeCompositionPlugin, self).delete_servicechain_instance(
context, servicechain_instance_id)
@log.log
@log.log_method_call
def create_servicechain_node(self, context, servicechain_node):
session = context.session
with session.begin(subtransactions=True):
@ -151,7 +153,7 @@ class NodeCompositionPlugin(servicechain_db.ServiceChainDbPlugin,
self._validate_shared_create(context, result, 'servicechain_node')
return result
@log.log
@log.log_method_call
def update_servicechain_node(self, context, servicechain_node_id,
servicechain_node):
"""Node Update.
@ -191,12 +193,12 @@ class NodeCompositionPlugin(servicechain_db.ServiceChainDbPlugin,
try:
update['driver'].update(update['context'])
except exc.NodeDriverError as ex:
LOG.error(_("Node Update failed, %s"),
LOG.error(_LE("Node Update failed, %s"),
ex.message)
return updated_sc_node
@log.log
@log.log_method_call
def create_servicechain_spec(self, context, servicechain_spec):
session = context.session
with session.begin(subtransactions=True):
@ -206,7 +208,7 @@ class NodeCompositionPlugin(servicechain_db.ServiceChainDbPlugin,
self._validate_shared_create(context, result, 'servicechain_spec')
return result
@log.log
@log.log_method_call
def update_servicechain_spec(self, context, servicechain_spec_id,
servicechain_spec):
session = context.session
@ -227,7 +229,7 @@ class NodeCompositionPlugin(servicechain_db.ServiceChainDbPlugin,
return updated_sc_spec
@log.log
@log.log_method_call
def create_service_profile(self, context, service_profile):
session = context.session
with session.begin(subtransactions=True):
@ -237,7 +239,7 @@ class NodeCompositionPlugin(servicechain_db.ServiceChainDbPlugin,
self._validate_shared_create(context, result, 'service_profile')
return result
@log.log
@log.log_method_call
def update_service_profile(self, context, service_profile_id,
service_profile):
session = context.session
@ -301,8 +303,8 @@ class NodeCompositionPlugin(servicechain_db.ServiceChainDbPlugin,
'update_policy_target_' + action)(
update['context'], policy_target)
except exc.NodeDriverError as ex:
LOG.error(_("Node Update on policy target modification "
"failed, %s"), ex.message)
LOG.error(_LE("Node Update on policy target modification "
"failed, %s"), ex.message)
def _update_chains_consumer_modified(self, context, policy_target_group,
instance_id, action):
@ -315,8 +317,9 @@ class NodeCompositionPlugin(servicechain_db.ServiceChainDbPlugin,
'update_node_consumer_ptg_' + action)(
update['context'], policy_target_group)
except exc.NodeDriverError as ex:
LOG.error(_("Node Update on policy target group modification "
"failed, %s"), ex.message)
LOG.error(_LE(
"Node Update on policy target group modification "
"failed, %s"), ex.message)
def notify_chain_parameters_updated(self, context,
servicechain_instance_id):
@ -335,8 +338,8 @@ class NodeCompositionPlugin(servicechain_db.ServiceChainDbPlugin,
getattr(update['driver'],
'notify_chain_parameters_updated')(update['context'])
except exc.NodeDriverError as ex:
LOG.error(_("Node Update on GBP parameter update "
"failed, %s"), ex.message)
LOG.error(_LE("Node Update on GBP parameter update "
"failed, %s"), ex.message)
def _get_instance_nodes(self, context, instance):
context = utils.admin_context(context)
@ -394,7 +397,7 @@ class NodeCompositionPlugin(servicechain_db.ServiceChainDbPlugin,
try:
driver.delete(destroy['context'])
except exc.NodeDriverError:
LOG.error(_("Node destroy failed, for node %s "),
LOG.error(_LE("Node destroy failed, for node %s "),
driver['context'].current_node['id'])
except Exception as e:
LOG.exception(e)

View File

@ -10,6 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from neutron._i18n import _LE
from neutron.api.v2 import attributes as nattr
from neutron import manager as n_manager
from neutron.plugins.common import constants as pconst
@ -50,7 +51,7 @@ class SharingMixin(object):
plugins = n_manager.NeutronManager.get_service_plugins()
gbp_plugin = plugins.get(pconst.GROUP_POLICY)
if not gbp_plugin:
LOG.error(_("No group policy service plugin found."))
LOG.error(_LE("No group policy service plugin found."))
raise gp_exc.GroupPolicyDeploymentError()
return gbp_plugin

View File

@ -48,6 +48,18 @@ AGENT_CONF = {'alive': True, 'binary': 'somebinary',
'topic': 'sometopic', 'agent_type': AGENT_TYPE,
'configurations': {'bridge_mappings': {'physnet1': 'br-eth1'}}}
# Some Neutron extensions are designated as "required" by the extensions
# that are needed to run the GBP UTs.
# For example, when using the router plugin, it supports the
# "router_availability" extension, which requires the
# "availability_zone" extension, which in turn requires the "agent"
# extension. For us to be able to use that router plugin as is, we add
# those required extensions to the list of "supported_extension_aliases"
# for our test plugins. This keeps the extensions framework happy, and
# it shouldn't cause a problem since we don't actually exercise those
# extensions. The following list contains all such extensions.
UNSUPPORTED_REQUIRED_EXTS = ['availability_zone', 'agent']
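# For illustration only, a hypothetical test plugin mixes the required
# aliases into its own supported list in exactly the same way the test
# plugins below do:
class HypotheticalGBPTestPlugin(gpdb.GroupPolicyDbPlugin):
    supported_extension_aliases = (['group-policy'] +
                                   UNSUPPORTED_REQUIRED_EXTS)
    path_prefix = "/grouppolicy"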
class ApiManagerMixin(object):
@ -261,7 +273,7 @@ class GroupPolicyDBTestBase(ApiManagerMixin):
class GroupPolicyDBTestPlugin(gpdb.GroupPolicyDbPlugin):
supported_extension_aliases = ['group-policy']
supported_extension_aliases = ['group-policy'] + UNSUPPORTED_REQUIRED_EXTS
path_prefix = "/grouppolicy"
@ -271,7 +283,7 @@ DB_GP_PLUGIN_KLASS = (GroupPolicyDBTestPlugin.__module__ + '.' +
class ServiceChainDBTestPlugin(svcchain_db.ServiceChainDbPlugin):
supported_extension_aliases = ['servicechain']
supported_extension_aliases = ['servicechain'] + UNSUPPORTED_REQUIRED_EXTS
path_prefix = "/servicechain"
@ -380,7 +392,7 @@ class TestGroupResources(GroupPolicyDbTestCase):
req = self.new_delete_request('policy_targets', pt_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(gpolicy.PolicyTargetNotFound,
self.plugin.get_policy_target, ctx, pt_id)
@ -519,7 +531,7 @@ class TestGroupResources(GroupPolicyDbTestCase):
req = self.new_delete_request('policy_target_groups', ptg_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(gpolicy.PolicyTargetGroupNotFound,
self.plugin.get_policy_target_group, ctx, ptg_id)
@ -567,7 +579,7 @@ class TestGroupResources(GroupPolicyDbTestCase):
req = self.new_delete_request('l2_policies', l2p_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(gpolicy.L2PolicyNotFound, self.plugin.get_l2_policy,
ctx, l2p_id)
@ -704,7 +716,7 @@ class TestGroupResources(GroupPolicyDbTestCase):
req = self.new_delete_request('l3_policies', l3p_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(gpolicy.L3PolicyNotFound, self.plugin.get_l3_policy,
ctx, l3p_id)
@ -914,7 +926,7 @@ class TestGroupResources(GroupPolicyDbTestCase):
req = self.new_delete_request('policy_classifiers', pc_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(gpolicy.PolicyClassifierNotFound,
self.plugin.get_policy_classifier, ctx, pc_id)
@ -985,7 +997,7 @@ class TestGroupResources(GroupPolicyDbTestCase):
req = self.new_delete_request('policy_actions', pa_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(gpolicy.PolicyActionNotFound,
self.plugin.get_policy_action, ctx, pa_id)
@ -1094,7 +1106,7 @@ class TestGroupResources(GroupPolicyDbTestCase):
req = self.new_delete_request('policy_rules', pr_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(gpolicy.PolicyRuleNotFound,
self.plugin.get_policy_rule, ctx, pr_id)
@ -1225,7 +1237,7 @@ class TestGroupResources(GroupPolicyDbTestCase):
req = self.new_delete_request('policy_rule_sets', prs_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(gpolicy.PolicyRuleSetNotFound,
self.plugin.get_policy_rule_set, ctx, prs_id)
@ -1292,14 +1304,14 @@ class TestGroupResources(GroupPolicyDbTestCase):
data = {'policy_rule_set': {'child_policy_rule_sets': [nephew['id']]}}
req = self.new_update_request('policy_rule_sets', data, child['id'])
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPBadRequest.code)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_prs_parent_no_loop(self):
prs = self.create_policy_rule_set()['policy_rule_set']
data = {'policy_rule_set': {'child_policy_rule_sets': [prs['id']]}}
req = self.new_update_request('policy_rule_sets', data, prs['id'])
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPBadRequest.code)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def _test_create_and_show(self, type, attrs, expected=None):
plural = cm.get_resource_plural(type)
@ -1430,7 +1442,7 @@ class TestGroupResources(GroupPolicyDbTestCase):
req = self.new_delete_request('external_policies', ep_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(gpolicy.ExternalPolicyNotFound,
self.plugin.get_external_policy, ctx, ep_id)
@ -1441,7 +1453,7 @@ class TestGroupResources(GroupPolicyDbTestCase):
req = self.new_delete_request('external_segments', ep_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(gpolicy.ExternalSegmentNotFound,
self.plugin.get_external_segment, ctx, ep_id)
@ -1452,7 +1464,7 @@ class TestGroupResources(GroupPolicyDbTestCase):
req = self.new_delete_request('nat_pools', ep_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(gpolicy.NATPoolNotFound,
self.plugin.get_nat_pool, ctx, ep_id)

View File

@ -27,7 +27,8 @@ from gbpservice.neutron.tests.unit.db.grouppolicy import (
class GroupPolicyMappingDBTestPlugin(gpmdb.GroupPolicyMappingDbPlugin):
supported_extension_aliases = ['group-policy', 'group-policy-mapping']
supported_extension_aliases = ['group-policy', 'group-policy-mapping'] + (
tgpdb.UNSUPPORTED_REQUIRED_EXTS)
path_prefix = "/grouppolicy"
@ -77,7 +78,7 @@ class TestMappedGroupResourceAttrs(GroupPolicyMappingDbTestCase):
self.assertEqual(port_id, res['policy_target']['port_id'])
req = self.new_delete_request('policy_targets', pt_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
def test_create_delete_policy_target_group_with_subnets(self):
with self.subnet(cidr='10.10.1.0/24') as subnet1:
@ -95,7 +96,7 @@ class TestMappedGroupResourceAttrs(GroupPolicyMappingDbTestCase):
sorted(res['policy_target_group']['subnets']))
req = self.new_delete_request('policy_target_groups', ptg_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
def test_update_policy_target_group_subnets(self):
with self.subnet(cidr='10.10.1.0/24') as subnet1:
@ -131,8 +132,8 @@ class TestMappedGroupResourceAttrs(GroupPolicyMappingDbTestCase):
req = self.new_delete_request('policy_target_groups',
ptg_id)
res = req.get_response(self.ext_api)
self.assertEqual(
res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code,
res.status_int)
def test_create_delete_l2_policy_with_network(self):
with self.network() as network:
@ -145,7 +146,7 @@ class TestMappedGroupResourceAttrs(GroupPolicyMappingDbTestCase):
self.assertEqual(network_id, res['l2_policy']['network_id'])
req = self.new_delete_request('l2_policies', l2p_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
def test_create_delete_l3_policy_with_routers(self):
with self.router() as router1:
@ -163,7 +164,7 @@ class TestMappedGroupResourceAttrs(GroupPolicyMappingDbTestCase):
sorted(res['l3_policy']['routers']))
req = self.new_delete_request('l3_policies', l3p_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
def test_update_l3_policy_routers(self):
with self.router() as router1:
@ -194,8 +195,8 @@ class TestMappedGroupResourceAttrs(GroupPolicyMappingDbTestCase):
# the resource(s) that are created.
req = self.new_delete_request('l3_policies', l3p_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int,
webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code,
res.status_int)
def test_create_delete_es_with_subnet(self):
with self.subnet(cidr='10.10.1.0/24') as subnet:
@ -210,7 +211,7 @@ class TestMappedGroupResourceAttrs(GroupPolicyMappingDbTestCase):
self.assertEqual(subnet_id, res['external_segment']['subnet_id'])
req = self.new_delete_request('external_segments', es_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
def test_list_policy_targets(self):
with self.port() as port1:

View File

@ -69,7 +69,8 @@ class ServiceChainDBTestBase(test_group_policy_db.GroupPolicyDBTestBase):
class ServiceChainDBTestPlugin(svcchain_db.ServiceChainDbPlugin):
supported_extension_aliases = ['servicechain']
supported_extension_aliases = ['servicechain'] + (
test_group_policy_db.UNSUPPORTED_REQUIRED_EXTS)
path_prefix = "/servicechain"
DB_GP_PLUGIN_KLASS = (ServiceChainDBTestPlugin.__module__ + '.' +
@ -200,7 +201,7 @@ class TestServiceChainResources(ServiceChainDbTestCase):
req = self.new_delete_request('servicechain_specs', scs_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
# After deleting the Service Chain Spec, node delete should succeed
req = self.new_delete_request('servicechain_nodes', scn_id)
@ -335,7 +336,7 @@ class TestServiceChainResources(ServiceChainDbTestCase):
req = self.new_delete_request('servicechain_instances', sci_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(service_chain.ServiceChainInstanceNotFound,
self.plugin.get_servicechain_instance,
ctx, sci_id)
@ -343,7 +344,7 @@ class TestServiceChainResources(ServiceChainDbTestCase):
# Deleting the spec should succeed after the instance is deleted
req = self.new_delete_request('servicechain_specs', scs_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(service_chain.ServiceChainSpecNotFound,
self.plugin.get_servicechain_spec, ctx, scs_id)
@ -530,12 +531,12 @@ class TestServiceChainResources(ServiceChainDbTestCase):
req = self.new_delete_request('servicechain_nodes', scn_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
# After deleting the Service Chain Spec, node delete should succeed
req = self.new_delete_request('service_profiles', sp_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(service_chain.ServiceProfileNotFound,
self.plugin.get_service_profile,
ctx, sp_id)

View File

@ -227,7 +227,7 @@ class TestPolicyTarget(ApicMappingTestCase):
policy_target_group_id=ptg['id'], port_id=port['port']['id'])
res = self.new_delete_request('ports', port['port']['id'],
self.fmt).get_response(self.api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.delete_policy_target(pt['policy_target']['id'],
expected_res_status=404)
@ -1487,7 +1487,7 @@ class TestL3Policy(ApicMappingTestCase):
expected_res_status=200)['l3_policy']
req = self.new_delete_request('l3_policies', l3p['id'], self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
mgr = self.driver.apic_manager
owner = self.common_tenant if shared_es else es1['tenant_id']
@ -1525,7 +1525,7 @@ class TestL3Policy(ApicMappingTestCase):
expected_res_status=200)['l3_policy']
req = self.new_delete_request('l3_policies', l3p['id'], self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
expected_delete_calls = []
if not self.pre_l3out:
@ -1761,7 +1761,7 @@ class TestL3Policy(ApicMappingTestCase):
res = self.new_delete_request('policy_target_groups', ptg['id'],
self.fmt).get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
def test_multi_es_with_ptg_1(self):
self._test_multi_es_with_ptg(False)

View File

@ -166,10 +166,10 @@ class TestL3Policy(GroupPolicyPluginTestCase):
def test_shared_l3_policy_create(self):
# Verify default is False
l3p = self.create_l3_policy()
self.assertEqual(False, l3p['l3_policy']['shared'])
self.assertFalse(l3p['l3_policy']['shared'])
# Verify shared True created without errors
l3p = self.create_l3_policy(shared=True)
self.assertEqual(True, l3p['l3_policy']['shared'])
self.assertTrue(l3p['l3_policy']['shared'])
def test_shared_l3p_create_with_es(self):
def combination(l3p, es):
@ -311,10 +311,10 @@ class TestL2Policy(GroupPolicyPluginTestCase):
l3p = self.create_l3_policy(shared=True)['l3_policy']
# Verify Default False
l2p = self.create_l2_policy(l3_policy_id=l3p['id'])
self.assertEqual(False, l2p['l2_policy']['shared'])
self.assertFalse(l2p['l2_policy']['shared'])
# Verify shared True created without errors
l2p = self.create_l2_policy(l3_policy_id=l3p['id'], shared=True)
self.assertEqual(True, l2p['l2_policy']['shared'])
self.assertTrue(l2p['l2_policy']['shared'])
def test_shared_l2_policy_update(self):
l2p = self._create_l2_policy_on_shared()
@ -364,11 +364,11 @@ class TestPolicyRuleSet(GroupPolicyPluginTestCase):
# Verify shared policy_rule_set created with shared rules
prs = self._create_policy_rule_set_on_shared(
shared=True, expected_res_status=201)
self.assertEqual(True, prs['shared'])
self.assertTrue(prs['shared'])
# Verify non shared policy_rule_set created with shared rules
prs = self._create_policy_rule_set_on_shared(expected_res_status=201)
self.assertEqual(False, prs['shared'])
self.assertFalse(prs['shared'])
def test_shared_policy_rule_set_update(self):
prs = self._create_policy_rule_set_on_shared()
@ -417,11 +417,11 @@ class TestPolicyRule(GroupPolicyPluginTestCase):
# Verify shared rule created with shared actions and classifier
pr = self._create_rule_on_shared(shared=True,
expected_res_status=201)
self.assertEqual(True, pr['shared'])
self.assertTrue(pr['shared'])
# Verify non shared rule create with shared actions and classifier
pr = self._create_rule_on_shared(expected_res_status=201)
self.assertEqual(False, pr['shared'])
self.assertFalse(pr['shared'])
def test_shared_rule_update(self):
pr = self._create_rule_on_shared()
@ -508,7 +508,7 @@ class TestPolicyTargetGroup(GroupPolicyPluginTestCase):
req = self.new_delete_request('policy_target_groups', ptg['id'],
self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
self.assertRaises(gpolicy.PolicyTargetGroupNotFound,
self.plugin.get_policy_target_group, ctx, ptg['id'])
@ -542,33 +542,33 @@ class TestPolicyTargetGroup(GroupPolicyPluginTestCase):
# policy_rule_sets
ptg = self.create_policy_target_group(
l2_policy_id=l2p['id'], expected_res_status=201)
self.assertEqual(False, ptg['policy_target_group']['shared'])
self.assertFalse(ptg['policy_target_group']['shared'])
ptg = self.create_policy_target_group(
l2_policy_id=l2p['id'],
provided_policy_rule_sets={prs['id']: '', ctns['id']: ''},
consumed_policy_rule_sets={prs['id']: '', ctns['id']: ''},
expected_res_status=201)
self.assertEqual(False, ptg['policy_target_group']['shared'])
self.assertFalse(ptg['policy_target_group']['shared'])
ptg = self.create_policy_target_group(
l2_policy_id=l2p['id'], network_service_policy_id=nsp['id'],
expected_res_status=201)
self.assertEqual(False, ptg['policy_target_group']['shared'])
self.assertFalse(ptg['policy_target_group']['shared'])
ptg = self.create_policy_target_group(
l2_policy_id=l2p['id'], network_service_policy_id=nspns['id'],
expected_res_status=201)
self.assertEqual(False, ptg['policy_target_group']['shared'])
self.assertFalse(ptg['policy_target_group']['shared'])
# Verify shared True created without errors by providing/consuming
# shared policy_rule_sets
ptg = self.create_policy_target_group(
l2_policy_id=l2p['id'], shared=True,
expected_res_status=201)
self.assertEqual(True, ptg['policy_target_group']['shared'])
self.assertTrue(ptg['policy_target_group']['shared'])
ptg = self.create_policy_target_group(
l2_policy_id=l2p['id'], provided_policy_rule_sets={prs['id']: ''},
consumed_policy_rule_sets={prs['id']: ''}, shared=True,
expected_res_status=201)
self.assertEqual(True, ptg['policy_target_group']['shared'])
self.assertTrue(ptg['policy_target_group']['shared'])
# Verify not shared created without error on not shared l2p
self.create_policy_target_group(l2_policy_id=l2pns['id'],
@ -671,10 +671,10 @@ class TestExternalSegment(GroupPolicyPluginTestCase):
def test_shared_es_create(self):
# Verify default is False
es = self.create_external_segment()
self.assertEqual(False, es['external_segment']['shared'])
self.assertFalse(es['external_segment']['shared'])
# Verify shared True created without errors
es = self.create_external_segment(shared=True)
self.assertEqual(True, es['external_segment']['shared'])
self.assertTrue(es['external_segment']['shared'])
def test_shared_es_update(self):
es = self.create_external_segment()['external_segment']
@ -727,26 +727,26 @@ class TestExternalPolicy(GroupPolicyPluginTestCase):
# policy_rule_sets
ep = self.create_external_policy(
external_segments=[es['id']], expected_res_status=201)
self.assertEqual(False, ep['external_policy']['shared'])
self.assertFalse(ep['external_policy']['shared'])
ep = self.create_external_policy(
external_segments=[es['id']],
provided_policy_rule_sets={prs['id']: '', prsns['id']: ''},
consumed_policy_rule_sets={prs['id']: '', prsns['id']: ''},
expected_res_status=201)
self.assertEqual(False, ep['external_policy']['shared'])
self.assertFalse(ep['external_policy']['shared'])
# Verify shared True created without errors by providing/consuming
# shared policy_rule_sets
ep = self.create_external_policy(
external_segments=[es['id']], shared=True,
expected_res_status=201)
self.assertEqual(True, ep['external_policy']['shared'])
self.assertTrue(ep['external_policy']['shared'])
ep = self.create_external_policy(
external_segments=[es['id']],
provided_policy_rule_sets={prs['id']: ''},
consumed_policy_rule_sets={prs['id']: ''}, shared=True,
expected_res_status=201)
self.assertEqual(True, ep['external_policy']['shared'])
self.assertTrue(ep['external_policy']['shared'])
# Verify not shared created without error on not shared es
self.create_external_policy(


@ -64,19 +64,19 @@ class TestImplicitL2Policy(ImplicitPolicyTestCase):
# policy.
req = self.new_delete_request('policy_target_groups', ptg1_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('l2_policies', l2p1_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNotFound.code)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
# Verify deleting 2nd policy_target group does cleanup its L2
# policy.
req = self.new_delete_request('policy_target_groups', ptg2_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('l2_policies', l2p2_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNotFound.code)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
def test_implicit_shared_lifecycle_negative(self):
# Create PTG non shared
@ -103,10 +103,10 @@ class TestImplicitL2Policy(ImplicitPolicyTestCase):
# Verify deleting policy_target group does not cleanup L2 policy.
req = self.new_delete_request('policy_target_groups', ptg_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('l2_policies', l2p_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
def test_delete_from_implicit(self):
# Create policy_target group with explicit L2 policy.
@ -122,7 +122,7 @@ class TestImplicitL2Policy(ImplicitPolicyTestCase):
# Verify old L2 policy was not cleaned up.
req = self.new_show_request('l2_policies', l2p1_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
class TestImplicitL3Policy(ImplicitPolicyTestCase):
@ -151,18 +151,18 @@ class TestImplicitL3Policy(ImplicitPolicyTestCase):
# Verify deleting 1st L2 policy does not cleanup L3 policy.
req = self.new_delete_request('l2_policies', l2p1_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('l3_policies', l3p_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
# Verify deleting last L2 policy does cleanup L3 policy.
req = self.new_delete_request('l2_policies', l2p2_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('l3_policies', l3p_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNotFound.code)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
def test_impicit_lifecycle(self):
self._test_implicit_lifecycle()
@ -181,10 +181,10 @@ class TestImplicitL3Policy(ImplicitPolicyTestCase):
# Verify deleting L2 policy does not cleanup L3 policy.
req = self.new_delete_request('l2_policies', l2p_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('l3_policies', l3p_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
def test_unowned_default_lifecycle(self):
# Create L2 policy with unowned default L3 policy.
@ -197,10 +197,10 @@ class TestImplicitL3Policy(ImplicitPolicyTestCase):
# Verify deleting L2 policy does not cleanup L3 policy.
req = self.new_delete_request('l2_policies', l2p_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('l3_policies', l3p_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
def test_single_default_policy(self):
# Verify only one default L3 policy can be created per tenant.
@ -230,15 +230,15 @@ class TestImplicitL3Policy(ImplicitPolicyTestCase):
# Verify old L3 policy was cleaned up
req = self.new_show_request('l3_policies', l3p1_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNotFound.code)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
# Verify deleting L2 policy does not cleanup new L3 policy.
req = self.new_delete_request('l2_policies', l2p_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('l3_policies', l3p2_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
def test_update_to_implicit(self):
# Create L2 policy with explicit L3 policy.
@ -262,15 +262,15 @@ class TestImplicitL3Policy(ImplicitPolicyTestCase):
# Verify old L3 policy was not cleaned up.
req = self.new_show_request('l3_policies', l3p1_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
# Verify deleting L2 policy does cleanup new L3 policy.
req = self.new_delete_request('l2_policies', l2p_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('l3_policies', l3p2_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNotFound.code)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
class TestImplicitExternalSegment(ImplicitPolicyTestCase):

@ -44,6 +44,7 @@ from gbpservice.neutron.services.grouppolicy.drivers import nsp_manager
from gbpservice.neutron.services.grouppolicy.drivers import resource_mapping
from gbpservice.neutron.services.servicechain.plugins.msc import (
config as sc_cfg)
from gbpservice.neutron.tests.unit.db.grouppolicy import test_group_policy_db
from gbpservice.neutron.tests.unit.services.grouppolicy import (
test_grouppolicy_plugin as test_plugin)
@ -59,7 +60,18 @@ class NoL3NatSGTestPlugin(
test_l3.TestNoL3NatPlugin,
test_securitygroup.SecurityGroupTestPlugin):
_supported_extension_aliases = ["external-net", "security-group"]
supported_extension_aliases = ["external-net", "security-group"] + (
test_group_policy_db.UNSUPPORTED_REQUIRED_EXTS)
# Note that the following private attribute definition should not
# be required, however the following line of code in the resource
# mapping driver requires it:
# https://git.io/v2O8G
# hence we add it to this test plugin. In general, this is not a
# good thing since the Neutron plugin contract only requires definition
# of the "supported_extension_aliases" property. This currently works
# since the ML2 plugin also defines the private property
# "_supported_extensions_aliases".
_supported_extension_aliases = supported_extension_aliases
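# Illustrative sketch only (the names and call below are assumptions, not
# part of this change): the pattern the comment above refers to is a driver
# reading the core plugin's private attribute instead of the public
# property, roughly:
#
#     core_plugin = manager.NeutronManager.get_plugin()
#     if 'security-group' in core_plugin._supported_extension_aliases:
#         ...  # driver relies on the private list being defined
#
# which is why this test plugin mirrors the private name as well.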
CORE_PLUGIN = ('gbpservice.neutron.tests.unit.services.grouppolicy.'
@ -566,10 +578,10 @@ class TestPolicyTarget(ResourceMappingTestCase, TestClusterIdMixin):
# Verify deleting policy_target cleans up port.
req = self.new_delete_request('policy_targets', pt_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('ports', port_id, fmt=self.fmt)
res = req.get_response(self.api)
self.assertEqual(res.status_int, webob.exc.HTTPNotFound.code)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
def test_explicit_port_lifecycle(self):
# Create policy_target group.
@ -590,10 +602,10 @@ class TestPolicyTarget(ResourceMappingTestCase, TestClusterIdMixin):
# Verify deleting policy_target does not cleanup port.
req = self.new_delete_request('policy_targets', pt_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('ports', port_id, fmt=self.fmt)
res = req.get_response(self.api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
def test_explicit_port_deleted(self):
# Create policy_target group.
@ -612,11 +624,11 @@ class TestPolicyTarget(ResourceMappingTestCase, TestClusterIdMixin):
req = self.new_delete_request('ports', port_id)
res = req.get_response(self.api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
# Verify deleting policy_target does not cleanup port.
req = self.new_delete_request('policy_targets', pt_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
def test_missing_ptg_rejected(self):
data = self.create_policy_target(
@ -923,10 +935,10 @@ class TestPolicyTargetGroup(ResourceMappingTestCase):
# Verify deleting policy_target group cleans up subnet.
req = self.new_delete_request('policy_target_groups', ptg_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('subnets', subnet_id, fmt=self.fmt)
res = req.get_response(self.api)
self.assertEqual(res.status_int, webob.exc.HTTPNotFound.code)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
# TODO(rkukura): Verify implicit subnet was removed as router
# interface.
@ -966,10 +978,10 @@ class TestPolicyTargetGroup(ResourceMappingTestCase):
# Verify deleting policy_target group does not cleanup subnet.
req = self.new_delete_request('policy_target_groups', ptg_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('subnets', subnet_id, fmt=self.fmt)
res = req.get_response(self.api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
# TODO(rkukura): Verify explicit subnet was removed as
# router interface.
@ -1002,7 +1014,7 @@ class TestPolicyTargetGroup(ResourceMappingTestCase):
req = self.new_update_request('policy_target_groups', data,
ptg_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
def test_add_subnet_negative(self):
# Create L2P
@ -1114,7 +1126,7 @@ class TestPolicyTargetGroup(ResourceMappingTestCase):
{'provided_policy_rule_sets': {policy_rule_set_id: None}}}
req = self.new_update_request('policy_target_groups', data, ptg_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
def test_default_security_group_egress_rules(self):
# Create PTG and retrieve self subnet
@ -1348,10 +1360,10 @@ class TestL2Policy(ResourceMappingTestCase):
# Verify deleting L2 policy cleans up network.
req = self.new_delete_request('l2_policies', l2p_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('networks', network_id, fmt=self.fmt)
res = req.get_response(self.api)
self.assertEqual(res.status_int, webob.exc.HTTPNotFound.code)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
def _test_explicit_network_lifecycle(self, shared=False):
# Create L2 policy with explicit network.
@ -1365,10 +1377,10 @@ class TestL2Policy(ResourceMappingTestCase):
# Verify deleting L2 policy does not cleanup network.
req = self.new_delete_request('l2_policies', l2p_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('networks', network_id, fmt=self.fmt)
res = req.get_response(self.api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
def test_implicit_network_lifecycle(self):
self._test_implicit_network_lifecycle()
@ -1467,10 +1479,10 @@ class TestL3Policy(ResourceMappingTestCase):
# Verify deleting L3 policy cleans up router.
req = self.new_delete_request('l3_policies', l3p_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('routers', router_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNotFound.code)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
def test_explicit_router_lifecycle(self):
# Create L3 policy with explicit router.
@ -1487,10 +1499,10 @@ class TestL3Policy(ResourceMappingTestCase):
# Verify deleting L3 policy does not cleanup router.
req = self.new_delete_request('l3_policies', l3p_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
req = self.new_show_request('routers', router_id, fmt=self.fmt)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
def test_multiple_routers_rejected(self):
# Verify update l3 policy with explicit router rejected.
@ -1890,7 +1902,7 @@ class TestPolicyRuleSet(ResourceMappingTestCase):
{'provided_policy_rule_sets': {policy_rule_set2_id: None}}}
req = self.new_update_request('policy_target_groups', data, ptg2_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
# set ptg1 to provide policy_rule_set1 and consume policy_rule_set2
# policy_rule_set2 now maps to SG which has ptg2's subnet as CIDR on
# rules
@ -1898,7 +1910,7 @@ class TestPolicyRuleSet(ResourceMappingTestCase):
{'consumed_policy_rule_sets': {policy_rule_set2_id: None}}}
req = self.new_update_request('policy_target_groups', data, ptg1_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
port_id = pt1['policy_target']['port_id']
res = self.new_show_request('ports', port_id)
port = self.deserialize(self.fmt, res.get_response(self.api))
@ -1952,7 +1964,7 @@ class TestPolicyRuleSet(ResourceMappingTestCase):
{'policy_classifier_id': classifier2_id}}
req = self.new_update_request('policy_rules', data, policy_rule_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
port_id = pt['policy_target']['port_id']
res = self.new_show_request('ports', port_id)
port = self.deserialize(self.fmt, res.get_response(self.api))
@ -2000,7 +2012,7 @@ class TestPolicyRuleSet(ResourceMappingTestCase):
req = self.new_update_request('policy_classifiers', data,
classifier_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
port_id = pt['policy_target']['port_id']
res = self.new_show_request('ports', port_id)
port = self.deserialize(self.fmt, res.get_response(self.api))
@ -2041,7 +2053,7 @@ class TestPolicyRuleSet(ResourceMappingTestCase):
req = self.new_update_request('policy_classifiers', data,
classifier_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPOk.code)
self.assertEqual(webob.exc.HTTPOk.code, res.status_int)
self._verify_prs_rules(policy_rule_set_id)
@ -2568,7 +2580,7 @@ class TestServiceChain(ResourceMappingTestCase):
self._verify_prs_rules(prs2['id'])
sc_instances_new = self._list(SERVICECHAIN_INSTANCES)
self.assertEqual(sc_instances, sc_instances_new)
self.assertEqual(sc_instance_update.call_args_list, [])
self.assertEqual([], sc_instance_update.call_args_list)
# update with a new redirect ruleset and verify that the instance is
# updated with the new classifier
@ -2605,7 +2617,7 @@ class TestServiceChain(ResourceMappingTestCase):
self._verify_prs_rules(prs2['id'])
sc_instances_new = self._list(SERVICECHAIN_INSTANCES)
self.assertEqual([], sc_instances_new['servicechain_instances'])
self.assertEqual(sc_instance_update.call_args_list, [])
self.assertEqual([], sc_instance_update.call_args_list)
# Verify that PTG update removing prs cleansup the chain instances
self._verify_ptg_prs_unset_cleansup_chain(provider_ptg, [prs1['id']])
@ -2656,7 +2668,7 @@ class TestServiceChain(ResourceMappingTestCase):
expected_provider_ptg_ids = set([provider_ptg, provider_ptg_new])
self.assertEqual(expected_provider_ptg_ids,
sc_instances_provider_ptg_ids)
self.assertEqual(sc_instance_update.call_args_list, [])
self.assertEqual([], sc_instance_update.call_args_list)
def test_action_spec_value_update(self):
scs1_id = self._create_servicechain_spec()
@ -3070,7 +3082,7 @@ class TestServiceChain(ResourceMappingTestCase):
req = self.new_delete_request(
'policy_target_groups', provider_ptg1_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
sc_instances = self._list(SERVICECHAIN_INSTANCES)
self.assertEqual(1, len(sc_instances['servicechain_instances']))
@ -3234,13 +3246,13 @@ class TestServiceChain(ResourceMappingTestCase):
req = self.new_delete_request(
'policy_target_groups', consumer_ptg1_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
sc_instances = self._list(SERVICECHAIN_INSTANCES)
self.assertEqual(2, len(sc_instances['servicechain_instances']))
req = self.new_delete_request('policy_target_groups', provider_ptg1_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
sc_instances = self._list(SERVICECHAIN_INSTANCES)
self.assertEqual(1, len(sc_instances['servicechain_instances']))
sc_instance = sc_instances['servicechain_instances'][0]
@ -4002,7 +4014,7 @@ class TestNetworkServicePolicy(ResourceMappingTestCase):
expected_res_status=webob.exc.HTTPCreated.code)
req = self.new_delete_request('nat_pools', nat_pool['id'])
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, webob.exc.HTTPNoContent.code)
self.assertEqual(webob.exc.HTTPNoContent.code, res.status_int)
def test_update_nsp_nat_pool_after_pt_create(self):
routes = [{'destination': '0.0.0.0/0', 'nexthop': None}]

@ -30,17 +30,28 @@ from gbpservice.neutron.services.servicechain.plugins.ncp import (
context as ncp_context)
from gbpservice.neutron.services.servicechain.plugins.ncp import (
exceptions as exc)
from gbpservice.neutron.services.servicechain.plugins.ncp import (
plugin as ncp_plugin)
import gbpservice.neutron.services.servicechain.plugins.ncp.config # noqa
from gbpservice.neutron.services.servicechain.plugins.ncp.node_drivers import (
dummy_driver as dummy_driver)
from gbpservice.neutron.tests.unit.db.grouppolicy import test_group_policy_db
from gbpservice.neutron.tests.unit.services.grouppolicy import (
test_resource_mapping as test_gp_driver)
from gbpservice.neutron.tests.unit.services.servicechain import (
test_servicechain_plugin as test_base)
SC_PLUGIN_KLASS = (
"gbpservice.neutron.services.servicechain.plugins.ncp.plugin."
"NodeCompositionPlugin")
CORE_PLUGIN = ('gbpservice.neutron.tests.unit.services.grouppolicy.'
'test_resource_mapping.NoL3NatSGTestPlugin')
class ServiceChainNCPTestPlugin(ncp_plugin.NodeCompositionPlugin):
supported_extension_aliases = ['servicechain'] + (
test_group_policy_db.UNSUPPORTED_REQUIRED_EXTS)
path_prefix = "/servicechain"
SC_PLUGIN_KLASS = (ServiceChainNCPTestPlugin.__module__ + '.' +
ServiceChainNCPTestPlugin.__name__)
CORE_PLUGIN = test_gp_driver.CORE_PLUGIN
GP_PLUGIN_KLASS = (
"gbpservice.neutron.services.grouppolicy.plugin.GroupPolicyPlugin"
)
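# For reference, a minimal sketch (Dummy is a hypothetical stand-in) of why
# building SC_PLUGIN_KLASS from __module__ and __name__ works: the resulting
# dotted path can be imported back into the very same class by the usual
# plugin-loading helpers:
#
#     from oslo_utils import importutils
#
#     class Dummy(object):
#         pass
#
#     path = Dummy.__module__ + '.' + Dummy.__name__
#     assert importutils.import_class(path) is Dummy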

@ -189,7 +189,11 @@ class TestImplicitServiceChains(ResourceMappingStitchingPlumberGBPTestCase,
for target in targets:
pt = self.show_policy_target(
target.policy_target_id)['policy_target']
# Being service targets, port filter and hybrid plug will be false
port = self._bind_port_to_host(pt['port_id'], 'host')['port']
self.assertTrue(port['binding:vif_details']['port_filter'])
self.assertTrue(port['binding:vif_details']['ovs_hybrid_plug'])
# REVISIT: On account of the following commit:
# https://git.io/v2czD
# the hybrid plugging is disabled. Assuming that we are just
# checking for the default value here, this should be fine. Else we
# need to revisit.
self.assertFalse(port['binding:vif_details']['ovs_hybrid_plug'])

View File

@ -18,26 +18,36 @@ from neutron import context as n_ctx
from oslo_config import cfg
from oslo_serialization import jsonutils
from gbpservice.neutron.services.servicechain.plugins.msc import (
plugin as msc_plugin)
from gbpservice.neutron.services.servicechain.plugins.msc import context
from gbpservice.neutron.tests.unit.db.grouppolicy import (
test_servicechain_db as test_servicechain_db)
from gbpservice.neutron.tests.unit.db.grouppolicy import test_group_policy_db
cfg.CONF.import_opt(
'servicechain_drivers',
'gbpservice.neutron.services.servicechain.plugins.msc.config',
group='servicechain')
SC_PLUGIN_KLASS = (
"gbpservice.neutron.services.servicechain.plugins.msc.plugin."
"ServiceChainPlugin")
class ServiceChainMSCTestPlugin(msc_plugin.ServiceChainPlugin):
supported_extension_aliases = ['servicechain'] + (
test_group_policy_db.UNSUPPORTED_REQUIRED_EXTS)
path_prefix = "/servicechain"
SC_PLUGIN_KLASS = (ServiceChainMSCTestPlugin.__module__ + '.' +
ServiceChainMSCTestPlugin.__name__)
class ServiceChainPluginTestCase(test_servicechain_db.ServiceChainDbTestCase):
def setUp(self, core_plugin=None, sc_plugin=None, gp_plugin=None):
if not sc_plugin:
sc_plugin = SC_PLUGIN_KLASS
super(ServiceChainPluginTestCase, self).setUp(core_plugin=core_plugin,
sc_plugin=sc_plugin,
sc_plugin=sc_plugin or
SC_PLUGIN_KLASS,
gp_plugin=gp_plugin)

@ -472,7 +472,7 @@ class GroupPolicyExtensionTestCase(test_extensions_base.ExtensionTestCase):
instance.get_policy_actions.assert_called_once_with(mock.ANY,
fields=mock.ANY,
filters=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
def test_get_policy_action(self):
policy_action_id = _uuid()
@ -488,7 +488,7 @@ class GroupPolicyExtensionTestCase(test_extensions_base.ExtensionTestCase):
instance.get_policy_action.assert_called_once_with(mock.ANY,
policy_action_id,
fields=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('policy_action', res)
self.assertEqual(expected_value, res['policy_action'])
@ -510,7 +510,7 @@ class GroupPolicyExtensionTestCase(test_extensions_base.ExtensionTestCase):
instance.update_policy_action.assert_called_once_with(
mock.ANY, policy_action_id, policy_action=update_data)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('policy_action', res)
self.assertEqual(expected_value, res['policy_action'])
@ -566,7 +566,7 @@ class GroupPolicyExtensionTestCase(test_extensions_base.ExtensionTestCase):
instance.get_policy_classifiers.assert_called_once_with(
mock.ANY, fields=mock.ANY, filters=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
def test_get_policy_classifier(self):
policy_classifier_id = _uuid()
@ -581,7 +581,7 @@ class GroupPolicyExtensionTestCase(test_extensions_base.ExtensionTestCase):
instance.get_policy_classifier.assert_called_once_with(
mock.ANY, policy_classifier_id, fields=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('policy_classifier', res)
self.assertEqual(expected_value, res['policy_classifier'])
@ -603,7 +603,7 @@ class GroupPolicyExtensionTestCase(test_extensions_base.ExtensionTestCase):
instance.update_policy_classifier.assert_called_once_with(
mock.ANY, policy_classifier_id, policy_classifier=update_data)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('policy_classifier', res)
self.assertEqual(expected_value, res['policy_classifier'])
@ -661,7 +661,7 @@ class GroupPolicyExtensionTestCase(test_extensions_base.ExtensionTestCase):
instance.get_policy_rules.assert_called_once_with(mock.ANY,
fields=mock.ANY,
filters=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
def test_get_policy_rule(self):
policy_rule_id = _uuid()
@ -676,7 +676,7 @@ class GroupPolicyExtensionTestCase(test_extensions_base.ExtensionTestCase):
instance.get_policy_rule.assert_called_once_with(
mock.ANY, policy_rule_id, fields=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('policy_rule', res)
self.assertEqual(expected_value, res['policy_rule'])
@ -698,7 +698,7 @@ class GroupPolicyExtensionTestCase(test_extensions_base.ExtensionTestCase):
instance.update_policy_rule.assert_called_once_with(
mock.ANY, policy_rule_id, policy_rule=update_data)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('policy_rule', res)
self.assertEqual(expected_value, res['policy_rule'])
@ -717,7 +717,7 @@ class GroupPolicyExtensionTestCase(test_extensions_base.ExtensionTestCase):
content_type='application/%s' % self.fmt)
self.instance.create_policy_rule_set.assert_called_once_with(
mock.ANY, policy_rule_set=default_data)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
self.assertEqual(exc.HTTPCreated.code, res.status_int)
res = self.deserialize(res)
self.assertIn('policy_rule_set', res)
self.assertEqual(expected_value, res['policy_rule_set'])
@ -754,7 +754,7 @@ class GroupPolicyExtensionTestCase(test_extensions_base.ExtensionTestCase):
instance.get_policy_rule_sets.assert_called_once_with(
mock.ANY, fields=mock.ANY, filters=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
def test_get_policy_rule_set(self):
policy_rule_set_id = _uuid()
@ -769,7 +769,7 @@ class GroupPolicyExtensionTestCase(test_extensions_base.ExtensionTestCase):
instance.get_policy_rule_set.assert_called_once_with(
mock.ANY, policy_rule_set_id, fields=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('policy_rule_set', res)
self.assertEqual(expected_value, res['policy_rule_set'])
@ -791,7 +791,7 @@ class GroupPolicyExtensionTestCase(test_extensions_base.ExtensionTestCase):
instance.update_policy_rule_set.assert_called_once_with(
mock.ANY, policy_rule_set_id, policy_rule_set=update_data)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('policy_rule_set', res)
self.assertEqual(expected_value, res['policy_rule_set'])
@ -1053,12 +1053,12 @@ class TestGroupPolicyAttributeConverters(base.BaseTestCase):
gp.convert_action_to_case_insensitive('ALLOW'), 'allow')
self.assertEqual(gp.convert_action_to_case_insensitive('In'), 'in')
self.assertEqual(gp.convert_action_to_case_insensitive('bi'), 'bi')
self.assertEqual(gp.convert_action_to_case_insensitive(''), '')
self.assertEqual('', gp.convert_action_to_case_insensitive(''))
def test_convert_port_to_string(self):
self.assertEqual(gp.convert_port_to_string(100), '100')
self.assertEqual(gp.convert_port_to_string('200'), '200')
self.assertEqual(gp.convert_port_to_string(''), '')
self.assertEqual('', gp.convert_port_to_string(''))
def test_convert_protocol_check_valid_protocols(self):
self.assertEqual(gp.convert_protocol('tcp'), n_consts.PROTO_NAME_TCP)

@ -59,7 +59,7 @@ class ServiceChainExtensionTestCase(test_extensions_base.ExtensionTestCase):
self.instance.create_servicechain_node.assert_called_once_with(
mock.ANY, servicechain_node=default_data)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
self.assertEqual(exc.HTTPCreated.code, res.status_int)
res = self.deserialize(res)
self.assertIn('servicechain_node', res)
self.assertEqual(expected_value, res['servicechain_node'])
@ -101,7 +101,7 @@ class ServiceChainExtensionTestCase(test_extensions_base.ExtensionTestCase):
self.instance.get_servicechain_nodes.assert_called_once_with(
mock.ANY, fields=mock.ANY, filters=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('servicechain_nodes', res)
self.assertEqual(expected_value, res['servicechain_nodes'])
@ -118,7 +118,7 @@ class ServiceChainExtensionTestCase(test_extensions_base.ExtensionTestCase):
self.instance.get_servicechain_node.assert_called_once_with(
mock.ANY, servicechain_node_id, fields=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('servicechain_node', res)
self.assertEqual(expected_value, res['servicechain_node'])
@ -138,7 +138,7 @@ class ServiceChainExtensionTestCase(test_extensions_base.ExtensionTestCase):
self.instance.update_servicechain_node.assert_called_once_with(
mock.ANY, servicechain_node_id, servicechain_node=update_data)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('servicechain_node', res)
self.assertEqual(expected_value, res['servicechain_node'])
@ -158,7 +158,7 @@ class ServiceChainExtensionTestCase(test_extensions_base.ExtensionTestCase):
self.instance.create_servicechain_spec.assert_called_once_with(
mock.ANY, servicechain_spec=default_data)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
self.assertEqual(exc.HTTPCreated.code, res.status_int)
res = self.deserialize(res)
self.assertIn('servicechain_spec', res)
self.assertEqual(expected_value, res['servicechain_spec'])
@ -197,7 +197,7 @@ class ServiceChainExtensionTestCase(test_extensions_base.ExtensionTestCase):
self.instance.get_servicechain_specs.assert_called_once_with(
mock.ANY, fields=mock.ANY, filters=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('servicechain_specs', res)
self.assertEqual(expected_value, res['servicechain_specs'])
@ -213,7 +213,7 @@ class ServiceChainExtensionTestCase(test_extensions_base.ExtensionTestCase):
self.instance.get_servicechain_spec.assert_called_once_with(
mock.ANY, servicechain_spec_id, fields=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('servicechain_spec', res)
self.assertEqual(expected_value, res['servicechain_spec'])
@ -233,7 +233,7 @@ class ServiceChainExtensionTestCase(test_extensions_base.ExtensionTestCase):
self.instance.update_servicechain_spec.assert_called_once_with(
mock.ANY, servicechain_spec_id, servicechain_spec=update_data)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('servicechain_spec', res)
self.assertEqual(expected_value, res['servicechain_spec'])
@ -254,7 +254,7 @@ class ServiceChainExtensionTestCase(test_extensions_base.ExtensionTestCase):
self.instance.create_servicechain_instance.assert_called_once_with(
mock.ANY, servicechain_instance=default_data)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
self.assertEqual(exc.HTTPCreated.code, res.status_int)
res = self.deserialize(res)
self.assertIn('servicechain_instance', res)
self.assertEqual(expected_value, res['servicechain_instance'])
@ -299,7 +299,7 @@ class ServiceChainExtensionTestCase(test_extensions_base.ExtensionTestCase):
self.instance.get_servicechain_instances.assert_called_once_with(
mock.ANY, fields=mock.ANY, filters=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('servicechain_instances', res)
self.assertEqual(expected_value, res['servicechain_instances'])
@ -315,7 +315,7 @@ class ServiceChainExtensionTestCase(test_extensions_base.ExtensionTestCase):
self.instance.get_servicechain_instance.assert_called_once_with(
mock.ANY, servicechain_instance_id, fields=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('servicechain_instance', res)
self.assertEqual(expected_value, res['servicechain_instance'])
@ -336,7 +336,7 @@ class ServiceChainExtensionTestCase(test_extensions_base.ExtensionTestCase):
self.instance.update_servicechain_instance.assert_called_once_with(
mock.ANY, servicechain_instance_id,
servicechain_instance=update_data)
self.assertEqual(res.status_int, exc.HTTPOk.code)
self.assertEqual(exc.HTTPOk.code, res.status_int)
res = self.deserialize(res)
self.assertIn('servicechain_instance', res)
self.assertEqual(expected_value, res['servicechain_instance'])

@ -38,19 +38,23 @@ function init_gbpservice {
# install_gbpservice() - Collect source and prepare
function install_gbpservice {
mv $GBPSERVICE_DIR/test-requirements.txt $GBPSERVICE_DIR/_test-requirements.txt
pip_install -e $GBPSERVICE_DIR
#pip_install -e $GBP_DIR
#sed -i '/gbptestneutron/d' $GBPSERVICE_DIR/test-requirements.txt
#setup_develop $GBPSERVICE_DIR
\cp -rf $GBPSERVICE_DIR/etc/policy.json $Q_POLICY_FILE
sed -i 's/"context_is_admin": "role:admin"/"context_is_admin": "role:admin or user_name:neutron"/g' $Q_POLICY_FILE
mv $GBPSERVICE_DIR/_test-requirements.txt $GBPSERVICE_DIR/test-requirements.txt
}
# install_gbpclient() - Collect source and prepare
function install_gbpclient {
git_clone $GBPCLIENT_REPO $GBPCLIENT_DIR $GBPCLIENT_BRANCH
git_clone $GBPCLIENT_REPO $GBPCLIENT_DIR $GBPCLIENT_BRANCH
mv $GBPCLIENT_DIR/test-requirements.txt $GBPCLIENT_DIR/_test-requirements.txt
setup_develop $GBPCLIENT_DIR
sudo install -D -m 0644 -o $STACK_USER {$GBPCLIENT_DIR/tools/,/etc/bash_completion.d/}gbp.bash_completion
mv $GBPCLIENT_DIR/_test-requirements.txt $GBPCLIENT_DIR/test-requirements.txt
}
# Restore xtrace

@ -15,14 +15,14 @@ Q_SERVICE_PLUGIN_CLASSES=neutron.services.l3_router.l3_router_plugin.L3RouterPlu
GIT_BASE=${GIT_BASE:-git://git.openstack.org}
GBPSERVICE_REPO=${GIT_BASE}/openstack/group-based-policy.git
GBPSERVICE_BRANCH=stable/liberty
#GBPSERVICE_BRANCH=stable/liberty
#GBPSERVICE_REPO=https://review.openstack.org/openstack/group-based-policy
#GBPSERVICE_BRANCH=refs/changes/54/240954/47
GBPCLIENT_REPO=${GIT_BASE}/openstack/python-group-based-policy-client.git
GBPCLIENT_BRANCH=stable/liberty
#GBPCLIENT_REPO=https://review.openstack.org/openstack/python-group-based-policy-client
#GBPCLIENT_BRANCH=refs/changes/73/261773/1
#GBPCLIENT_REPO=${GIT_BASE}/openstack/python-group-based-policy-client.git
#GBPCLIENT_BRANCH=stable/liberty
GBPCLIENT_REPO=https://review.openstack.org/openstack/python-group-based-policy-client
GBPCLIENT_BRANCH=refs/changes/85/281585/2
GBPUI_REPO=${GIT_BASE}/openstack/group-based-policy-ui.git
GBPUI_BRANCH=stable/liberty
@ -37,10 +37,10 @@ GBPHEAT_BRANCH=stable/liberty
# Enable neutron for group-policy-poc
# -----------------------------------
disable_service n-net
disable_service h-eng
disable_service h-api
disable_service h-api-cfn
disable_service h-api-cw
#disable_service h-eng
#disable_service h-api
#disable_service h-api-cfn
#disable_service h-api-cw
enable_service q-svc
enable_service q-agt
enable_service q-dhcp
@ -69,9 +69,9 @@ RECLONE=True
[neutron]
allow_duplicate_networks = True
[[post-config|/etc/heat/heat.conf]]
[DEFAULT]
plugin_dirs=/opt/stack/gbpautomation/gbpautomation/heat
#[[post-config|/etc/heat/heat.conf]]
#[DEFAULT]
#plugin_dirs=/opt/stack/gbpautomation/gbpautomation/heat
[[post-config|/etc/neutron/neutron.conf]]
[group_policy]

@ -10,7 +10,7 @@ XTRACE=$(set +o | grep xtrace)
function prepare_gbp_devstack {
cd $TOP_DIR
sudo git checkout stable/liberty
sudo git checkout master
sudo cp $CONTRIB_DIR/devstack/local.conf $TOP_DIR/local.conf
sudo cp $CONTRIB_DIR/devstack/exercises/*.sh $TOP_DIR/exercises/
sudo cp $CONTRIB_DIR/devstack/lib/* $TOP_DIR/lib/

@ -17,6 +17,14 @@
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr'],
setup_requires=['pbr>=1.8'],
pbr=True)

@ -3,32 +3,31 @@
# process, which may cause wedges in the gate later.
setuptools>=19.2
-e git+https://git.openstack.org/openstack/neutron.git@stable/liberty#egg=neutron
-e git+https://git.openstack.org/openstack/neutron.git@master#egg=neutron
-e git+https://github.com/noironetworks/apic-ml2-driver.git@master#egg=apic_ml2
-e git+https://git.openstack.org/openstack/python-heatclient@stable/liberty#egg=heatclient
-e git+https://git.openstack.org/openstack/python-keystoneclient@stable/liberty#egg=keystoneclient
hacking>=0.9.2,<0.10
-e git+https://git.openstack.org/openstack/python-heatclient@master#egg=heatclient
-e git+https://git.openstack.org/openstack/python-keystoneclient@master#egg=keystoneclient
hacking<0.11,>=0.10.0
cliff>=1.14.0 # Apache-2.0
coverage>=3.6
fixtures>=1.3.1
cliff>=1.15.0 # Apache-2.0
coverage>=3.6 # Apache-2.0
fixtures>=1.3.1 # Apache-2.0/BSD
httplib2>=0.7.5
mock>=1.2
python-subunit>=0.0.18
requests-mock>=0.6.0 # Apache-2.0
sphinx!=1.2.0,!=1.3b1,<1.3,>=1.1.2
mock>=1.2 # BSD
python-subunit>=0.0.18 # Apache-2.0/BSD
requests-mock>=0.7.0 # Apache-2.0
sphinx!=1.2.0,!=1.3b1,<1.3,>=1.1.2 # BSD
ordereddict
testrepository>=0.0.18
testtools>=1.4.0
testresources>=0.2.4
testscenarios>=0.4
WebTest>=2.0
testrepository>=0.0.18 # Apache-2.0/BSD
testtools>=1.4.0 # MIT
testresources>=0.2.4 # Apache-2.0/BSD
testscenarios>=0.4 # Apache-2.0/BSD
WebTest>=2.0 # MIT
oslotest>=1.10.0 # Apache-2.0
os-testr>=0.1.0
tempest-lib>=0.8.0
ddt>=0.7.0
pylint==1.4.4 # GNU GPL v2
os-testr>=0.4.1 # Apache-2.0
tempest-lib>=0.13.0 # Apache-2.0
ddt>=1.0.1 # MIT
pylint==1.4.5 # GNU GPL v2
reno>=0.1.1 # Apache2
-e git+https://github.com/noironetworks/python-opflex-agent.git@master#egg=python-opflexagent-agent
-e git+https://github.com/noironetworks/python-opflex-agent.git@sumit/mitaka-sync#egg=python-opflexagent-agent