Use converters and validators from neutron-lib
Related-Blueprint: neutron-lib
Change-Id: I6b9079e9e703c6fd75adbed3846e7257685433e8
(review: changes/93/307893/8)
parent
4148a347b3
commit
78fff41ee3
|
@ -13,15 +13,12 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import functools
|
||||
import re
|
||||
import sys
|
||||
|
||||
import netaddr
|
||||
from debtcollector import moves
|
||||
from neutron_lib.api import converters as lib_converters
|
||||
from neutron_lib.api import validators as lib_validators
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions as n_exc
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import uuidutils
|
||||
import six
|
||||
import webob.exc
|
||||
|
||||
|
@ -30,14 +27,12 @@ from neutron.common import _deprecate
|
|||
from neutron.common import constants as n_const
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)

# Constant defined here to avoid repeating the string literal in several
# modules.
SHARED = 'shared'

# Sentinel used by the range validator to mean "no limit" for a bound.
UNLIMITED = None
# Accessing UNLIMITED through the deprecation shim forwards to neutron-lib.
_deprecate._DeprecateSubset.and_also('UNLIMITED', lib_validators)

# TODO(HenryG): use DB field sizes (neutron-lib 0.1.1)
NAME_MAX_LEN = 255
TENANT_ID_MAX_LEN = 255
DESCRIPTION_MAX_LEN = 255
DEVICE_ID_MAX_LEN = 255
DEVICE_OWNER_MAX_LEN = 255
|
||||
|
||||
|
||||
def _lib(old_name):
    """Deprecate a function moved to neutron_lib.api.converters/validators."""
    # Resolution order: validator under the same (private) name, validator
    # under the public spelling (no leading underscore), then converter.
    target = getattr(lib_validators, old_name, None)
    if not target:
        target = getattr(lib_validators, old_name[1:], None)
    if not target:
        target = getattr(lib_converters, old_name, None)
    assert target
    return moves.moved_function(target, old_name, __name__,
                                message='moved to neutron_lib',
                                version='mitaka', removal_version='ocata')


# Backward-compatibility shims: each name below now lives in neutron-lib and
# emits a deprecation warning when called through this module.
_verify_dict_keys = _lib('_verify_dict_keys')
is_attr_set = _lib('is_attr_set')
_validate_list_of_items = _lib('_validate_list_of_items')
_validate_values = _lib('_validate_values')
_validate_not_empty_string_or_none = _lib('_validate_not_empty_string_or_none')
_validate_not_empty_string = _lib('_validate_not_empty_string')
_validate_string_or_none = _lib('_validate_string_or_none')
_validate_string = _lib('_validate_string')
validate_list_of_unique_strings = _lib('validate_list_of_unique_strings')
_validate_boolean = _lib('_validate_boolean')
_validate_range = _lib('_validate_range')
_validate_no_whitespace = _lib('_validate_no_whitespace')
_validate_mac_address = _lib('_validate_mac_address')
_validate_mac_address_or_none = _lib('_validate_mac_address_or_none')
_validate_ip_address = _lib('_validate_ip_address')
_validate_ip_pools = _lib('_validate_ip_pools')
_validate_fixed_ips = _lib('_validate_fixed_ips')
_validate_nameservers = _lib('_validate_nameservers')
_validate_hostroutes = _lib('_validate_hostroutes')
_validate_ip_address_or_none = _lib('_validate_ip_address_or_none')
_validate_subnet = _lib('_validate_subnet')
_validate_subnet_or_none = _lib('_validate_subnet_or_none')
_validate_subnet_list = _lib('_validate_subnet_list')
_validate_regex = _lib('_validate_regex')
_validate_regex_or_none = _lib('_validate_regex_or_none')
_validate_subnetpool_id = _lib('_validate_subnetpool_id')
_validate_subnetpool_id_or_none = _lib('_validate_subnetpool_id_or_none')
_validate_uuid = _lib('_validate_uuid')
_validate_uuid_or_none = _lib('_validate_uuid_or_none')
_validate_uuid_list = _lib('_validate_uuid_list')
_validate_dict_item = _lib('_validate_dict_item')
_validate_dict = _lib('_validate_dict')
_validate_dict_or_none = _lib('_validate_dict_or_none')
_validate_dict_or_empty = _lib('_validate_dict_or_empty')
_validate_dict_or_nodata = _lib('_validate_dict_or_nodata')
_validate_non_negative = _lib('_validate_non_negative')

convert_to_boolean = _lib('convert_to_boolean')
convert_to_boolean_if_not_none = _lib('convert_to_boolean_if_not_none')
convert_to_int = _lib('convert_to_int')
convert_to_int_if_not_none = _lib('convert_to_int_if_not_none')
convert_to_positive_float_or_none = _lib('convert_to_positive_float_or_none')
convert_kvp_str_to_list = _lib('convert_kvp_str_to_list')
convert_kvp_list_to_dict = _lib('convert_kvp_list_to_dict')
convert_none_to_empty_list = _lib('convert_none_to_empty_list')
convert_none_to_empty_dict = _lib('convert_none_to_empty_dict')
convert_to_list = _lib('convert_to_list')


# These module attributes are forwarded to neutron-lib with a deprecation
# warning on access.
_deprecate._DeprecateSubset.and_also('MAC_PATTERN', lib_validators)
_deprecate._DeprecateSubset.and_also('validators', lib_validators)


def _verify_dict_keys(expected_keys, target_dict, strict=True):
    """Allows to verify keys in a dictionary.

    :param expected_keys: A list of keys expected to be present.
    :param target_dict: The dictionary which should be verified.
    :param strict: Specifies whether additional keys are allowed to be present.
    :return: True, if keys in the dictionary correspond to the specification.
    """
    if not isinstance(target_dict, dict):
        msg = (_("Invalid input. '%(target_dict)s' must be a dictionary "
                 "with keys: %(expected_keys)s") %
               {'target_dict': target_dict, 'expected_keys': expected_keys})
        LOG.debug(msg)
        return msg

    expected_keys = set(expected_keys)
    provided_keys = set(target_dict.keys())

    # Strict mode requires exact key equality; otherwise a superset is fine.
    predicate = expected_keys.__eq__ if strict else expected_keys.issubset

    if not predicate(provided_keys):
        msg = (_("Validation of dictionary's keys failed. "
                 "Expected keys: %(expected_keys)s "
                 "Provided keys: %(provided_keys)s") %
               {'expected_keys': expected_keys,
                'provided_keys': provided_keys})
        LOG.debug(msg)
        return msg
|
||||
|
||||
|
||||
def is_attr_set(attribute):
    """Return True when the attribute is neither None nor "unspecified"."""
    if attribute is None:
        return False
    return attribute is not constants.ATTR_NOT_SPECIFIED
|
||||
|
||||
|
||||
def _validate_list_of_items(item_validator, data, *args, **kwargs):
|
||||
if not isinstance(data, list):
|
||||
msg = _("'%s' is not a list") % data
|
||||
return msg
|
||||
|
||||
if len(set(data)) != len(data):
|
||||
msg = _("Duplicate items in the list: '%s'") % ', '.join(data)
|
||||
return msg
|
||||
|
||||
for item in data:
|
||||
msg = item_validator(item, *args, **kwargs)
|
||||
if msg:
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_values(data, valid_values=None):
|
||||
if data not in valid_values:
|
||||
msg = (_("'%(data)s' is not in %(valid_values)s") %
|
||||
{'data': data, 'valid_values': valid_values})
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_not_empty_string_or_none(data, max_len=None):
|
||||
if data is not None:
|
||||
return _validate_not_empty_string(data, max_len=max_len)
|
||||
|
||||
|
||||
def _validate_not_empty_string(data, max_len=None):
    """Validate that data is a non-blank string no longer than max_len."""
    error = _validate_string(data, max_len=max_len)
    if error:
        return error
    if data.strip():
        return None
    # All-whitespace (or empty) strings are rejected.
    error = _("'%s' Blank strings are not permitted") % data
    LOG.debug(error)
    return error
|
||||
|
||||
|
||||
def _validate_string_or_none(data, max_len=None):
|
||||
if data is not None:
|
||||
return _validate_string(data, max_len=max_len)
|
||||
|
||||
|
||||
def _validate_string(data, max_len=None):
    """Validate that data is a string, optionally at most max_len long."""
    if not isinstance(data, six.string_types):
        msg = _("'%s' is not a valid string") % data
        LOG.debug(msg)
        return msg

    if max_len is None or len(data) <= max_len:
        return None

    msg = (_("'%(data)s' exceeds maximum length of %(max_len)s") %
           {'data': data, 'max_len': max_len})
    LOG.debug(msg)
    return msg
|
||||
|
||||
|
||||
# List validator: every element must be a valid string, duplicates rejected.
validate_list_of_unique_strings = functools.partial(
    _validate_list_of_items, _validate_string)
|
||||
|
||||
|
||||
def _validate_boolean(data, valid_values=None):
    """Return an error message unless data converts cleanly to a boolean."""
    try:
        convert_to_boolean(data)
        return None
    except n_exc.InvalidInput:
        msg = _("'%s' is not a valid boolean value") % data
        LOG.debug(msg)
        return msg
|
||||
|
||||
|
||||
def _validate_range(data, valid_values=None):
|
||||
"""Check that integer value is within a range provided.
|
||||
|
||||
Test is inclusive. Allows either limit to be ignored, to allow
|
||||
checking ranges where only the lower or upper limit matter.
|
||||
It is expected that the limits provided are valid integers or
|
||||
the value None.
|
||||
"""
|
||||
|
||||
min_value = valid_values[0]
|
||||
max_value = valid_values[1]
|
||||
try:
|
||||
data = int(data)
|
||||
except (ValueError, TypeError):
|
||||
msg = _("'%s' is not an integer") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
if min_value is not UNLIMITED and data < min_value:
|
||||
msg = _("'%(data)s' is too small - must be at least "
|
||||
"'%(limit)d'") % {'data': data, 'limit': min_value}
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
if max_value is not UNLIMITED and data > max_value:
|
||||
msg = _("'%(data)s' is too large - must be no larger than "
|
||||
"'%(limit)d'") % {'data': data, 'limit': max_value}
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_no_whitespace(data):
|
||||
"""Validates that input has no whitespace."""
|
||||
if re.search(r'\s', data):
|
||||
msg = _("'%s' contains whitespace") % data
|
||||
LOG.debug(msg)
|
||||
raise n_exc.InvalidInput(error_message=msg)
|
||||
return data
|
||||
|
||||
|
||||
def _validate_mac_address(data, valid_values=None):
    """Return an error message unless data is a usable MAC address."""
    try:
        mac_ok = netaddr.valid_mac(_validate_no_whitespace(data))
    except Exception:
        mac_ok = False

    if mac_ok:
        # Reject addresses that parse but are unusable (e.g. all-zero MAC).
        mac_ok = netaddr.EUI(data) not in [
            netaddr.EUI(mac) for mac in constants.INVALID_MAC_ADDRESSES]
    # TODO(arosen): The code in this file should be refactored
    # so it catches the correct exceptions. _validate_no_whitespace
    # raises AttributeError if data is None.
    if not mac_ok:
        msg = _("'%s' is not a valid MAC address") % data
        LOG.debug(msg)
        return msg
|
||||
|
||||
|
||||
def _validate_mac_address_or_none(data, valid_values=None):
|
||||
if data is not None:
|
||||
return _validate_mac_address(data, valid_values)
|
||||
|
||||
|
||||
def _validate_ip_address(data, valid_values=None):
    """Return an error message unless data is a well-formed IP address."""
    msg = None
    try:
        # netaddr.core.ZEROFILL is only applicable to IPv4: it removes
        # leading zeros from IPv4 address octets.
        ip = netaddr.IPAddress(_validate_no_whitespace(data),
                               flags=netaddr.core.ZEROFILL)
        # Quick sanity checks: IPv6 contains ':'; IPv4 looks like
        # 'xx.xx.xx.xx' (exactly three periods).
        # NOTE(yamamoto): netaddr uses libraries provided by the underlying
        # platform to convert addresses. For example, inet_aton(3).
        # Some platforms, including NetBSD and OS X, have inet_aton
        # implementation which accepts more varying forms of addresses than
        # we want to accept here. The following check is to reject such
        # addresses. For Example:
        # >>> netaddr.IPAddress('1' * 59)
        # IPAddress('199.28.113.199')
        if ':' not in data and data.count('.') != 3:
            msg = _("'%s' is not a valid IP address") % data
        # A leading '0' in an IPv4 octet may be interpreted as octal
        # (011 octal == 9 decimal).  No standard settles this, so reject
        # leading zeros outright to avoid ambiguity.
        elif ip.version == 4 and str(ip) != data:
            msg = _("'%(data)s' is not an accepted IP address, "
                    "'%(ip)s' is recommended") % {"data": data, "ip": ip}
    except Exception:
        msg = _("'%s' is not a valid IP address") % data
    if msg:
        LOG.debug(msg)
    return msg
|
||||
|
||||
|
||||
def _validate_ip_pools(data, valid_values=None):
|
||||
"""Validate that start and end IP addresses are present.
|
||||
|
||||
In addition to this the IP addresses will also be validated
|
||||
"""
|
||||
if not isinstance(data, list):
|
||||
msg = _("Invalid data format for IP pool: '%s'") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
expected_keys = ['start', 'end']
|
||||
for ip_pool in data:
|
||||
msg = _verify_dict_keys(expected_keys, ip_pool)
|
||||
if msg:
|
||||
return msg
|
||||
for k in expected_keys:
|
||||
msg = _validate_ip_address(ip_pool[k])
|
||||
if msg:
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_fixed_ips(data, valid_values=None):
|
||||
if not isinstance(data, list):
|
||||
msg = _("Invalid data format for fixed IP: '%s'") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
ips = []
|
||||
for fixed_ip in data:
|
||||
if not isinstance(fixed_ip, dict):
|
||||
msg = _("Invalid data format for fixed IP: '%s'") % fixed_ip
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
if 'ip_address' in fixed_ip:
|
||||
# Ensure that duplicate entries are not set - just checking IP
|
||||
# suffices. Duplicate subnet_id's are legitimate.
|
||||
fixed_ip_address = fixed_ip['ip_address']
|
||||
if fixed_ip_address in ips:
|
||||
msg = _("Duplicate IP address '%s'") % fixed_ip_address
|
||||
LOG.debug(msg)
|
||||
else:
|
||||
msg = _validate_ip_address(fixed_ip_address)
|
||||
if msg:
|
||||
return msg
|
||||
ips.append(fixed_ip_address)
|
||||
if 'subnet_id' in fixed_ip:
|
||||
msg = _validate_uuid(fixed_ip['subnet_id'])
|
||||
if msg:
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_nameservers(data, valid_values=None):
|
||||
if not hasattr(data, '__iter__'):
|
||||
msg = _("Invalid data format for nameserver: '%s'") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
hosts = []
|
||||
for host in data:
|
||||
# This must be an IP address only
|
||||
msg = _validate_ip_address(host)
|
||||
if msg:
|
||||
msg = _("'%(host)s' is not a valid nameserver. %(msg)s") % {
|
||||
'host': host, 'msg': msg}
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
if host in hosts:
|
||||
msg = _("Duplicate nameserver '%s'") % host
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
hosts.append(host)
|
||||
|
||||
|
||||
def _validate_hostroutes(data, valid_values=None):
|
||||
if not isinstance(data, list):
|
||||
msg = _("Invalid data format for hostroute: '%s'") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
expected_keys = ['destination', 'nexthop']
|
||||
hostroutes = []
|
||||
for hostroute in data:
|
||||
msg = _verify_dict_keys(expected_keys, hostroute)
|
||||
if msg:
|
||||
return msg
|
||||
msg = _validate_subnet(hostroute['destination'])
|
||||
if msg:
|
||||
return msg
|
||||
msg = _validate_ip_address(hostroute['nexthop'])
|
||||
if msg:
|
||||
return msg
|
||||
if hostroute in hostroutes:
|
||||
msg = _("Duplicate hostroute '%s'") % hostroute
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
hostroutes.append(hostroute)
|
||||
|
||||
|
||||
def _validate_ip_address_or_none(data, valid_values=None):
|
||||
if data is not None:
|
||||
return _validate_ip_address(data, valid_values)
|
||||
|
||||
|
||||
def _validate_subnet(data, valid_values=None):
    """Return an error message unless data is a valid CIDR string."""
    try:
        net = netaddr.IPNetwork(_validate_no_whitespace(data))
        # Accept only explicit CIDR notation; for IPv4 the textual form
        # must match exactly what netaddr would render.
        if '/' in data and not (net.version == 4 and str(net) != data):
            return None
        msg = _("'%(data)s' isn't a recognized IP subnet cidr,"
                " '%(cidr)s' is recommended") % {"data": data,
                                                 "cidr": net.cidr}
    except Exception:
        msg = _("'%s' is not a valid IP subnet") % data
    LOG.debug(msg)
    return msg
|
||||
|
||||
|
||||
def _validate_subnet_or_none(data, valid_values=None):
|
||||
if data is not None:
|
||||
return _validate_subnet(data, valid_values)
|
||||
|
||||
|
||||
# List validator: every element must be a valid subnet, duplicates rejected.
_validate_subnet_list = functools.partial(
    _validate_list_of_items, _validate_subnet)
|
||||
|
||||
|
||||
def _validate_regex(data, valid_values=None):
|
||||
try:
|
||||
if re.match(valid_values, data):
|
||||
return
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
msg = _("'%s' is not a valid input") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_regex_or_none(data, valid_values=None):
|
||||
if data is not None:
|
||||
return _validate_regex(data, valid_values)
|
||||
|
||||
|
||||
def _validate_subnetpool_id(data, valid_values=None):
    """Validate a subnetpool id: a UUID, None, or the IPv6 PD sentinel."""
    if data == constants.IPV6_PD_POOL_ID:
        return None
    return _validate_uuid_or_none(data, valid_values)
|
||||
|
||||
|
||||
def _validate_subnetpool_id_or_none(data, valid_values=None):
|
||||
if data is not None:
|
||||
return _validate_subnetpool_id(data, valid_values)
|
||||
|
||||
|
||||
def _validate_uuid(data, valid_values=None):
    """Return an error message unless data looks like a UUID."""
    if uuidutils.is_uuid_like(data):
        return None
    msg = _("'%s' is not a valid UUID") % data
    LOG.debug(msg)
    return msg
|
||||
|
||||
|
||||
def _validate_uuid_or_none(data, valid_values=None):
|
||||
if data is not None:
|
||||
return _validate_uuid(data)
|
||||
|
||||
|
||||
# List validator: every element must be a valid UUID, duplicates rejected.
_validate_uuid_list = functools.partial(
    _validate_list_of_items, _validate_uuid)
|
||||
|
||||
|
||||
def _validate_dict_item(key, key_validator, data):
    """Convert (if requested) and then validate a single dict entry in place."""
    # Apply the optional conversion function first.
    converter = key_validator.get('convert_to')
    if converter:
        data[key] = converter(data.get(key))
    # Locate the 'type:*' validator entry, if any.
    # TODO(salv-orlando): Structure of dict attributes should be improved
    # to avoid iterating over items
    validator_fn = validator_params = None
    for (spec_key, spec_value) in six.iteritems(key_validator):
        if spec_key.startswith('type:'):
            # ask forgiveness, not permission
            try:
                validator_fn = validators[spec_key]
            except KeyError:
                msg = _("Validator '%s' does not exist.") % spec_key
                LOG.debug(msg)
                return msg
            validator_params = spec_value
            break
    # Run the validator, if one was found.
    if validator_fn:
        return validator_fn(data.get(key), validator_params)
|
||||
|
||||
|
||||
def _validate_dict(data, key_specs=None):
|
||||
if not isinstance(data, dict):
|
||||
msg = _("'%s' is not a dictionary") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
# Do not perform any further validation, if no constraints are supplied
|
||||
if not key_specs:
|
||||
return
|
||||
|
||||
# Check whether all required keys are present
|
||||
required_keys = [key for key, spec in six.iteritems(key_specs)
|
||||
if spec.get('required')]
|
||||
|
||||
if required_keys:
|
||||
msg = _verify_dict_keys(required_keys, data, False)
|
||||
if msg:
|
||||
return msg
|
||||
|
||||
# Perform validation and conversion of all values
|
||||
# according to the specifications.
|
||||
for key, key_validator in [(k, v) for k, v in six.iteritems(key_specs)
|
||||
if k in data]:
|
||||
msg = _validate_dict_item(key, key_validator, data)
|
||||
if msg:
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_dict_or_none(data, key_specs=None):
|
||||
if data is not None:
|
||||
return _validate_dict(data, key_specs)
|
||||
|
||||
|
||||
def _validate_dict_or_empty(data, key_specs=None):
|
||||
if data != {}:
|
||||
return _validate_dict(data, key_specs)
|
||||
|
||||
|
||||
def _validate_dict_or_nodata(data, key_specs=None):
|
||||
if data:
|
||||
return _validate_dict(data, key_specs)
|
||||
|
||||
|
||||
def _validate_non_negative(data, valid_values=None):
|
||||
try:
|
||||
data = int(data)
|
||||
except (ValueError, TypeError):
|
||||
msg = _("'%s' is not an integer") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
if data < 0:
|
||||
msg = _("'%s' should be non-negative") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def convert_to_boolean(data):
    """Convert 'true'/'false'/'1'/'0' strings, bools and 0/1 ints to bool.

    :raises n_exc.InvalidInput: for any other value.
    """
    if isinstance(data, six.string_types):
        lowered = data.lower()
        if lowered in ("true", "1"):
            return True
        if lowered in ("false", "0"):
            return False
    elif isinstance(data, bool):
        return data
    elif isinstance(data, int):
        # NOTE: bool is handled above, so only plain ints reach here.
        if data == 0:
            return False
        if data == 1:
            return True
    msg = _("'%s' cannot be converted to boolean") % data
    raise n_exc.InvalidInput(error_message=msg)
|
||||
|
||||
|
||||
def convert_to_boolean_if_not_none(data):
    """Like convert_to_boolean, but None is passed through unchanged."""
    if data is None:
        return None
    return convert_to_boolean(data)
|
||||
|
||||
|
||||
def convert_to_int(data):
    """Convert data to int; raise n_exc.InvalidInput when not possible."""
    try:
        result = int(data)
    except (ValueError, TypeError):
        msg = _("'%s' is not an integer") % data
        raise n_exc.InvalidInput(error_message=msg)
    return result
|
||||
|
||||
|
||||
def convert_to_int_if_not_none(data):
    """Like convert_to_int, but None is passed through unchanged."""
    if data is None:
        return data
    return convert_to_int(data)
|
||||
|
||||
|
||||
def convert_to_positive_float_or_none(val):
    """Convert val to a non-negative float; None is passed through.

    :raises n_exc.InvalidInput: for negative or non-numeric input.
    """
    # NOTE(salv-orlando): This conversion function is currently used by
    # a vendor specific extension only at the moment It is used for
    # port's RXTX factor in neutron.plugins.vmware.extensions.qos.
    # It is deemed however generic enough to be in this module as it
    # might be used in future for other API attributes.
    if val is None:
        return None
    try:
        val = float(val)
        if val < 0:
            raise ValueError()
    except (ValueError, TypeError):
        msg = _("'%s' must be a non negative decimal.") % val
        raise n_exc.InvalidInput(error_message=msg)
    return val
|
||||
|
||||
|
||||
def convert_kvp_str_to_list(data):
    """Convert a value of the form 'key=value' to ['key', 'value'].

    :raises: n_exc.InvalidInput if any of the strings are malformed
             (e.g. do not contain a key).
    """
    # Split on the first '=' only, so values may themselves contain '='.
    parts = [piece.strip() for piece in data.split('=', 1)]
    if len(parts) == 2 and parts[0]:
        return parts
    msg = _("'%s' is not of the form <key>=[value]") % data
    raise n_exc.InvalidInput(error_message=msg)
|
||||
|
||||
|
||||
def convert_kvp_list_to_dict(kvp_list):
    """Convert a list of 'key=value' strings to a dict.

    :raises: n_exc.InvalidInput if any of the strings are malformed
             (e.g. do not contain a key) or if any
             of the keys appear more than once.
    """
    if kvp_list == ['True']:
        # No values were provided (i.e. '--flag-name')
        return {}
    collected = {}
    for entry in kvp_list:
        key, value = convert_kvp_str_to_list(entry)
        collected.setdefault(key, set()).add(value)
    return dict((k, list(v)) for k, v in six.iteritems(collected))
|
||||
|
||||
|
||||
def convert_none_to_empty_list(value):
    """Return [] when value is None, otherwise value unchanged."""
    if value is None:
        return []
    return value
|
||||
|
||||
|
||||
def convert_none_to_empty_dict(value):
    """Return {} when value is None, otherwise value unchanged."""
    if value is None:
        return {}
    return value
|
||||
|
||||
|
||||
def convert_to_list(data):
    """Wrap data in a list.

    None becomes []; non-string iterables become list(data); anything else
    becomes a single-element list.
    """
    if data is None:
        return []
    if hasattr(data, '__iter__') and not isinstance(data, six.string_types):
        return list(data)
    return [data]
|
||||
|
||||
|
||||
# Note: In order to ensure that the MAC address is unicast the first byte
# must be even.
MAC_PATTERN = "^%s[aceACE02468](:%s{2}){5}$" % (constants.HEX_ELEM,
                                                constants.HEX_ELEM)

# Registry mapping 'type:<name>' validation rule names to their functions.
validators = {'type:dict': _validate_dict,
              'type:dict_or_none': _validate_dict_or_none,
              'type:dict_or_empty': _validate_dict_or_empty,
              'type:dict_or_nodata': _validate_dict_or_nodata,
              'type:fixed_ips': _validate_fixed_ips,
              'type:hostroutes': _validate_hostroutes,
              'type:ip_address': _validate_ip_address,
              'type:ip_address_or_none': _validate_ip_address_or_none,
              'type:ip_pools': _validate_ip_pools,
              'type:mac_address': _validate_mac_address,
              'type:mac_address_or_none': _validate_mac_address_or_none,
              'type:nameservers': _validate_nameservers,
              'type:non_negative': _validate_non_negative,
              'type:range': _validate_range,
              'type:regex': _validate_regex,
              'type:regex_or_none': _validate_regex_or_none,
              'type:string': _validate_string,
              'type:string_or_none': _validate_string_or_none,
              'type:not_empty_string': _validate_not_empty_string,
              'type:not_empty_string_or_none':
              _validate_not_empty_string_or_none,
              'type:subnet': _validate_subnet,
              'type:subnet_list': _validate_subnet_list,
              'type:subnet_or_none': _validate_subnet_or_none,
              'type:subnetpool_id': _validate_subnetpool_id,
              'type:subnetpool_id_or_none': _validate_subnetpool_id_or_none,
              'type:uuid': _validate_uuid,
              'type:uuid_or_none': _validate_uuid_or_none,
              'type:uuid_list': _validate_uuid_list,
              'type:values': _validate_values,
              'type:boolean': _validate_boolean,
              'type:list_of_unique_strings': validate_list_of_unique_strings}

# Define constants for base resource name
NETWORK = 'network'
|
||||
|
@ -687,7 +156,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
|||
'is_visible': True},
|
||||
'admin_state_up': {'allow_post': True, 'allow_put': True,
|
||||
'default': True,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': True},
|
||||
'status': {'allow_post': False, 'allow_put': False,
|
||||
'is_visible': True},
|
||||
|
@ -698,7 +167,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
|||
SHARED: {'allow_post': True,
|
||||
'allow_put': True,
|
||||
'default': False,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': True,
|
||||
'required_by_policy': True,
|
||||
'enforce_policy': True},
|
||||
|
@ -717,7 +186,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
|||
'is_visible': True},
|
||||
'admin_state_up': {'allow_post': True, 'allow_put': True,
|
||||
'default': True,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': True},
|
||||
'mac_address': {'allow_post': True, 'allow_put': True,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
|
@ -726,7 +195,8 @@ RESOURCE_ATTRIBUTE_MAP = {
|
|||
'is_visible': True},
|
||||
'fixed_ips': {'allow_post': True, 'allow_put': True,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'convert_list_to': convert_kvp_list_to_dict,
|
||||
'convert_list_to':
|
||||
lib_converters.convert_kvp_list_to_dict,
|
||||
'validate': {'type:fixed_ips': None},
|
||||
'enforce_policy': True,
|
||||
'is_visible': True},
|
||||
|
@ -754,7 +224,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
|||
'validate': {'type:string': NAME_MAX_LEN},
|
||||
'is_visible': True},
|
||||
'ip_version': {'allow_post': True, 'allow_put': False,
|
||||
'convert_to': convert_to_int,
|
||||
'convert_to': lib_converters.convert_to_int,
|
||||
'validate': {'type:values': [4, 6]},
|
||||
'is_visible': True},
|
||||
'network_id': {'allow_post': True, 'allow_put': False,
|
||||
|
@ -770,7 +240,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
|||
'prefixlen': {'allow_post': True,
|
||||
'allow_put': False,
|
||||
'validate': {'type:non_negative': None},
|
||||
'convert_to': convert_to_int,
|
||||
'convert_to': lib_converters.convert_to_int,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'required_by_policy': False,
|
||||
'is_visible': False},
|
||||
|
@ -789,12 +259,14 @@ RESOURCE_ATTRIBUTE_MAP = {
|
|||
'validate': {'type:ip_pools': None},
|
||||
'is_visible': True},
|
||||
'dns_nameservers': {'allow_post': True, 'allow_put': True,
|
||||
'convert_to': convert_none_to_empty_list,
|
||||
'convert_to':
|
||||
lib_converters.convert_none_to_empty_list,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'validate': {'type:nameservers': None},
|
||||
'is_visible': True},
|
||||
'host_routes': {'allow_post': True, 'allow_put': True,
|
||||
'convert_to': convert_none_to_empty_list,
|
||||
'convert_to':
|
||||
lib_converters.convert_none_to_empty_list,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'validate': {'type:hostroutes': None},
|
||||
'is_visible': True},
|
||||
|
@ -804,7 +276,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
|||
'is_visible': True},
|
||||
'enable_dhcp': {'allow_post': True, 'allow_put': True,
|
||||
'default': True,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': True},
|
||||
'ipv6_ra_mode': {'allow_post': True, 'allow_put': False,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
|
@ -818,7 +290,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
|||
SHARED: {'allow_post': False,
|
||||
'allow_put': False,
|
||||
'default': False,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': False,
|
||||
'required_by_policy': True,
|
||||
'enforce_policy': True},
|
||||
|
@ -845,7 +317,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
|||
'default_quota': {'allow_post': True,
|
||||
'allow_put': True,
|
||||
'validate': {'type:non_negative': None},
|
||||
'convert_to': convert_to_int,
|
||||
'convert_to': lib_converters.convert_to_int,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'is_visible': True},
|
||||
'ip_version': {'allow_post': False,
|
||||
|
@ -854,32 +326,32 @@ RESOURCE_ATTRIBUTE_MAP = {
|
|||
'default_prefixlen': {'allow_post': True,
|
||||
'allow_put': True,
|
||||
'validate': {'type:non_negative': None},
|
||||
'convert_to': convert_to_int,
|
||||
'convert_to': lib_converters.convert_to_int,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'is_visible': True},
|
||||
'min_prefixlen': {'allow_post': True,
|
||||
'allow_put': True,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'validate': {'type:non_negative': None},
|
||||
'convert_to': convert_to_int,
|
||||
'convert_to': lib_converters.convert_to_int,
|
||||
'is_visible': True},
|
||||
'max_prefixlen': {'allow_post': True,
|
||||
'allow_put': True,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'validate': {'type:non_negative': None},
|
||||
'convert_to': convert_to_int,
|
||||
'convert_to': lib_converters.convert_to_int,
|
||||
'is_visible': True},
|
||||
'is_default': {'allow_post': True,
|
||||
'allow_put': True,
|
||||
'default': False,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': True,
|
||||
'required_by_policy': True,
|
||||
'enforce_policy': True},
|
||||
SHARED: {'allow_post': True,
|
||||
'allow_put': False,
|
||||
'default': False,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': True,
|
||||
'required_by_policy': True,
|
||||
'enforce_policy': True},
|
||||
|
@ -934,8 +406,7 @@ def fill_default_value(attr_info, res_dict,
|
|||
check_allow_post=True):
|
||||
for attr, attr_vals in six.iteritems(attr_info):
|
||||
if attr_vals['allow_post']:
|
||||
if ('default' not in attr_vals and
|
||||
attr not in res_dict):
|
||||
if 'default' not in attr_vals and attr not in res_dict:
|
||||
msg = _("Failed to parse request. Required "
|
||||
"attribute '%s' not specified") % attr
|
||||
raise exc_cls(msg)
|
||||
|
@ -959,7 +430,8 @@ def convert_value(attr_info, res_dict, exc_cls=ValueError):
|
|||
if 'validate' not in attr_vals:
|
||||
continue
|
||||
for rule in attr_vals['validate']:
|
||||
res = validators[rule](res_dict[attr], attr_vals['validate'][rule])
|
||||
res = lib_validators.validators[rule](res_dict[attr],
|
||||
attr_vals['validate'][rule])
|
||||
if res:
|
||||
msg_dict = dict(attr=attr, reason=res)
|
||||
msg = _("Invalid input for %(attr)s. "
|
||||
|
|
|
@ -19,23 +19,33 @@ from neutron._i18n import _
|
|||
|
||||
|
||||
class _DeprecateSubset(object):
|
||||
additional = {}
|
||||
|
||||
def __init__(self, my_globals, other_mod):
|
||||
self.other_mod = other_mod
|
||||
self.my_globals = copy.copy(my_globals)
|
||||
|
||||
@classmethod
|
||||
def and_also(cls, name, other_mod):
|
||||
cls.additional[name] = other_mod
|
||||
|
||||
def __getattr__(self, name):
|
||||
a = self.my_globals.get(name)
|
||||
if (not name.startswith("__") and not inspect.ismodule(a) and
|
||||
name in vars(self.other_mod)):
|
||||
if not name.startswith("__") and not inspect.ismodule(a):
|
||||
other_mod = self.additional.get(name) or self.other_mod
|
||||
if name in vars(other_mod):
|
||||
|
||||
debtcollector.deprecate(
|
||||
name,
|
||||
message='moved to neutron_lib',
|
||||
version='mitaka',
|
||||
removal_version='newton',
|
||||
stacklevel=4)
|
||||
# These should be enabled after most have been cleaned up
|
||||
# in neutron proper, which may not happen during the busy M-3.
|
||||
|
||||
return vars(self.other_mod)[name]
|
||||
debtcollector.deprecate(
|
||||
name,
|
||||
message='moved to %s' % other_mod.__name__,
|
||||
version='mitaka',
|
||||
removal_version='newton',
|
||||
stacklevel=4)
|
||||
|
||||
return vars(other_mod)[name]
|
||||
|
||||
try:
|
||||
return self.my_globals[name]
|
||||
|
|
|
@ -20,6 +20,7 @@ Routines for configuring Neutron
|
|||
import sys
|
||||
|
||||
from keystoneauth1 import loading as ks_loading
|
||||
from neutron_lib.api import validators
|
||||
from oslo_config import cfg
|
||||
from oslo_db import options as db_options
|
||||
from oslo_log import log as logging
|
||||
|
@ -28,7 +29,6 @@ from oslo_middleware import cors
|
|||
from oslo_service import wsgi
|
||||
|
||||
from neutron._i18n import _, _LI
|
||||
from neutron.api.v2 import attributes
|
||||
from neutron.common import constants
|
||||
from neutron.common import utils
|
||||
from neutron import policy
|
||||
|
@ -241,8 +241,7 @@ def init(args, **kwargs):
|
|||
n_rpc.init(cfg.CONF)
|
||||
|
||||
# Validate that the base_mac is of the correct format
|
||||
msg = attributes._validate_regex(cfg.CONF.base_mac,
|
||||
attributes.MAC_PATTERN)
|
||||
msg = validators.validate_regex(cfg.CONF.base_mac, validators.MAC_PATTERN)
|
||||
if msg:
|
||||
msg = _("Base MAC: %s") % msg
|
||||
raise Exception(msg)
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
import datetime
|
||||
|
||||
from eventlet import greenthread
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import constants
|
||||
from oslo_config import cfg
|
||||
from oslo_db import exception as db_exc
|
||||
|
@ -282,7 +283,7 @@ class AgentDbMixin(ext_agent.AgentPluginBase, AgentAvailabilityZoneMixin):
|
|||
filters=filters, fields=fields)
|
||||
alive = filters and filters.get('alive', None)
|
||||
if alive:
|
||||
alive = attributes.convert_to_boolean(alive[0])
|
||||
alive = converters.convert_to_boolean(alive[0])
|
||||
agents = [agent for agent in agents if agent['alive'] == alive]
|
||||
return agents
|
||||
|
||||
|
|
|
@ -13,9 +13,9 @@
|
|||
# under the License.
|
||||
#
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from oslo_db import exception as db_exc
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import orm
|
||||
|
||||
from neutron.api.v2 import attributes as attr
|
||||
|
@ -43,7 +43,7 @@ class AllowedAddressPairsMixin(object):
|
|||
|
||||
def _process_create_allowed_address_pairs(self, context, port,
|
||||
allowed_address_pairs):
|
||||
if not attr.is_attr_set(allowed_address_pairs):
|
||||
if not validators.is_attr_set(allowed_address_pairs):
|
||||
return []
|
||||
try:
|
||||
with context.session.begin(subtransactions=True):
|
||||
|
@ -95,7 +95,7 @@ class AllowedAddressPairsMixin(object):
|
|||
return self._fields(res, fields)
|
||||
|
||||
def _has_address_pairs(self, port):
|
||||
return (attr.is_attr_set(port['port'][addr_pair.ADDRESS_PAIRS])
|
||||
return (validators.is_attr_set(port['port'][addr_pair.ADDRESS_PAIRS])
|
||||
and port['port'][addr_pair.ADDRESS_PAIRS] != [])
|
||||
|
||||
def _check_update_has_allowed_address_pairs(self, port):
|
||||
|
|
|
@ -15,6 +15,7 @@
|
|||
|
||||
import functools
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions as n_exc
|
||||
from oslo_config import cfg
|
||||
|
@ -309,9 +310,9 @@ class DbBasePluginCommon(common_db_mixin.CommonDbMixin):
|
|||
'gateway_ip': gateway_ip,
|
||||
'description': subnet.get('description')}
|
||||
if subnet['ip_version'] == 6 and subnet['enable_dhcp']:
|
||||
if attributes.is_attr_set(subnet['ipv6_ra_mode']):
|
||||
if validators.is_attr_set(subnet['ipv6_ra_mode']):
|
||||
args['ipv6_ra_mode'] = subnet['ipv6_ra_mode']
|
||||
if attributes.is_attr_set(subnet['ipv6_address_mode']):
|
||||
if validators.is_attr_set(subnet['ipv6_address_mode']):
|
||||
args['ipv6_address_mode'] = subnet['ipv6_address_mode']
|
||||
return args
|
||||
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
import functools
|
||||
|
||||
import netaddr
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions as exc
|
||||
from oslo_config import cfg
|
||||
|
@ -231,8 +232,8 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
|||
if cur_subnet:
|
||||
self._validate_ipv6_update_dhcp(subnet, cur_subnet)
|
||||
return
|
||||
ra_mode_set = attributes.is_attr_set(subnet.get('ipv6_ra_mode'))
|
||||
address_mode_set = attributes.is_attr_set(
|
||||
ra_mode_set = validators.is_attr_set(subnet.get('ipv6_ra_mode'))
|
||||
address_mode_set = validators.is_attr_set(
|
||||
subnet.get('ipv6_address_mode'))
|
||||
self._validate_ipv6_dhcp(ra_mode_set, address_mode_set,
|
||||
subnet['enable_dhcp'])
|
||||
|
@ -274,16 +275,16 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
|||
msg = _("Cannot disable enable_dhcp with "
|
||||
"ipv6 attributes set")
|
||||
|
||||
ra_mode_set = attributes.is_attr_set(subnet.get('ipv6_ra_mode'))
|
||||
address_mode_set = attributes.is_attr_set(
|
||||
ra_mode_set = validators.is_attr_set(subnet.get('ipv6_ra_mode'))
|
||||
address_mode_set = validators.is_attr_set(
|
||||
subnet.get('ipv6_address_mode'))
|
||||
|
||||
if ra_mode_set or address_mode_set:
|
||||
raise exc.InvalidInput(error_message=msg)
|
||||
|
||||
old_ra_mode_set = attributes.is_attr_set(
|
||||
old_ra_mode_set = validators.is_attr_set(
|
||||
cur_subnet.get('ipv6_ra_mode'))
|
||||
old_address_mode_set = attributes.is_attr_set(
|
||||
old_address_mode_set = validators.is_attr_set(
|
||||
cur_subnet.get('ipv6_address_mode'))
|
||||
|
||||
if old_ra_mode_set or old_address_mode_set:
|
||||
|
@ -439,7 +440,7 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
|||
|
||||
ip_ver = s['ip_version']
|
||||
|
||||
if attributes.is_attr_set(s.get('cidr')):
|
||||
if validators.is_attr_set(s.get('cidr')):
|
||||
self._validate_ip_version(ip_ver, s['cidr'], 'cidr')
|
||||
|
||||
# TODO(watanabe.isao): After we found a way to avoid the re-sync
|
||||
|
@ -466,7 +467,7 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
|||
"if enable_dhcp is True.")
|
||||
raise exc.InvalidInput(error_message=error_message)
|
||||
|
||||
if attributes.is_attr_set(s.get('gateway_ip')):
|
||||
if validators.is_attr_set(s.get('gateway_ip')):
|
||||
self._validate_ip_version(ip_ver, s['gateway_ip'], 'gateway_ip')
|
||||
is_gateway_not_valid = (
|
||||
ipam.utils.check_gateway_invalid_in_subnet(
|
||||
|
@ -491,7 +492,7 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
|||
ip_address=cur_subnet['gateway_ip'],
|
||||
port_id=allocated['port_id'])
|
||||
|
||||
if attributes.is_attr_set(s.get('dns_nameservers')):
|
||||
if validators.is_attr_set(s.get('dns_nameservers')):
|
||||
if len(s['dns_nameservers']) > cfg.CONF.max_dns_nameservers:
|
||||
raise n_exc.DNSNameServersExhausted(
|
||||
subnet_id=s.get('id', _('new subnet')),
|
||||
|
@ -505,7 +506,7 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
|||
dns))
|
||||
self._validate_ip_version(ip_ver, dns, 'dns_nameserver')
|
||||
|
||||
if attributes.is_attr_set(s.get('host_routes')):
|
||||
if validators.is_attr_set(s.get('host_routes')):
|
||||
if len(s['host_routes']) > cfg.CONF.max_subnet_host_routes:
|
||||
raise n_exc.HostRoutesExhausted(
|
||||
subnet_id=s.get('id', _('new subnet')),
|
||||
|
@ -515,11 +516,11 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
|||
self._validate_host_route(rt, ip_ver)
|
||||
|
||||
if ip_ver == 4:
|
||||
if attributes.is_attr_set(s.get('ipv6_ra_mode')):
|
||||
if validators.is_attr_set(s.get('ipv6_ra_mode')):
|
||||
raise exc.InvalidInput(
|
||||
error_message=(_("ipv6_ra_mode is not valid when "
|
||||
"ip_version is 4")))
|
||||
if attributes.is_attr_set(s.get('ipv6_address_mode')):
|
||||
if validators.is_attr_set(s.get('ipv6_address_mode')):
|
||||
raise exc.InvalidInput(
|
||||
error_message=(_("ipv6_address_mode is not valid when "
|
||||
"ip_version is 4")))
|
||||
|
@ -626,11 +627,11 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
|||
return
|
||||
|
||||
cidr = subnet.get('cidr')
|
||||
if attributes.is_attr_set(cidr):
|
||||
if validators.is_attr_set(cidr):
|
||||
ip_version = netaddr.IPNetwork(cidr).version
|
||||
else:
|
||||
ip_version = subnet.get('ip_version')
|
||||
if not attributes.is_attr_set(ip_version):
|
||||
if not validators.is_attr_set(ip_version):
|
||||
msg = _('ip_version must be specified in the absence of '
|
||||
'cidr and subnetpool_id')
|
||||
raise exc.BadRequest(resource='subnets', msg=msg)
|
||||
|
@ -658,8 +659,8 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
|||
s = subnet['subnet']
|
||||
cidr = s.get('cidr', constants.ATTR_NOT_SPECIFIED)
|
||||
prefixlen = s.get('prefixlen', constants.ATTR_NOT_SPECIFIED)
|
||||
has_cidr = attributes.is_attr_set(cidr)
|
||||
has_prefixlen = attributes.is_attr_set(prefixlen)
|
||||
has_cidr = validators.is_attr_set(cidr)
|
||||
has_prefixlen = validators.is_attr_set(prefixlen)
|
||||
|
||||
if has_cidr and has_prefixlen:
|
||||
msg = _('cidr and prefixlen must not be supplied together')
|
||||
|
@ -915,7 +916,7 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
|||
- the address family of the subnetpool and address scope
|
||||
are the same
|
||||
"""
|
||||
if not attributes.is_attr_set(address_scope_id):
|
||||
if not validators.is_attr_set(address_scope_id):
|
||||
return
|
||||
|
||||
if not self.is_address_scope_owned_by_tenant(context,
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import exceptions as n_exc
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
|
@ -20,7 +21,6 @@ import sqlalchemy as sa
|
|||
from sqlalchemy import orm
|
||||
|
||||
from neutron._i18n import _, _LE
|
||||
from neutron.api.v2 import attributes
|
||||
from neutron.common import utils
|
||||
from neutron.db import db_base_plugin_v2
|
||||
from neutron.db import l3_db
|
||||
|
@ -154,7 +154,7 @@ class DNSDbMixin(object):
|
|||
floatingip_data, req_data):
|
||||
# expects to be called within a plugin's session
|
||||
dns_domain = req_data.get(dns.DNSDOMAIN)
|
||||
if not attributes.is_attr_set(dns_domain):
|
||||
if not validators.is_attr_set(dns_domain):
|
||||
return
|
||||
if not self.dns_driver:
|
||||
return
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants as l3_constants
|
||||
from neutron_lib import exceptions as n_exc
|
||||
import sqlalchemy as sa
|
||||
|
@ -118,7 +119,7 @@ class External_net_db_mixin(object):
|
|||
|
||||
def _process_l3_create(self, context, net_data, req_data):
|
||||
external = req_data.get(external_net.EXTERNAL)
|
||||
external_set = attributes.is_attr_set(external)
|
||||
external_set = validators.is_attr_set(external)
|
||||
|
||||
if not external_set:
|
||||
return
|
||||
|
@ -157,7 +158,7 @@ class External_net_db_mixin(object):
|
|||
|
||||
new_value = req_data.get(external_net.EXTERNAL)
|
||||
net_id = net_data['id']
|
||||
if not attributes.is_attr_set(new_value):
|
||||
if not validators.is_attr_set(new_value):
|
||||
return
|
||||
|
||||
if net_data.get(external_net.EXTERNAL) == new_value:
|
||||
|
|
|
@ -17,6 +17,7 @@ import collections
|
|||
import itertools
|
||||
|
||||
import netaddr
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants as const
|
||||
from neutron_lib import exceptions as exc
|
||||
from oslo_config import cfg
|
||||
|
@ -25,7 +26,6 @@ from oslo_log import log as logging
|
|||
from sqlalchemy.orm import exc as orm_exc
|
||||
|
||||
from neutron._i18n import _, _LI
|
||||
from neutron.api.v2 import attributes
|
||||
from neutron.common import constants
|
||||
from neutron.common import exceptions as n_exc
|
||||
from neutron.common import ipv6_utils
|
||||
|
@ -79,8 +79,8 @@ class IpamBackendMixin(db_base_plugin_common.DbBasePluginCommon):
|
|||
|
||||
Allocation pools can be set for specific subnet request only
|
||||
"""
|
||||
has_allocpool = attributes.is_attr_set(subnet['allocation_pools'])
|
||||
is_any_subnetpool_request = not attributes.is_attr_set(subnet['cidr'])
|
||||
has_allocpool = validators.is_attr_set(subnet['allocation_pools'])
|
||||
is_any_subnetpool_request = not validators.is_attr_set(subnet['cidr'])
|
||||
if is_any_subnetpool_request and has_allocpool:
|
||||
reason = _("allocation_pools allowed only "
|
||||
"for specific subnet requests.")
|
||||
|
@ -89,7 +89,7 @@ class IpamBackendMixin(db_base_plugin_common.DbBasePluginCommon):
|
|||
def _validate_ip_version_with_subnetpool(self, subnet, subnetpool):
|
||||
"""Validates ip version for subnet_pool and requested subnet"""
|
||||
ip_version = subnet.get('ip_version')
|
||||
has_ip_version = attributes.is_attr_set(ip_version)
|
||||
has_ip_version = validators.is_attr_set(ip_version)
|
||||
if has_ip_version and ip_version != subnetpool.ip_version:
|
||||
args = {'req_ver': str(subnet['ip_version']),
|
||||
'pool_ver': str(subnetpool.ip_version)}
|
||||
|
@ -350,7 +350,7 @@ class IpamBackendMixin(db_base_plugin_common.DbBasePluginCommon):
|
|||
|
||||
def _prepare_allocation_pools(self, allocation_pools, cidr, gateway_ip):
|
||||
"""Returns allocation pools represented as list of IPRanges"""
|
||||
if not attributes.is_attr_set(allocation_pools):
|
||||
if not validators.is_attr_set(allocation_pools):
|
||||
return self.generate_pools(cidr, gateway_ip)
|
||||
|
||||
ip_range_pools = self.pools_to_ip_range(allocation_pools)
|
||||
|
@ -450,7 +450,7 @@ class IpamBackendMixin(db_base_plugin_common.DbBasePluginCommon):
|
|||
context.session.add(subnet)
|
||||
# NOTE(changzhi) Store DNS nameservers with order into DB one
|
||||
# by one when create subnet with DNS nameservers
|
||||
if attributes.is_attr_set(dns_nameservers):
|
||||
if validators.is_attr_set(dns_nameservers):
|
||||
for order, server in enumerate(dns_nameservers):
|
||||
dns = models_v2.DNSNameServer(
|
||||
address=server,
|
||||
|
@ -458,7 +458,7 @@ class IpamBackendMixin(db_base_plugin_common.DbBasePluginCommon):
|
|||
subnet_id=subnet.id)
|
||||
context.session.add(dns)
|
||||
|
||||
if attributes.is_attr_set(host_routes):
|
||||
if validators.is_attr_set(host_routes):
|
||||
for rt in host_routes:
|
||||
route = models_v2.SubnetRoute(
|
||||
subnet_id=subnet.id,
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
# under the License.
|
||||
import collections
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants as const
|
||||
from neutron_lib import exceptions as n_exc
|
||||
from oslo_config import cfg
|
||||
|
@ -22,7 +23,6 @@ from oslo_utils import excutils
|
|||
import six
|
||||
|
||||
from neutron._i18n import _, _LI, _LW
|
||||
from neutron.api.v2 import attributes
|
||||
from neutron.callbacks import events
|
||||
from neutron.callbacks import exceptions
|
||||
from neutron.callbacks import registry
|
||||
|
@ -952,6 +952,6 @@ def is_distributed_router(router):
|
|||
except AttributeError:
|
||||
# if not, try to see if it is a request body
|
||||
requested_router_type = router.get('distributed')
|
||||
if attributes.is_attr_set(requested_router_type):
|
||||
if validators.is_attr_set(requested_router_type):
|
||||
return requested_router_type
|
||||
return cfg.CONF.router_distributed
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
import functools
|
||||
|
||||
import netaddr
|
||||