Use converters and validators from neutron-lib

Related-Blueprint: neutron-lib
Change-Id: I6b9079e9e703c6fd75adbed3846e7257685433e8

parent 4148a347b3
commit 78fff41ee3
@@ -13,15 +13,12 @@
# License for the specific language governing permissions and limitations
# under the License.

import functools
import re
import sys

import netaddr
from debtcollector import moves
from neutron_lib.api import converters as lib_converters
from neutron_lib.api import validators as lib_validators
from neutron_lib import constants
from neutron_lib import exceptions as n_exc
from oslo_log import log as logging
from oslo_utils import uuidutils
import six
import webob.exc

@@ -30,14 +27,12 @@ from neutron.common import _deprecate
from neutron.common import constants as n_const


LOG = logging.getLogger(__name__)

# Defining a constant to avoid repeating string literal in several modules
SHARED = 'shared'

# Used by range check to indicate no limit for a bound.
UNLIMITED = None
_deprecate._DeprecateSubset.and_also('UNLIMITED', lib_validators)

# TODO(HenryG): use DB field sizes (neutron-lib 0.1.1)
NAME_MAX_LEN = 255
TENANT_ID_MAX_LEN = 255
DESCRIPTION_MAX_LEN = 255
@@ -46,600 +41,74 @@ DEVICE_ID_MAX_LEN = 255
DEVICE_OWNER_MAX_LEN = 255


def _lib(old_name):
    """Deprecate a function moved to neutron_lib.api.converters/validators."""
    new_func = getattr(lib_validators, old_name, None)
    if not new_func:
        # Try non-private name (without leading underscore)
        new_func = getattr(lib_validators, old_name[1:], None)
    if not new_func:
        # If it isn't a validator, maybe it's a converter
        new_func = getattr(lib_converters, old_name, None)
    assert new_func
    return moves.moved_function(new_func, old_name, __name__,
                                message='moved to neutron_lib',
                                version='mitaka', removal_version='ocata')


_verify_dict_keys = _lib('_verify_dict_keys')
is_attr_set = _lib('is_attr_set')
_validate_list_of_items = _lib('_validate_list_of_items')
_validate_values = _lib('_validate_values')
_validate_not_empty_string_or_none = _lib('_validate_not_empty_string_or_none')
_validate_not_empty_string = _lib('_validate_not_empty_string')
_validate_string_or_none = _lib('_validate_string_or_none')
_validate_string = _lib('_validate_string')
validate_list_of_unique_strings = _lib('validate_list_of_unique_strings')
_validate_boolean = _lib('_validate_boolean')
_validate_range = _lib('_validate_range')
_validate_no_whitespace = _lib('_validate_no_whitespace')
_validate_mac_address = _lib('_validate_mac_address')
_validate_mac_address_or_none = _lib('_validate_mac_address_or_none')
_validate_ip_address = _lib('_validate_ip_address')
_validate_ip_pools = _lib('_validate_ip_pools')
_validate_fixed_ips = _lib('_validate_fixed_ips')
_validate_nameservers = _lib('_validate_nameservers')
_validate_hostroutes = _lib('_validate_hostroutes')
_validate_ip_address_or_none = _lib('_validate_ip_address_or_none')
_validate_subnet = _lib('_validate_subnet')
_validate_subnet_or_none = _lib('_validate_subnet_or_none')
_validate_subnet_list = _lib('_validate_subnet_list')
_validate_regex = _lib('_validate_regex')
_validate_regex_or_none = _lib('_validate_regex_or_none')
_validate_subnetpool_id = _lib('_validate_subnetpool_id')
_validate_subnetpool_id_or_none = _lib('_validate_subnetpool_id_or_none')
_validate_uuid = _lib('_validate_uuid')
_validate_uuid_or_none = _lib('_validate_uuid_or_none')
_validate_uuid_list = _lib('_validate_uuid_list')
_validate_dict_item = _lib('_validate_dict_item')
_validate_dict = _lib('_validate_dict')
_validate_dict_or_none = _lib('_validate_dict_or_none')
_validate_dict_or_empty = _lib('_validate_dict_or_empty')
_validate_dict_or_nodata = _lib('_validate_dict_or_nodata')
_validate_non_negative = _lib('_validate_non_negative')

convert_to_boolean = _lib('convert_to_boolean')
convert_to_boolean_if_not_none = _lib('convert_to_boolean_if_not_none')
convert_to_int = _lib('convert_to_int')
convert_to_int_if_not_none = _lib('convert_to_int_if_not_none')
convert_to_positive_float_or_none = _lib('convert_to_positive_float_or_none')
convert_kvp_str_to_list = _lib('convert_kvp_str_to_list')
convert_kvp_list_to_dict = _lib('convert_kvp_list_to_dict')
convert_none_to_empty_list = _lib('convert_none_to_empty_list')
convert_none_to_empty_dict = _lib('convert_none_to_empty_dict')
convert_to_list = _lib('convert_to_list')


_deprecate._DeprecateSubset.and_also('MAC_PATTERN', lib_validators)

_deprecate._DeprecateSubset.and_also('validators', lib_validators)

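The aliases above keep the old neutron.api.v2.attributes names importable while delegating to neutron-lib. A minimal illustrative sketch of how a debtcollector moved_function alias behaves; the names below are hypothetical and not part of this commit:

import warnings

from debtcollector import moves


def new_validate(data):
    # stand-in for a validator that now lives in neutron_lib.api.validators
    return None if data else "'%s' is not a valid value" % data

# alias published under the old module name so existing imports keep working
old_validate = moves.moved_function(
    new_validate, 'old_validate', __name__,
    message='moved to neutron_lib', version='mitaka', removal_version='ocata')

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    assert old_validate('x') is None          # delegates to new_validate
    assert issubclass(caught[-1].category, DeprecationWarning)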
def _verify_dict_keys(expected_keys, target_dict, strict=True):
"""Allows to verify keys in a dictionary.

:param expected_keys: A list of keys expected to be present.
:param target_dict: The dictionary which should be verified.
:param strict: Specifies whether additional keys are allowed to be present.
:return: True, if keys in the dictionary correspond to the specification.
"""
if not isinstance(target_dict, dict):
|
||||
msg = (_("Invalid input. '%(target_dict)s' must be a dictionary "
|
||||
"with keys: %(expected_keys)s") %
|
||||
{'target_dict': target_dict, 'expected_keys': expected_keys})
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
expected_keys = set(expected_keys)
|
||||
provided_keys = set(target_dict.keys())
|
||||
|
||||
predicate = expected_keys.__eq__ if strict else expected_keys.issubset
|
||||
|
||||
if not predicate(provided_keys):
|
||||
msg = (_("Validation of dictionary's keys failed. "
|
||||
"Expected keys: %(expected_keys)s "
|
||||
"Provided keys: %(provided_keys)s") %
|
||||
{'expected_keys': expected_keys,
|
||||
'provided_keys': provided_keys})
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def is_attr_set(attribute):
|
||||
return not (attribute is None or attribute is constants.ATTR_NOT_SPECIFIED)
|
||||
|
||||
|
||||
def _validate_list_of_items(item_validator, data, *args, **kwargs):
|
||||
if not isinstance(data, list):
|
||||
msg = _("'%s' is not a list") % data
|
||||
return msg
|
||||
|
||||
if len(set(data)) != len(data):
|
||||
msg = _("Duplicate items in the list: '%s'") % ', '.join(data)
|
||||
return msg
|
||||
|
||||
for item in data:
|
||||
msg = item_validator(item, *args, **kwargs)
|
||||
if msg:
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_values(data, valid_values=None):
|
||||
if data not in valid_values:
|
||||
msg = (_("'%(data)s' is not in %(valid_values)s") %
|
||||
{'data': data, 'valid_values': valid_values})
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_not_empty_string_or_none(data, max_len=None):
|
||||
if data is not None:
|
||||
return _validate_not_empty_string(data, max_len=max_len)
|
||||
|
||||
|
||||
def _validate_not_empty_string(data, max_len=None):
|
||||
msg = _validate_string(data, max_len=max_len)
|
||||
if msg:
|
||||
return msg
|
||||
if not data.strip():
|
||||
msg = _("'%s' Blank strings are not permitted") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_string_or_none(data, max_len=None):
|
||||
if data is not None:
|
||||
return _validate_string(data, max_len=max_len)
|
||||
|
||||
|
||||
def _validate_string(data, max_len=None):
|
||||
if not isinstance(data, six.string_types):
|
||||
msg = _("'%s' is not a valid string") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
if max_len is not None and len(data) > max_len:
|
||||
msg = (_("'%(data)s' exceeds maximum length of %(max_len)s") %
|
||||
{'data': data, 'max_len': max_len})
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
validate_list_of_unique_strings = functools.partial(_validate_list_of_items,
|
||||
_validate_string)
|
||||
|
||||
|
||||
def _validate_boolean(data, valid_values=None):
|
||||
try:
|
||||
convert_to_boolean(data)
|
||||
except n_exc.InvalidInput:
|
||||
msg = _("'%s' is not a valid boolean value") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_range(data, valid_values=None):
|
||||
"""Check that integer value is within a range provided.
|
||||
|
||||
Test is inclusive. Allows either limit to be ignored, to allow
|
||||
checking ranges where only the lower or upper limit matter.
|
||||
It is expected that the limits provided are valid integers or
|
||||
the value None.
|
||||
"""
|
||||
|
||||
min_value = valid_values[0]
|
||||
max_value = valid_values[1]
|
||||
try:
|
||||
data = int(data)
|
||||
except (ValueError, TypeError):
|
||||
msg = _("'%s' is not an integer") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
if min_value is not UNLIMITED and data < min_value:
|
||||
msg = _("'%(data)s' is too small - must be at least "
|
||||
"'%(limit)d'") % {'data': data, 'limit': min_value}
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
if max_value is not UNLIMITED and data > max_value:
|
||||
msg = _("'%(data)s' is too large - must be no larger than "
|
||||
"'%(limit)d'") % {'data': data, 'limit': max_value}
|
||||
LOG.debug(msg)
|
||||
return msg
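Illustrative sketch of the range check now delegated to neutron-lib: bounds are inclusive and either side may be UNLIMITED (None), which this module re-exports via and_also() above; assuming neutron_lib.api.validators exposes the same names:

from neutron_lib.api import validators

assert validators.validate_range(5, [1, 10]) is None
assert validators.validate_range(5, [validators.UNLIMITED, 10]) is None
assert validators.validate_range(0, [1, validators.UNLIMITED]) is not None  # error string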
|
||||
|
||||
def _validate_no_whitespace(data):
|
||||
"""Validates that input has no whitespace."""
|
||||
if re.search(r'\s', data):
|
||||
msg = _("'%s' contains whitespace") % data
|
||||
LOG.debug(msg)
|
||||
raise n_exc.InvalidInput(error_message=msg)
|
||||
return data
|
||||
|
||||
|
||||
def _validate_mac_address(data, valid_values=None):
|
||||
try:
|
||||
valid_mac = netaddr.valid_mac(_validate_no_whitespace(data))
|
||||
except Exception:
|
||||
valid_mac = False
|
||||
|
||||
if valid_mac:
|
||||
valid_mac = not netaddr.EUI(data) in map(
|
||||
netaddr.EUI, constants.INVALID_MAC_ADDRESSES)
|
||||
# TODO(arosen): The code in this file should be refactored
|
||||
# so it catches the correct exceptions. _validate_no_whitespace
|
||||
# raises AttributeError if data is None.
|
||||
if not valid_mac:
|
||||
msg = _("'%s' is not a valid MAC address") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_mac_address_or_none(data, valid_values=None):
|
||||
if data is not None:
|
||||
return _validate_mac_address(data, valid_values)
|
||||
|
||||
|
||||
def _validate_ip_address(data, valid_values=None):
|
||||
msg = None
|
||||
try:
|
||||
# netaddr.core.ZEROFILL is only applicable to IPv4.
|
||||
# it will remove leading zeros from IPv4 address octets.
|
||||
ip = netaddr.IPAddress(_validate_no_whitespace(data),
|
||||
flags=netaddr.core.ZEROFILL)
|
||||
# The followings are quick checks for IPv6 (has ':') and
|
||||
# IPv4. (has 3 periods like 'xx.xx.xx.xx')
|
||||
# NOTE(yamamoto): netaddr uses libraries provided by the underlying
|
||||
# platform to convert addresses. For example, inet_aton(3).
|
||||
# Some platforms, including NetBSD and OS X, have inet_aton
|
||||
# implementation which accepts more varying forms of addresses than
|
||||
# we want to accept here. The following check is to reject such
|
||||
# addresses. For Example:
|
||||
# >>> netaddr.IPAddress('1' * 59)
|
||||
# IPAddress('199.28.113.199')
|
||||
# >>> netaddr.IPAddress(str(int('1' * 59) & 0xffffffff))
|
||||
# IPAddress('199.28.113.199')
|
||||
# >>>
|
||||
if ':' not in data and data.count('.') != 3:
|
||||
msg = _("'%s' is not a valid IP address") % data
|
||||
# A leading '0' in IPv4 address may be interpreted as an octal number,
|
||||
# e.g. 011 octal is 9 decimal. Since there is no standard saying
|
||||
# whether IP address with leading '0's should be interpreted as octal
|
||||
# or decimal, hence we reject leading '0's to avoid ambiguity.
|
||||
elif ip.version == 4 and str(ip) != data:
|
||||
msg = _("'%(data)s' is not an accepted IP address, "
|
||||
"'%(ip)s' is recommended") % {"data": data, "ip": ip}
|
||||
except Exception:
|
||||
msg = _("'%s' is not a valid IP address") % data
|
||||
if msg:
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_ip_pools(data, valid_values=None):
|
||||
"""Validate that start and end IP addresses are present.
|
||||
|
||||
In addition to this the IP addresses will also be validated
|
||||
"""
|
||||
if not isinstance(data, list):
|
||||
msg = _("Invalid data format for IP pool: '%s'") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
expected_keys = ['start', 'end']
|
||||
for ip_pool in data:
|
||||
msg = _verify_dict_keys(expected_keys, ip_pool)
|
||||
if msg:
|
||||
return msg
|
||||
for k in expected_keys:
|
||||
msg = _validate_ip_address(ip_pool[k])
|
||||
if msg:
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_fixed_ips(data, valid_values=None):
|
||||
if not isinstance(data, list):
|
||||
msg = _("Invalid data format for fixed IP: '%s'") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
ips = []
|
||||
for fixed_ip in data:
|
||||
if not isinstance(fixed_ip, dict):
|
||||
msg = _("Invalid data format for fixed IP: '%s'") % fixed_ip
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
if 'ip_address' in fixed_ip:
|
||||
# Ensure that duplicate entries are not set - just checking IP
|
||||
# suffices. Duplicate subnet_id's are legitimate.
|
||||
fixed_ip_address = fixed_ip['ip_address']
|
||||
if fixed_ip_address in ips:
|
||||
msg = _("Duplicate IP address '%s'") % fixed_ip_address
|
||||
LOG.debug(msg)
|
||||
else:
|
||||
msg = _validate_ip_address(fixed_ip_address)
|
||||
if msg:
|
||||
return msg
|
||||
ips.append(fixed_ip_address)
|
||||
if 'subnet_id' in fixed_ip:
|
||||
msg = _validate_uuid(fixed_ip['subnet_id'])
|
||||
if msg:
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_nameservers(data, valid_values=None):
|
||||
if not hasattr(data, '__iter__'):
|
||||
msg = _("Invalid data format for nameserver: '%s'") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
hosts = []
|
||||
for host in data:
|
||||
# This must be an IP address only
|
||||
msg = _validate_ip_address(host)
|
||||
if msg:
|
||||
msg = _("'%(host)s' is not a valid nameserver. %(msg)s") % {
|
||||
'host': host, 'msg': msg}
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
if host in hosts:
|
||||
msg = _("Duplicate nameserver '%s'") % host
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
hosts.append(host)
|
||||
|
||||
|
||||
def _validate_hostroutes(data, valid_values=None):
|
||||
if not isinstance(data, list):
|
||||
msg = _("Invalid data format for hostroute: '%s'") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
expected_keys = ['destination', 'nexthop']
|
||||
hostroutes = []
|
||||
for hostroute in data:
|
||||
msg = _verify_dict_keys(expected_keys, hostroute)
|
||||
if msg:
|
||||
return msg
|
||||
msg = _validate_subnet(hostroute['destination'])
|
||||
if msg:
|
||||
return msg
|
||||
msg = _validate_ip_address(hostroute['nexthop'])
|
||||
if msg:
|
||||
return msg
|
||||
if hostroute in hostroutes:
|
||||
msg = _("Duplicate hostroute '%s'") % hostroute
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
hostroutes.append(hostroute)
|
||||
|
||||
|
||||
def _validate_ip_address_or_none(data, valid_values=None):
|
||||
if data is not None:
|
||||
return _validate_ip_address(data, valid_values)
|
||||
|
||||
|
||||
def _validate_subnet(data, valid_values=None):
|
||||
msg = None
|
||||
try:
|
||||
net = netaddr.IPNetwork(_validate_no_whitespace(data))
|
||||
if '/' not in data or (net.version == 4 and str(net) != data):
|
||||
msg = _("'%(data)s' isn't a recognized IP subnet cidr,"
|
||||
" '%(cidr)s' is recommended") % {"data": data,
|
||||
"cidr": net.cidr}
|
||||
else:
|
||||
return
|
||||
except Exception:
|
||||
msg = _("'%s' is not a valid IP subnet") % data
|
||||
if msg:
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_subnet_or_none(data, valid_values=None):
|
||||
if data is not None:
|
||||
return _validate_subnet(data, valid_values)
|
||||
|
||||
|
||||
_validate_subnet_list = functools.partial(_validate_list_of_items,
|
||||
_validate_subnet)
|
||||
|
||||
|
||||
def _validate_regex(data, valid_values=None):
|
||||
try:
|
||||
if re.match(valid_values, data):
|
||||
return
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
msg = _("'%s' is not a valid input") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_regex_or_none(data, valid_values=None):
|
||||
if data is not None:
|
||||
return _validate_regex(data, valid_values)
|
||||
|
||||
|
||||
def _validate_subnetpool_id(data, valid_values=None):
|
||||
if data != constants.IPV6_PD_POOL_ID:
|
||||
return _validate_uuid_or_none(data, valid_values)
|
||||
|
||||
|
||||
def _validate_subnetpool_id_or_none(data, valid_values=None):
|
||||
if data is not None:
|
||||
return _validate_subnetpool_id(data, valid_values)
|
||||
|
||||
|
||||
def _validate_uuid(data, valid_values=None):
|
||||
if not uuidutils.is_uuid_like(data):
|
||||
msg = _("'%s' is not a valid UUID") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def _validate_uuid_or_none(data, valid_values=None):
|
||||
if data is not None:
|
||||
return _validate_uuid(data)
|
||||
|
||||
|
||||
_validate_uuid_list = functools.partial(_validate_list_of_items,
|
||||
_validate_uuid)
|
||||
|
||||
|
||||
def _validate_dict_item(key, key_validator, data):
|
||||
# Find conversion function, if any, and apply it
|
||||
conv_func = key_validator.get('convert_to')
|
||||
if conv_func:
|
||||
data[key] = conv_func(data.get(key))
|
||||
# Find validator function
|
||||
# TODO(salv-orlando): Structure of dict attributes should be improved
|
||||
# to avoid iterating over items
|
||||
val_func = val_params = None
|
||||
for (k, v) in six.iteritems(key_validator):
|
||||
if k.startswith('type:'):
|
||||
# ask forgiveness, not permission
|
||||
try:
|
||||
val_func = validators[k]
|
||||
except KeyError:
|
||||
msg = _("Validator '%s' does not exist.") % k
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
val_params = v
|
||||
break
|
||||
# Process validation
|
||||
if val_func:
|
||||
return val_func(data.get(key), val_params)
|
||||
|
||||
|
||||
def _validate_dict(data, key_specs=None):
|
||||
if not isinstance(data, dict):
|
||||
msg = _("'%s' is not a dictionary") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
# Do not perform any further validation, if no constraints are supplied
|
||||
if not key_specs:
|
||||
return
|
||||
|
||||
# Check whether all required keys are present
|
||||
required_keys = [key for key, spec in six.iteritems(key_specs)
|
||||
if spec.get('required')]
|
||||
|
||||
if required_keys:
|
||||
msg = _verify_dict_keys(required_keys, data, False)
|
||||
if msg:
|
||||
return msg
|
||||
|
||||
# Perform validation and conversion of all values
|
||||
# according to the specifications.
|
||||
for key, key_validator in [(k, v) for k, v in six.iteritems(key_specs)
|
||||
if k in data]:
|
||||
msg = _validate_dict_item(key, key_validator, data)
|
||||
if msg:
|
||||
return msg
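A hedged sketch of the key_specs-driven dict validation pattern shown here, using the neutron-lib equivalents; the spec and data below are hypothetical:

from neutron_lib.api import converters
from neutron_lib.api import validators

key_specs = {'name': {'type:not_empty_string': 255, 'required': True},
             'count': {'type:non_negative': None,
                       'convert_to': converters.convert_to_int}}
data = {'name': 'pool-1', 'count': '3'}
assert validators.validate_dict(data, key_specs) is None
assert data['count'] == 3          # 'convert_to' is applied in place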
|
||||
|
||||
def _validate_dict_or_none(data, key_specs=None):
|
||||
if data is not None:
|
||||
return _validate_dict(data, key_specs)
|
||||
|
||||
|
||||
def _validate_dict_or_empty(data, key_specs=None):
|
||||
if data != {}:
|
||||
return _validate_dict(data, key_specs)
|
||||
|
||||
|
||||
def _validate_dict_or_nodata(data, key_specs=None):
|
||||
if data:
|
||||
return _validate_dict(data, key_specs)
|
||||
|
||||
|
||||
def _validate_non_negative(data, valid_values=None):
|
||||
try:
|
||||
data = int(data)
|
||||
except (ValueError, TypeError):
|
||||
msg = _("'%s' is not an integer") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
if data < 0:
|
||||
msg = _("'%s' should be non-negative") % data
|
||||
LOG.debug(msg)
|
||||
return msg
|
||||
|
||||
|
||||
def convert_to_boolean(data):
|
||||
if isinstance(data, six.string_types):
|
||||
val = data.lower()
|
||||
if val == "true" or val == "1":
|
||||
return True
|
||||
if val == "false" or val == "0":
|
||||
return False
|
||||
elif isinstance(data, bool):
|
||||
return data
|
||||
elif isinstance(data, int):
|
||||
if data == 0:
|
||||
return False
|
||||
elif data == 1:
|
||||
return True
|
||||
msg = _("'%s' cannot be converted to boolean") % data
|
||||
raise n_exc.InvalidInput(error_message=msg)
|
||||
|
||||
|
||||
def convert_to_boolean_if_not_none(data):
|
||||
if data is not None:
|
||||
return convert_to_boolean(data)
|
||||
|
||||
|
||||
def convert_to_int(data):
|
||||
try:
|
||||
return int(data)
|
||||
except (ValueError, TypeError):
|
||||
msg = _("'%s' is not an integer") % data
|
||||
raise n_exc.InvalidInput(error_message=msg)
|
||||
|
||||
|
||||
def convert_to_int_if_not_none(data):
|
||||
if data is not None:
|
||||
return convert_to_int(data)
|
||||
return data
|
||||
|
||||
|
||||
def convert_to_positive_float_or_none(val):
|
||||
# NOTE(salv-orlando): This conversion function is currently used by
|
||||
# a vendor specific extension only at the moment It is used for
|
||||
# port's RXTX factor in neutron.plugins.vmware.extensions.qos.
|
||||
# It is deemed however generic enough to be in this module as it
|
||||
# might be used in future for other API attributes.
|
||||
if val is None:
|
||||
return
|
||||
try:
|
||||
val = float(val)
|
||||
if val < 0:
|
||||
raise ValueError()
|
||||
except (ValueError, TypeError):
|
||||
msg = _("'%s' must be a non negative decimal.") % val
|
||||
raise n_exc.InvalidInput(error_message=msg)
|
||||
return val
|
||||
|
||||
|
||||
def convert_kvp_str_to_list(data):
|
||||
"""Convert a value of the form 'key=value' to ['key', 'value'].
|
||||
|
||||
:raises: n_exc.InvalidInput if any of the strings are malformed
|
||||
(e.g. do not contain a key).
|
||||
"""
|
||||
kvp = [x.strip() for x in data.split('=', 1)]
|
||||
if len(kvp) == 2 and kvp[0]:
|
||||
return kvp
|
||||
msg = _("'%s' is not of the form <key>=[value]") % data
|
||||
raise n_exc.InvalidInput(error_message=msg)
|
||||
|
||||
|
||||
def convert_kvp_list_to_dict(kvp_list):
|
||||
"""Convert a list of 'key=value' strings to a dict.
|
||||
|
||||
:raises: n_exc.InvalidInput if any of the strings are malformed
|
||||
(e.g. do not contain a key) or if any
|
||||
of the keys appear more than once.
|
||||
"""
|
||||
if kvp_list == ['True']:
|
||||
# No values were provided (i.e. '--flag-name')
|
||||
return {}
|
||||
kvp_map = {}
|
||||
for kvp_str in kvp_list:
|
||||
key, value = convert_kvp_str_to_list(kvp_str)
|
||||
kvp_map.setdefault(key, set())
|
||||
kvp_map[key].add(value)
|
||||
return dict((x, list(y)) for x, y in six.iteritems(kvp_map))
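Illustrative behaviour of the two kvp converters removed here, which now live in neutron_lib.api.converters with the same semantics:

from neutron_lib.api import converters

assert converters.convert_kvp_str_to_list('subnet_id=abc') == ['subnet_id', 'abc']
result = converters.convert_kvp_list_to_dict(['a=1', 'a=2', 'b=3'])
assert sorted(result['a']) == ['1', '2'] and result['b'] == ['3']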
|
||||
|
||||
def convert_none_to_empty_list(value):
|
||||
return [] if value is None else value
|
||||
|
||||
|
||||
def convert_none_to_empty_dict(value):
|
||||
return {} if value is None else value
|
||||
|
||||
|
||||
def convert_to_list(data):
|
||||
if data is None:
|
||||
return []
|
||||
elif hasattr(data, '__iter__') and not isinstance(data, six.string_types):
|
||||
return list(data)
|
||||
else:
|
||||
return [data]
|
||||
|
||||
|
||||
# Note: In order to ensure that the MAC address is unicast the first byte
|
||||
# must be even.
|
||||
MAC_PATTERN = "^%s[aceACE02468](:%s{2}){5}$" % (constants.HEX_ELEM,
|
||||
constants.HEX_ELEM)
|
||||
|
||||
# Dictionary that maintains a list of validation functions
|
||||
validators = {'type:dict': _validate_dict,
|
||||
'type:dict_or_none': _validate_dict_or_none,
|
||||
'type:dict_or_empty': _validate_dict_or_empty,
|
||||
'type:dict_or_nodata': _validate_dict_or_nodata,
|
||||
'type:fixed_ips': _validate_fixed_ips,
|
||||
'type:hostroutes': _validate_hostroutes,
|
||||
'type:ip_address': _validate_ip_address,
|
||||
'type:ip_address_or_none': _validate_ip_address_or_none,
|
||||
'type:ip_pools': _validate_ip_pools,
|
||||
'type:mac_address': _validate_mac_address,
|
||||
'type:mac_address_or_none': _validate_mac_address_or_none,
|
||||
'type:nameservers': _validate_nameservers,
|
||||
'type:non_negative': _validate_non_negative,
|
||||
'type:range': _validate_range,
|
||||
'type:regex': _validate_regex,
|
||||
'type:regex_or_none': _validate_regex_or_none,
|
||||
'type:string': _validate_string,
|
||||
'type:string_or_none': _validate_string_or_none,
|
||||
'type:not_empty_string': _validate_not_empty_string,
|
||||
'type:not_empty_string_or_none':
|
||||
_validate_not_empty_string_or_none,
|
||||
'type:subnet': _validate_subnet,
|
||||
'type:subnet_list': _validate_subnet_list,
|
||||
'type:subnet_or_none': _validate_subnet_or_none,
|
||||
'type:subnetpool_id': _validate_subnetpool_id,
|
||||
'type:subnetpool_id_or_none': _validate_subnetpool_id_or_none,
|
||||
'type:uuid': _validate_uuid,
|
||||
'type:uuid_or_none': _validate_uuid_or_none,
|
||||
'type:uuid_list': _validate_uuid_list,
|
||||
'type:values': _validate_values,
|
||||
'type:boolean': _validate_boolean,
|
||||
'type:list_of_unique_strings': validate_list_of_unique_strings}
|
||||
|
||||
# Define constants for base resource name
|
||||
NETWORK = 'network'
|
||||
@ -687,7 +156,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'is_visible': True},
|
||||
'admin_state_up': {'allow_post': True, 'allow_put': True,
|
||||
'default': True,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': True},
|
||||
'status': {'allow_post': False, 'allow_put': False,
|
||||
'is_visible': True},
|
||||
@ -698,7 +167,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
SHARED: {'allow_post': True,
|
||||
'allow_put': True,
|
||||
'default': False,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': True,
|
||||
'required_by_policy': True,
|
||||
'enforce_policy': True},
|
||||
@ -717,7 +186,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'is_visible': True},
|
||||
'admin_state_up': {'allow_post': True, 'allow_put': True,
|
||||
'default': True,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': True},
|
||||
'mac_address': {'allow_post': True, 'allow_put': True,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
@ -726,7 +195,8 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'is_visible': True},
|
||||
'fixed_ips': {'allow_post': True, 'allow_put': True,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'convert_list_to': convert_kvp_list_to_dict,
|
||||
'convert_list_to':
|
||||
lib_converters.convert_kvp_list_to_dict,
|
||||
'validate': {'type:fixed_ips': None},
|
||||
'enforce_policy': True,
|
||||
'is_visible': True},
|
||||
@ -754,7 +224,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'validate': {'type:string': NAME_MAX_LEN},
|
||||
'is_visible': True},
|
||||
'ip_version': {'allow_post': True, 'allow_put': False,
|
||||
'convert_to': convert_to_int,
|
||||
'convert_to': lib_converters.convert_to_int,
|
||||
'validate': {'type:values': [4, 6]},
|
||||
'is_visible': True},
|
||||
'network_id': {'allow_post': True, 'allow_put': False,
|
||||
@ -770,7 +240,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'prefixlen': {'allow_post': True,
|
||||
'allow_put': False,
|
||||
'validate': {'type:non_negative': None},
|
||||
'convert_to': convert_to_int,
|
||||
'convert_to': lib_converters.convert_to_int,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'required_by_policy': False,
|
||||
'is_visible': False},
|
||||
@ -789,12 +259,14 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'validate': {'type:ip_pools': None},
|
||||
'is_visible': True},
|
||||
'dns_nameservers': {'allow_post': True, 'allow_put': True,
|
||||
'convert_to': convert_none_to_empty_list,
|
||||
'convert_to':
|
||||
lib_converters.convert_none_to_empty_list,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'validate': {'type:nameservers': None},
|
||||
'is_visible': True},
|
||||
'host_routes': {'allow_post': True, 'allow_put': True,
|
||||
'convert_to': convert_none_to_empty_list,
|
||||
'convert_to':
|
||||
lib_converters.convert_none_to_empty_list,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'validate': {'type:hostroutes': None},
|
||||
'is_visible': True},
|
||||
@ -804,7 +276,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'is_visible': True},
|
||||
'enable_dhcp': {'allow_post': True, 'allow_put': True,
|
||||
'default': True,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': True},
|
||||
'ipv6_ra_mode': {'allow_post': True, 'allow_put': False,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
@ -818,7 +290,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
SHARED: {'allow_post': False,
|
||||
'allow_put': False,
|
||||
'default': False,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': False,
|
||||
'required_by_policy': True,
|
||||
'enforce_policy': True},
|
||||
@ -845,7 +317,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'default_quota': {'allow_post': True,
|
||||
'allow_put': True,
|
||||
'validate': {'type:non_negative': None},
|
||||
'convert_to': convert_to_int,
|
||||
'convert_to': lib_converters.convert_to_int,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'is_visible': True},
|
||||
'ip_version': {'allow_post': False,
|
||||
@ -854,32 +326,32 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'default_prefixlen': {'allow_post': True,
|
||||
'allow_put': True,
|
||||
'validate': {'type:non_negative': None},
|
||||
'convert_to': convert_to_int,
|
||||
'convert_to': lib_converters.convert_to_int,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'is_visible': True},
|
||||
'min_prefixlen': {'allow_post': True,
|
||||
'allow_put': True,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'validate': {'type:non_negative': None},
|
||||
'convert_to': convert_to_int,
|
||||
'convert_to': lib_converters.convert_to_int,
|
||||
'is_visible': True},
|
||||
'max_prefixlen': {'allow_post': True,
|
||||
'allow_put': True,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'validate': {'type:non_negative': None},
|
||||
'convert_to': convert_to_int,
|
||||
'convert_to': lib_converters.convert_to_int,
|
||||
'is_visible': True},
|
||||
'is_default': {'allow_post': True,
|
||||
'allow_put': True,
|
||||
'default': False,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': True,
|
||||
'required_by_policy': True,
|
||||
'enforce_policy': True},
|
||||
SHARED: {'allow_post': True,
|
||||
'allow_put': False,
|
||||
'default': False,
|
||||
'convert_to': convert_to_boolean,
|
||||
'convert_to': lib_converters.convert_to_boolean,
|
||||
'is_visible': True,
|
||||
'required_by_policy': True,
|
||||
'enforce_policy': True},
|
||||
@@ -934,8 +406,7 @@ def fill_default_value(attr_info, res_dict,
check_allow_post=True):
for attr, attr_vals in six.iteritems(attr_info):
if attr_vals['allow_post']:
if ('default' not in attr_vals and
attr not in res_dict):
if 'default' not in attr_vals and attr not in res_dict:
msg = _("Failed to parse request. Required "
"attribute '%s' not specified") % attr
raise exc_cls(msg)
@@ -959,7 +430,8 @@ def convert_value(attr_info, res_dict, exc_cls=ValueError):
if 'validate' not in attr_vals:
continue
for rule in attr_vals['validate']:
res = validators[rule](res_dict[attr], attr_vals['validate'][rule])
res = lib_validators.validators[rule](res_dict[attr],
attr_vals['validate'][rule])
if res:
msg_dict = dict(attr=attr, reason=res)
msg = _("Invalid input for %(attr)s. "
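For context, this is how a 'validate' spec is dispatched through the shared registry that convert_value now reads from neutron-lib; a small illustrative sketch with a hypothetical attribute spec:

from neutron_lib.api import validators as lib_validators

# shaped like an entry in RESOURCE_ATTRIBUTE_MAP, e.g. an ip_version-like attribute
attr_vals = {'validate': {'type:values': [4, 6]}}
for rule in attr_vals['validate']:
    res = lib_validators.validators[rule](6, attr_vals['validate'][rule])
    assert res is None                                  # None means the value passed
assert lib_validators.validators['type:values'](5, [4, 6])  # error message on failure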
@@ -19,23 +19,33 @@ from neutron._i18n import _


class _DeprecateSubset(object):
additional = {}

def __init__(self, my_globals, other_mod):
self.other_mod = other_mod
self.my_globals = copy.copy(my_globals)

@classmethod
def and_also(cls, name, other_mod):
cls.additional[name] = other_mod

def __getattr__(self, name):
a = self.my_globals.get(name)
if (not name.startswith("__") and not inspect.ismodule(a) and
name in vars(self.other_mod)):
if not name.startswith("__") and not inspect.ismodule(a):
other_mod = self.additional.get(name) or self.other_mod
if name in vars(other_mod):

debtcollector.deprecate(
name,
message='moved to neutron_lib',
version='mitaka',
removal_version='newton',
stacklevel=4)
# These should be enabled after most have been cleaned up
# in neutron proper, which may not happen during the busy M-3.

return vars(self.other_mod)[name]
debtcollector.deprecate(
name,
message='moved to %s' % other_mod.__name__,
version='mitaka',
removal_version='newton',
stacklevel=4)

return vars(other_mod)[name]

try:
return self.my_globals[name]
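The and_also() hook registered above lets individual names be redirected to a target module other than the default. A minimal, self-contained sketch of the proxy pattern _DeprecateSubset implements (simplified; not the actual neutron code):

import warnings


class _ModuleProxy(object):
    """Stand-in for a deprecated module: moved names resolve elsewhere."""

    def __init__(self, my_globals, other_mod):
        self.my_globals = my_globals
        self.other_mod = other_mod

    def __getattr__(self, name):
        if not name.startswith('__') and name in vars(self.other_mod):
            warnings.warn('%s moved to %s' % (name, self.other_mod.__name__),
                          DeprecationWarning, stacklevel=2)
            return vars(self.other_mod)[name]
        try:
            return self.my_globals[name]
        except KeyError:
            raise AttributeError(name)

# a deprecated module would typically end with something like:
# sys.modules[__name__] = _ModuleProxy(globals(), neutron_lib.constants)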
@@ -20,6 +20,7 @@ Routines for configuring Neutron
import sys

from keystoneauth1 import loading as ks_loading
from neutron_lib.api import validators
from oslo_config import cfg
from oslo_db import options as db_options
from oslo_log import log as logging
@@ -28,7 +29,6 @@ from oslo_middleware import cors
from oslo_service import wsgi

from neutron._i18n import _, _LI
from neutron.api.v2 import attributes
from neutron.common import constants
from neutron.common import utils
from neutron import policy
@@ -241,8 +241,7 @@ def init(args, **kwargs):
n_rpc.init(cfg.CONF)

# Validate that the base_mac is of the correct format
msg = attributes._validate_regex(cfg.CONF.base_mac,
attributes.MAC_PATTERN)
msg = validators.validate_regex(cfg.CONF.base_mac, validators.MAC_PATTERN)
if msg:
msg = _("Base MAC: %s") % msg
raise Exception(msg)
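Illustrative sketch of the replacement call above: validate_regex returns None when base_mac matches the unicast-MAC pattern and an error string otherwise, assuming neutron-lib exports MAC_PATTERN as this change relies on:

from neutron_lib.api import validators

assert validators.validate_regex('fa:16:3e:00:00:00', validators.MAC_PATTERN) is None
assert validators.validate_regex('not-a-mac', validators.MAC_PATTERN) is not None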
@ -16,6 +16,7 @@
|
||||
import datetime
|
||||
|
||||
from eventlet import greenthread
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import constants
|
||||
from oslo_config import cfg
|
||||
from oslo_db import exception as db_exc
|
||||
@ -282,7 +283,7 @@ class AgentDbMixin(ext_agent.AgentPluginBase, AgentAvailabilityZoneMixin):
|
||||
filters=filters, fields=fields)
|
||||
alive = filters and filters.get('alive', None)
|
||||
if alive:
|
||||
alive = attributes.convert_to_boolean(alive[0])
|
||||
alive = converters.convert_to_boolean(alive[0])
|
||||
agents = [agent for agent in agents if agent['alive'] == alive]
|
||||
return agents
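Illustrative sketch of the converter now used above: convert_to_boolean accepts 'true'/'false'/'1'/'0' strings, bools, and 0/1 integers, and raises InvalidInput for anything else, mirroring the neutron implementation removed in this commit:

from neutron_lib.api import converters
from neutron_lib import exceptions

assert converters.convert_to_boolean('True') is True
assert converters.convert_to_boolean('0') is False
assert converters.convert_to_boolean(1) is True
try:
    converters.convert_to_boolean('maybe')
except exceptions.InvalidInput:
    pass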
|
||||
|
@ -13,9 +13,9 @@
|
||||
# under the License.
|
||||
#
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from oslo_db import exception as db_exc
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import orm
|
||||
|
||||
from neutron.api.v2 import attributes as attr
|
||||
@ -43,7 +43,7 @@ class AllowedAddressPairsMixin(object):
|
||||
|
||||
def _process_create_allowed_address_pairs(self, context, port,
|
||||
allowed_address_pairs):
|
||||
if not attr.is_attr_set(allowed_address_pairs):
|
||||
if not validators.is_attr_set(allowed_address_pairs):
|
||||
return []
|
||||
try:
|
||||
with context.session.begin(subtransactions=True):
|
||||
@ -95,7 +95,7 @@ class AllowedAddressPairsMixin(object):
|
||||
return self._fields(res, fields)
|
||||
|
||||
def _has_address_pairs(self, port):
|
||||
return (attr.is_attr_set(port['port'][addr_pair.ADDRESS_PAIRS])
|
||||
return (validators.is_attr_set(port['port'][addr_pair.ADDRESS_PAIRS])
|
||||
and port['port'][addr_pair.ADDRESS_PAIRS] != [])
|
||||
|
||||
def _check_update_has_allowed_address_pairs(self, port):
|
||||
|
@ -15,6 +15,7 @@
|
||||
|
||||
import functools
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions as n_exc
|
||||
from oslo_config import cfg
|
||||
@ -309,9 +310,9 @@ class DbBasePluginCommon(common_db_mixin.CommonDbMixin):
|
||||
'gateway_ip': gateway_ip,
|
||||
'description': subnet.get('description')}
|
||||
if subnet['ip_version'] == 6 and subnet['enable_dhcp']:
|
||||
if attributes.is_attr_set(subnet['ipv6_ra_mode']):
|
||||
if validators.is_attr_set(subnet['ipv6_ra_mode']):
|
||||
args['ipv6_ra_mode'] = subnet['ipv6_ra_mode']
|
||||
if attributes.is_attr_set(subnet['ipv6_address_mode']):
|
||||
if validators.is_attr_set(subnet['ipv6_address_mode']):
|
||||
args['ipv6_address_mode'] = subnet['ipv6_address_mode']
|
||||
return args
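Illustrative sketch of the check now imported from neutron-lib: is_attr_set treats None and the ATTR_NOT_SPECIFIED sentinel as "not supplied by the caller":

from neutron_lib.api import validators
from neutron_lib import constants

assert validators.is_attr_set('dhcpv6-stateful')
assert not validators.is_attr_set(None)
assert not validators.is_attr_set(constants.ATTR_NOT_SPECIFIED)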
|
||||
|
@ -16,6 +16,7 @@
|
||||
import functools
|
||||
|
||||
import netaddr
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions as exc
|
||||
from oslo_config import cfg
|
||||
@ -231,8 +232,8 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
||||
if cur_subnet:
|
||||
self._validate_ipv6_update_dhcp(subnet, cur_subnet)
|
||||
return
|
||||
ra_mode_set = attributes.is_attr_set(subnet.get('ipv6_ra_mode'))
|
||||
address_mode_set = attributes.is_attr_set(
|
||||
ra_mode_set = validators.is_attr_set(subnet.get('ipv6_ra_mode'))
|
||||
address_mode_set = validators.is_attr_set(
|
||||
subnet.get('ipv6_address_mode'))
|
||||
self._validate_ipv6_dhcp(ra_mode_set, address_mode_set,
|
||||
subnet['enable_dhcp'])
|
||||
@ -274,16 +275,16 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
||||
msg = _("Cannot disable enable_dhcp with "
|
||||
"ipv6 attributes set")
|
||||
|
||||
ra_mode_set = attributes.is_attr_set(subnet.get('ipv6_ra_mode'))
|
||||
address_mode_set = attributes.is_attr_set(
|
||||
ra_mode_set = validators.is_attr_set(subnet.get('ipv6_ra_mode'))
|
||||
address_mode_set = validators.is_attr_set(
|
||||
subnet.get('ipv6_address_mode'))
|
||||
|
||||
if ra_mode_set or address_mode_set:
|
||||
raise exc.InvalidInput(error_message=msg)
|
||||
|
||||
old_ra_mode_set = attributes.is_attr_set(
|
||||
old_ra_mode_set = validators.is_attr_set(
|
||||
cur_subnet.get('ipv6_ra_mode'))
|
||||
old_address_mode_set = attributes.is_attr_set(
|
||||
old_address_mode_set = validators.is_attr_set(
|
||||
cur_subnet.get('ipv6_address_mode'))
|
||||
|
||||
if old_ra_mode_set or old_address_mode_set:
|
||||
@ -439,7 +440,7 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
||||
|
||||
ip_ver = s['ip_version']
|
||||
|
||||
if attributes.is_attr_set(s.get('cidr')):
|
||||
if validators.is_attr_set(s.get('cidr')):
|
||||
self._validate_ip_version(ip_ver, s['cidr'], 'cidr')
|
||||
|
||||
# TODO(watanabe.isao): After we found a way to avoid the re-sync
|
||||
@ -466,7 +467,7 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
||||
"if enable_dhcp is True.")
|
||||
raise exc.InvalidInput(error_message=error_message)
|
||||
|
||||
if attributes.is_attr_set(s.get('gateway_ip')):
|
||||
if validators.is_attr_set(s.get('gateway_ip')):
|
||||
self._validate_ip_version(ip_ver, s['gateway_ip'], 'gateway_ip')
|
||||
is_gateway_not_valid = (
|
||||
ipam.utils.check_gateway_invalid_in_subnet(
|
||||
@ -491,7 +492,7 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
||||
ip_address=cur_subnet['gateway_ip'],
|
||||
port_id=allocated['port_id'])
|
||||
|
||||
if attributes.is_attr_set(s.get('dns_nameservers')):
|
||||
if validators.is_attr_set(s.get('dns_nameservers')):
|
||||
if len(s['dns_nameservers']) > cfg.CONF.max_dns_nameservers:
|
||||
raise n_exc.DNSNameServersExhausted(
|
||||
subnet_id=s.get('id', _('new subnet')),
|
||||
@ -505,7 +506,7 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
||||
dns))
|
||||
self._validate_ip_version(ip_ver, dns, 'dns_nameserver')
|
||||
|
||||
if attributes.is_attr_set(s.get('host_routes')):
|
||||
if validators.is_attr_set(s.get('host_routes')):
|
||||
if len(s['host_routes']) > cfg.CONF.max_subnet_host_routes:
|
||||
raise n_exc.HostRoutesExhausted(
|
||||
subnet_id=s.get('id', _('new subnet')),
|
||||
@ -515,11 +516,11 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
||||
self._validate_host_route(rt, ip_ver)
|
||||
|
||||
if ip_ver == 4:
|
||||
if attributes.is_attr_set(s.get('ipv6_ra_mode')):
|
||||
if validators.is_attr_set(s.get('ipv6_ra_mode')):
|
||||
raise exc.InvalidInput(
|
||||
error_message=(_("ipv6_ra_mode is not valid when "
|
||||
"ip_version is 4")))
|
||||
if attributes.is_attr_set(s.get('ipv6_address_mode')):
|
||||
if validators.is_attr_set(s.get('ipv6_address_mode')):
|
||||
raise exc.InvalidInput(
|
||||
error_message=(_("ipv6_address_mode is not valid when "
|
||||
"ip_version is 4")))
|
||||
@ -626,11 +627,11 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
||||
return
|
||||
|
||||
cidr = subnet.get('cidr')
|
||||
if attributes.is_attr_set(cidr):
|
||||
if validators.is_attr_set(cidr):
|
||||
ip_version = netaddr.IPNetwork(cidr).version
|
||||
else:
|
||||
ip_version = subnet.get('ip_version')
|
||||
if not attributes.is_attr_set(ip_version):
|
||||
if not validators.is_attr_set(ip_version):
|
||||
msg = _('ip_version must be specified in the absence of '
|
||||
'cidr and subnetpool_id')
|
||||
raise exc.BadRequest(resource='subnets', msg=msg)
|
||||
@ -658,8 +659,8 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
||||
s = subnet['subnet']
|
||||
cidr = s.get('cidr', constants.ATTR_NOT_SPECIFIED)
|
||||
prefixlen = s.get('prefixlen', constants.ATTR_NOT_SPECIFIED)
|
||||
has_cidr = attributes.is_attr_set(cidr)
|
||||
has_prefixlen = attributes.is_attr_set(prefixlen)
|
||||
has_cidr = validators.is_attr_set(cidr)
|
||||
has_prefixlen = validators.is_attr_set(prefixlen)
|
||||
|
||||
if has_cidr and has_prefixlen:
|
||||
msg = _('cidr and prefixlen must not be supplied together')
|
||||
@ -915,7 +916,7 @@ class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
|
||||
- the address family of the subnetpool and address scope
|
||||
are the same
|
||||
"""
|
||||
if not attributes.is_attr_set(address_scope_id):
|
||||
if not validators.is_attr_set(address_scope_id):
|
||||
return
|
||||
|
||||
if not self.is_address_scope_owned_by_tenant(context,
|
||||
|
@ -13,6 +13,7 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import exceptions as n_exc
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
@ -20,7 +21,6 @@ import sqlalchemy as sa
|
||||
from sqlalchemy import orm
|
||||
|
||||
from neutron._i18n import _, _LE
|
||||
from neutron.api.v2 import attributes
|
||||
from neutron.common import utils
|
||||
from neutron.db import db_base_plugin_v2
|
||||
from neutron.db import l3_db
|
||||
@ -154,7 +154,7 @@ class DNSDbMixin(object):
|
||||
floatingip_data, req_data):
|
||||
# expects to be called within a plugin's session
|
||||
dns_domain = req_data.get(dns.DNSDOMAIN)
|
||||
if not attributes.is_attr_set(dns_domain):
|
||||
if not validators.is_attr_set(dns_domain):
|
||||
return
|
||||
if not self.dns_driver:
|
||||
return
|
||||
|
@ -13,6 +13,7 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants as l3_constants
|
||||
from neutron_lib import exceptions as n_exc
|
||||
import sqlalchemy as sa
|
||||
@ -118,7 +119,7 @@ class External_net_db_mixin(object):
|
||||
|
||||
def _process_l3_create(self, context, net_data, req_data):
|
||||
external = req_data.get(external_net.EXTERNAL)
|
||||
external_set = attributes.is_attr_set(external)
|
||||
external_set = validators.is_attr_set(external)
|
||||
|
||||
if not external_set:
|
||||
return
|
||||
@ -157,7 +158,7 @@ class External_net_db_mixin(object):
|
||||
|
||||
new_value = req_data.get(external_net.EXTERNAL)
|
||||
net_id = net_data['id']
|
||||
if not attributes.is_attr_set(new_value):
|
||||
if not validators.is_attr_set(new_value):
|
||||
return
|
||||
|
||||
if net_data.get(external_net.EXTERNAL) == new_value:
|
||||
|
@ -17,6 +17,7 @@ import collections
|
||||
import itertools
|
||||
|
||||
import netaddr
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants as const
|
||||
from neutron_lib import exceptions as exc
|
||||
from oslo_config import cfg
|
||||
@ -25,7 +26,6 @@ from oslo_log import log as logging
|
||||
from sqlalchemy.orm import exc as orm_exc
|
||||
|
||||
from neutron._i18n import _, _LI
|
||||
from neutron.api.v2 import attributes
|
||||
from neutron.common import constants
|
||||
from neutron.common import exceptions as n_exc
|
||||
from neutron.common import ipv6_utils
|
||||
@ -79,8 +79,8 @@ class IpamBackendMixin(db_base_plugin_common.DbBasePluginCommon):
|
||||
|
||||
Allocation pools can be set for specific subnet request only
|
||||
"""
|
||||
has_allocpool = attributes.is_attr_set(subnet['allocation_pools'])
|
||||
is_any_subnetpool_request = not attributes.is_attr_set(subnet['cidr'])
|
||||
has_allocpool = validators.is_attr_set(subnet['allocation_pools'])
|
||||
is_any_subnetpool_request = not validators.is_attr_set(subnet['cidr'])
|
||||
if is_any_subnetpool_request and has_allocpool:
|
||||
reason = _("allocation_pools allowed only "
|
||||
"for specific subnet requests.")
|
||||
@ -89,7 +89,7 @@ class IpamBackendMixin(db_base_plugin_common.DbBasePluginCommon):
|
||||
def _validate_ip_version_with_subnetpool(self, subnet, subnetpool):
|
||||
"""Validates ip version for subnet_pool and requested subnet"""
|
||||
ip_version = subnet.get('ip_version')
|
||||
has_ip_version = attributes.is_attr_set(ip_version)
|
||||
has_ip_version = validators.is_attr_set(ip_version)
|
||||
if has_ip_version and ip_version != subnetpool.ip_version:
|
||||
args = {'req_ver': str(subnet['ip_version']),
|
||||
'pool_ver': str(subnetpool.ip_version)}
|
||||
@ -350,7 +350,7 @@ class IpamBackendMixin(db_base_plugin_common.DbBasePluginCommon):
|
||||
|
||||
def _prepare_allocation_pools(self, allocation_pools, cidr, gateway_ip):
|
||||
"""Returns allocation pools represented as list of IPRanges"""
|
||||
if not attributes.is_attr_set(allocation_pools):
|
||||
if not validators.is_attr_set(allocation_pools):
|
||||
return self.generate_pools(cidr, gateway_ip)
|
||||
|
||||
ip_range_pools = self.pools_to_ip_range(allocation_pools)
|
||||
@ -450,7 +450,7 @@ class IpamBackendMixin(db_base_plugin_common.DbBasePluginCommon):
|
||||
context.session.add(subnet)
|
||||
# NOTE(changzhi) Store DNS nameservers with order into DB one
|
||||
# by one when create subnet with DNS nameservers
|
||||
if attributes.is_attr_set(dns_nameservers):
|
||||
if validators.is_attr_set(dns_nameservers):
|
||||
for order, server in enumerate(dns_nameservers):
|
||||
dns = models_v2.DNSNameServer(
|
||||
address=server,
|
||||
@ -458,7 +458,7 @@ class IpamBackendMixin(db_base_plugin_common.DbBasePluginCommon):
|
||||
subnet_id=subnet.id)
|
||||
context.session.add(dns)
|
||||
|
||||
if attributes.is_attr_set(host_routes):
|
||||
if validators.is_attr_set(host_routes):
|
||||
for rt in host_routes:
|
||||
route = models_v2.SubnetRoute(
|
||||
subnet_id=subnet.id,
|
||||
|
@ -13,6 +13,7 @@
|
||||
# under the License.
|
||||
import collections
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants as const
|
||||
from neutron_lib import exceptions as n_exc
|
||||
from oslo_config import cfg
|
||||
@ -22,7 +23,6 @@ from oslo_utils import excutils
|
||||
import six
|
||||
|
||||
from neutron._i18n import _, _LI, _LW
|
||||
from neutron.api.v2 import attributes
|
||||
from neutron.callbacks import events
|
||||
from neutron.callbacks import exceptions
|
||||
from neutron.callbacks import registry
|
||||
@ -952,6 +952,6 @@ def is_distributed_router(router):
|
||||
except AttributeError:
|
||||
# if not, try to see if it is a request body
|
||||
requested_router_type = router.get('distributed')
|
||||
if attributes.is_attr_set(requested_router_type):
|
||||
if validators.is_attr_set(requested_router_type):
|
||||
return requested_router_type
|
||||
return cfg.CONF.router_distributed
|
||||
|
@ -16,6 +16,7 @@
|
||||
import functools
|
||||
|
||||
import netaddr
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions as n_exc
|
||||
from oslo_config import cfg
|
||||
@ -423,7 +424,7 @@ class L3_HA_NAT_db_mixin(l3_dvr_db.L3_NAT_with_dvr_db_mixin,
|
||||
@classmethod
|
||||
def _is_ha(cls, router):
|
||||
ha = router.get('ha')
|
||||
if not attributes.is_attr_set(ha):
|
||||
if not validators.is_attr_set(ha):
|
||||
ha = cfg.CONF.l3_ha
|
||||
return ha
|
||||
|
||||
@ -756,6 +757,6 @@ def is_ha_router(router):
|
||||
except AttributeError:
|
||||
# if not, try to see if it is a request body
|
||||
requested_router_type = router.get('ha')
|
||||
if attributes.is_attr_set(requested_router_type):
|
||||
if validators.is_attr_set(requested_router_type):
|
||||
return requested_router_type
|
||||
return cfg.CONF.l3_ha
|
||||
|
@ -13,6 +13,7 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import validators
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import orm
|
||||
|
||||
@ -62,12 +63,12 @@ class PortBindingMixin(portbindings_base.PortBindingBaseMixin):
|
||||
def _process_portbindings_create_and_update(self, context, port_data,
|
||||
port):
|
||||
binding_profile = port.get(portbindings.PROFILE)
|
||||
binding_profile_set = attributes.is_attr_set(binding_profile)
|
||||
binding_profile_set = validators.is_attr_set(binding_profile)
|
||||
if not binding_profile_set and binding_profile is not None:
|
||||
del port[portbindings.PROFILE]
|
||||
|
||||
binding_vnic = port.get(portbindings.VNIC_TYPE)
|
||||
binding_vnic_set = attributes.is_attr_set(binding_vnic)
|
||||
binding_vnic_set = validators.is_attr_set(binding_vnic)
|
||||
if not binding_vnic_set and binding_vnic is not None:
|
||||
del port[portbindings.VNIC_TYPE]
|
||||
# REVISIT(irenab) Add support for vnic_type for plugins that
|
||||
@ -76,7 +77,7 @@ class PortBindingMixin(portbindings_base.PortBindingBaseMixin):
|
||||
# PortBindingMixin.
|
||||
|
||||
host = port_data.get(portbindings.HOST_ID)
|
||||
host_set = attributes.is_attr_set(host)
|
||||
host_set = validators.is_attr_set(host)
|
||||
with context.session.begin(subtransactions=True):
|
||||
bind_port = context.session.query(
|
||||
PortBindingPort).filter_by(port_id=port['id']).first()
|
||||
|
@ -12,6 +12,8 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import validators
|
||||
|
||||
from neutron.api.v2 import attributes as attrs
|
||||
from neutron.common import utils
|
||||
from neutron.db import db_base_plugin_v2
|
||||
@ -44,14 +46,14 @@ class PortSecurityDbMixin(portsecurity_db_common.PortSecurityDbCommon):
|
||||
if port.get('device_owner') and utils.is_port_trusted(port):
|
||||
return (False, has_ip)
|
||||
|
||||
if attrs.is_attr_set(port.get(psec.PORTSECURITY)):
|
||||
if validators.is_attr_set(port.get(psec.PORTSECURITY)):
|
||||
port_security_enabled = port[psec.PORTSECURITY]
|
||||
|
||||
# If port has an ip and security_groups are passed in
|
||||
# conveniently set port_security_enabled to true this way
|
||||
# user doesn't also have to pass in port_security_enabled=True
|
||||
# when creating ports.
|
||||
elif (has_ip and attrs.is_attr_set(port.get('security_groups'))):
|
||||
elif has_ip and validators.is_attr_set(port.get('security_groups')):
|
||||
port_security_enabled = True
|
||||
else:
|
||||
port_security_enabled = self._get_network_security_binding(
|
||||
|
@ -13,6 +13,7 @@
|
||||
# under the License.
|
||||
|
||||
import netaddr
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants
|
||||
from oslo_db import exception as db_exc
|
||||
from oslo_log import log as logging
|
||||
@ -698,7 +699,7 @@ class SecurityGroupDbMixin(ext_sg.SecurityGroupPluginBase):
|
||||
|
||||
def _process_port_create_security_group(self, context, port,
|
||||
security_group_ids):
|
||||
if attributes.is_attr_set(security_group_ids):
|
||||
if validators.is_attr_set(security_group_ids):
|
||||
for security_group_id in security_group_ids:
|
||||
self._create_port_security_group_binding(context, port['id'],
|
||||
security_group_id)
|
||||
@ -745,7 +746,7 @@ class SecurityGroupDbMixin(ext_sg.SecurityGroupPluginBase):
|
||||
:returns: all security groups IDs on port belonging to tenant.
|
||||
"""
|
||||
port = port['port']
|
||||
if not attributes.is_attr_set(port.get(ext_sg.SECURITYGROUPS)):
|
||||
if not validators.is_attr_set(port.get(ext_sg.SECURITYGROUPS)):
|
||||
return
|
||||
if port.get('device_owner') and utils.is_port_trusted(port):
|
||||
return
|
||||
@ -773,7 +774,7 @@ class SecurityGroupDbMixin(ext_sg.SecurityGroupPluginBase):
|
||||
return
|
||||
default_sg = self._ensure_default_security_group(context,
|
||||
port['tenant_id'])
|
||||
if not attributes.is_attr_set(port.get(ext_sg.SECURITYGROUPS)):
|
||||
if not validators.is_attr_set(port.get(ext_sg.SECURITYGROUPS)):
|
||||
port[ext_sg.SECURITYGROUPS] = [default_sg]
|
||||
|
||||
def _check_update_deletes_security_groups(self, port):
|
||||
@ -781,7 +782,7 @@ class SecurityGroupDbMixin(ext_sg.SecurityGroupPluginBase):
|
||||
is either [] or not is_attr_set, otherwise return False
|
||||
"""
|
||||
if (ext_sg.SECURITYGROUPS in port['port'] and
|
||||
not (attributes.is_attr_set(port['port'][ext_sg.SECURITYGROUPS])
|
||||
not (validators.is_attr_set(port['port'][ext_sg.SECURITYGROUPS])
|
||||
and port['port'][ext_sg.SECURITYGROUPS] != [])):
|
||||
return True
|
||||
return False
|
||||
@ -792,7 +793,7 @@ class SecurityGroupDbMixin(ext_sg.SecurityGroupPluginBase):
|
||||
This method is called both for port create and port update.
|
||||
"""
|
||||
if (ext_sg.SECURITYGROUPS in port['port'] and
|
||||
(attributes.is_attr_set(port['port'][ext_sg.SECURITYGROUPS]) and
|
||||
(validators.is_attr_set(port['port'][ext_sg.SECURITYGROUPS]) and
|
||||
port['port'][ext_sg.SECURITYGROUPS] != [])):
|
||||
return True
|
||||
return False
|
||||
|
@ -14,6 +14,7 @@
|
||||
|
||||
import abc
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions as nexception
|
||||
import six
|
||||
@ -51,12 +52,12 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
attr.SHARED: {'allow_post': True,
|
||||
'allow_put': True,
|
||||
'default': False,
|
||||
'convert_to': attr.convert_to_boolean,
|
||||
'convert_to': converters.convert_to_boolean,
|
||||
'is_visible': True,
|
||||
'required_by_policy': True,
|
||||
'enforce_policy': True},
|
||||
'ip_version': {'allow_post': True, 'allow_put': False,
|
||||
'convert_to': attr.convert_to_int,
|
||||
'convert_to': converters.convert_to_int,
|
||||
'validate': {'type:values': [4, 6]},
|
||||
'is_visible': True},
|
||||
},
|
||||
|
@ -15,6 +15,7 @@
|
||||
|
||||
import abc
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import exceptions
|
||||
|
||||
from neutron._i18n import _
|
||||
@ -40,7 +41,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'host': {'allow_post': False, 'allow_put': False,
|
||||
'is_visible': True},
|
||||
'admin_state_up': {'allow_post': False, 'allow_put': True,
|
||||
'convert_to': attr.convert_to_boolean,
|
||||
'convert_to': converters.convert_to_boolean,
|
||||
'is_visible': True},
|
||||
'created_at': {'allow_post': False, 'allow_put': False,
|
||||
'is_visible': True},
|
||||
|
@@ -12,6 +12,8 @@
# License for the specific language governing permissions and limitations
# under the License.

from neutron_lib.api import converters
from neutron_lib.api import validators
from neutron_lib import constants
from neutron_lib import exceptions as nexception
from oslo_config import cfg
@@ -62,7 +64,7 @@ def _validate_allowed_address_pairs(address_pairs, valid_values=None):
for address_pair in address_pairs:
# mac_address is optional, if not set we use the mac on the port
if 'mac_address' in address_pair:
msg = attr._validate_mac_address(address_pair['mac_address'])
msg = validators.validate_mac_address(address_pair['mac_address'])
if msg:
raise webob.exc.HTTPBadRequest(msg)
if 'ip_address' not in address_pair:
@@ -85,22 +87,22 @@ def _validate_allowed_address_pairs(address_pairs, valid_values=None):
raise webob.exc.HTTPBadRequest(msg)

if '/' in ip_address:
msg = attr._validate_subnet(ip_address)
msg = validators.validate_subnet(ip_address)
else:
msg = attr._validate_ip_address(ip_address)
msg = validators.validate_ip_address(ip_address)
if msg:
raise webob.exc.HTTPBadRequest(msg)

attr.validators['type:validate_allowed_address_pairs'] = (
validators.validators['type:validate_allowed_address_pairs'] = (
_validate_allowed_address_pairs)

ADDRESS_PAIRS = 'allowed_address_pairs'
EXTENDED_ATTRIBUTES_2_0 = {
'ports': {
ADDRESS_PAIRS: {'allow_post': True, 'allow_put': True,
'convert_to': attr.convert_none_to_empty_list,
'convert_to': converters.convert_none_to_empty_list,
'convert_list_to':
attr.convert_kvp_list_to_dict,
converters.convert_kvp_list_to_dict,
'validate': {'type:validate_allowed_address_pairs':
None},
'enforce_policy': True,
@ -14,8 +14,9 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
|
||||
from neutron.api import extensions
|
||||
from neutron.api.v2 import attributes as attr
|
||||
from neutron.api.v2 import base
|
||||
from neutron.services.auto_allocate import plugin
|
||||
|
||||
@ -37,7 +38,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'allow_put': True,
|
||||
'default': False,
|
||||
'is_visible': True,
|
||||
'convert_to': attr.convert_to_boolean,
|
||||
'convert_to': converters.convert_to_boolean,
|
||||
'enforce_policy': True,
|
||||
'required_by_policy': True}},
|
||||
}
|
||||
|
@ -14,6 +14,7 @@
|
||||
|
||||
import abc
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import exceptions
|
||||
from oslo_serialization import jsonutils
|
||||
import six
|
||||
@ -39,7 +40,7 @@ def convert_az_string_to_list(az_string):
|
||||
|
||||
def _validate_availability_zone_hints(data, valid_value=None):
|
||||
# syntax check only here. existence of az will be checked later.
|
||||
msg = attr.validate_list_of_unique_strings(data)
|
||||
msg = validators.validate_list_of_unique_strings(data)
|
||||
if msg:
|
||||
return msg
|
||||
az_string = convert_az_list_to_string(data)
|
||||
@ -48,7 +49,7 @@ def _validate_availability_zone_hints(data, valid_value=None):
|
||||
raise exceptions.InvalidInput(error_message=msg)
|
||||
|
||||
|
||||
attr.validators['type:availability_zone_hints'] = (
|
||||
validators.validators['type:availability_zone_hints'] = (
|
||||
_validate_availability_zone_hints)
|
||||
|
||||
# Attribute Map
|
||||
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import exceptions
|
||||
|
||||
from neutron._i18n import _
|
||||
@ -61,21 +62,21 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'required_by_policy': False,
|
||||
'enforce_policy': True},
|
||||
'advertise_floating_ip_host_routes': {
|
||||
'allow_post': True,
|
||||
'allow_put': True,
|
||||
'convert_to': attr.convert_to_boolean,
|
||||
'validate': {'type:boolean': None},
|
||||
'is_visible': True, 'default': True,
|
||||
'required_by_policy': False,
|
||||
'enforce_policy': True},
|
||||
'allow_post': True,
|
||||
'allow_put': True,
|
||||
'convert_to': converters.convert_to_boolean,
|
||||
'validate': {'type:boolean': None},
|
||||
'is_visible': True, 'default': True,
|
||||
'required_by_policy': False,
|
||||
'enforce_policy': True},
|
||||
'advertise_tenant_networks': {
|
||||
'allow_post': True,
|
||||
'allow_put': True,
|
||||
'convert_to': attr.convert_to_boolean,
|
||||
'validate': {'type:boolean': None},
|
||||
'is_visible': True, 'default': True,
|
||||
'required_by_policy': False,
|
||||
'enforce_policy': True},
|
||||
'allow_post': True,
|
||||
'allow_put': True,
|
||||
'convert_to': converters.convert_to_boolean,
|
||||
'validate': {'type:boolean': None},
|
||||
'is_visible': True, 'default': True,
|
||||
'required_by_policy': False,
|
||||
'enforce_policy': True},
|
||||
},
|
||||
'bgp-peers': {
|
||||
'id': {'allow_post': False, 'allow_put': False,
|
||||
|
@ -11,6 +11,7 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import constants
|
||||
|
||||
from neutron.api import extensions
|
||||
@ -22,7 +23,7 @@ EXTENDED_ATTRIBUTES_2_0 = {
|
||||
'use_default_subnetpool': {'allow_post': True,
|
||||
'allow_put': False,
|
||||
'default': False,
|
||||
'convert_to': attributes.convert_to_boolean,
|
||||
'convert_to': converters.convert_to_boolean,
|
||||
'is_visible': False, },
|
||||
},
|
||||
}
|
||||
|
@ -15,6 +15,7 @@
|
||||
|
||||
import re
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import exceptions as n_exc
|
||||
from oslo_config import cfg
|
||||
import six
|
||||
@ -82,7 +83,7 @@ def _validate_fip_dns_name(data, max_len=FQDN_MAX_LEN):
|
||||
|
||||
|
||||
def _validate_dns_domain(data, max_len=FQDN_MAX_LEN):
|
||||
msg = attr._validate_string(data)
|
||||
msg = validators.validate_string(data)
|
||||
if msg:
|
||||
return msg
|
||||
if not data:
|
||||
@ -196,9 +197,9 @@ def convert_to_lowercase(data):
|
||||
raise n_exc.InvalidInput(error_message=msg)
|
||||
|
||||
|
||||
attr.validators['type:dns_name'] = (_validate_dns_name)
|
||||
attr.validators['type:fip_dns_name'] = (_validate_fip_dns_name)
|
||||
attr.validators['type:dns_domain'] = (_validate_dns_domain)
|
||||
validators.validators['type:dns_name'] = _validate_dns_name
|
||||
validators.validators['type:fip_dns_name'] = _validate_fip_dns_name
|
||||
validators.validators['type:dns_domain'] = _validate_dns_domain
|
||||
|
||||
|
||||
DNSNAME = 'dns_name'
|
||||
|
@ -14,13 +14,13 @@
|
||||
|
||||
import abc
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions
|
||||
import six
|
||||
|
||||
from neutron._i18n import _
|
||||
from neutron.api import extensions
|
||||
from neutron.api.v2 import attributes
|
||||
|
||||
DISTRIBUTED = 'distributed'
|
||||
EXTENDED_ATTRIBUTES_2_0 = {
|
||||
@ -29,7 +29,7 @@ EXTENDED_ATTRIBUTES_2_0 = {
|
||||
'allow_put': True,
|
||||
'is_visible': True,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'convert_to': attributes.convert_to_boolean_if_not_none,
|
||||
'convert_to': converters.convert_to_boolean_if_not_none,
|
||||
'enforce_policy': True},
|
||||
}
|
||||
}
|
||||
|
@ -13,11 +13,11 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import exceptions as nexception
|
||||
|
||||
from neutron._i18n import _
|
||||
from neutron.api import extensions
|
||||
from neutron.api.v2 import attributes as attr
|
||||
|
||||
|
||||
class ExternalNetworkInUse(nexception.InUse):
|
||||
@ -32,7 +32,7 @@ EXTENDED_ATTRIBUTES_2_0 = {
|
||||
'allow_put': True,
|
||||
'default': False,
|
||||
'is_visible': True,
|
||||
'convert_to': attr.convert_to_boolean,
|
||||
'convert_to': converters.convert_to_boolean,
|
||||
'enforce_policy': True,
|
||||
'required_by_policy': True}}}
|
||||
|
||||
|
@ -13,11 +13,12 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import exceptions
|
||||
|
||||
from neutron._i18n import _
|
||||
from neutron.api import extensions
|
||||
from neutron.api.v2 import attributes as attr
|
||||
|
||||
|
||||
# ExtraDHcpOpts Exceptions
|
||||
@ -42,7 +43,7 @@ EXTRA_DHCP_OPT_KEY_SPECS = {
|
||||
'required': True},
|
||||
'opt_value': {'type:not_empty_string_or_none': DHCP_OPT_VALUE_MAX_LEN,
|
||||
'required': True},
|
||||
'ip_version': {'convert_to': attr.convert_to_int,
|
||||
'ip_version': {'convert_to': converters.convert_to_int,
|
||||
'type:values': [4, 6],
|
||||
'required': False}
|
||||
}
|
||||
@ -54,15 +55,16 @@ def _validate_extra_dhcp_opt(data, key_specs=None):
|
||||
raise ExtraDhcpOptBadData(data=data)
|
||||
for d in data:
|
||||
if d['opt_name'] in VALID_BLANK_EXTRA_DHCP_OPTS:
|
||||
msg = attr._validate_string_or_none(d['opt_value'],
|
||||
DHCP_OPT_VALUE_MAX_LEN)
|
||||
msg = validators.validate_string_or_none(
|
||||
d['opt_value'], DHCP_OPT_VALUE_MAX_LEN)
|
||||
else:
|
||||
msg = attr._validate_dict(d, key_specs)
|
||||
msg = validators.validate_dict(d, key_specs)
|
||||
if msg:
|
||||
raise ExtraDhcpOptBadData(data=msg)
|
||||
|
||||
|
||||
attr.validators['type:list_of_extra_dhcp_opts'] = _validate_extra_dhcp_opt
|
||||
validators.validators['type:list_of_extra_dhcp_opts'] = (
|
||||
_validate_extra_dhcp_opt)
|
||||
|
||||
# Attribute Map
|
||||
EXTRADHCPOPTS = 'extra_dhcp_opts'
|
||||
|
@ -13,6 +13,7 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions as nexception
|
||||
|
||||
@ -41,7 +42,7 @@ EXTENDED_ATTRIBUTES_2_0 = {
|
||||
'routers': {
|
||||
'routes': {'allow_post': False, 'allow_put': True,
|
||||
'validate': {'type:hostroutes': None},
|
||||
'convert_to': attr.convert_none_to_empty_list,
|
||||
'convert_to': converters.convert_none_to_empty_list,
|
||||
'is_visible': True,
|
||||
'default': constants.ATTR_NOT_SPECIFIED},
|
||||
}
|
||||
|
@ -12,6 +12,8 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import exceptions as nexception
|
||||
|
||||
from neutron._i18n import _
|
||||
@ -76,7 +78,7 @@ def _validate_flavor_service_type(validate_type, valid_values=None):
|
||||
if validate_type not in plugins:
|
||||
raise InvalidFlavorServiceType(service_type=validate_type)
|
||||
|
||||
attr.validators['type:validate_flavor_service_type'] = (
|
||||
validators.validators['type:validate_flavor_service_type'] = (
|
||||
_validate_flavor_service_type)
|
||||
|
||||
FLAVORS = 'flavors'
|
||||
@ -108,7 +110,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'validate': {'type:uuid_list': None},
|
||||
'is_visible': True, 'default': []},
|
||||
'enabled': {'allow_post': True, 'allow_put': True,
|
||||
'convert_to': attr.convert_to_boolean_if_not_none,
|
||||
'convert_to': converters.convert_to_boolean_if_not_none,
|
||||
'default': True,
|
||||
'is_visible': True},
|
||||
},
|
||||
@ -134,7 +136,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'validate': {'type:string': attr.TENANT_ID_MAX_LEN},
|
||||
'is_visible': True},
|
||||
'enabled': {'allow_post': True, 'allow_put': True,
|
||||
'convert_to': attr.convert_to_boolean_if_not_none,
|
||||
'convert_to': converters.convert_to_boolean_if_not_none,
|
||||
'is_visible': True, 'default': True},
|
||||
},
|
||||
}
|
||||
|
@ -15,6 +15,7 @@
|
||||
|
||||
import abc
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import exceptions as nexception
|
||||
from oslo_config import cfg
|
||||
|
||||
@ -94,7 +95,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'is_visible': True, 'default': ''},
|
||||
'admin_state_up': {'allow_post': True, 'allow_put': True,
|
||||
'default': True,
|
||||
'convert_to': attr.convert_to_boolean,
|
||||
'convert_to': converters.convert_to_boolean,
|
||||
'is_visible': True},
|
||||
'status': {'allow_post': False, 'allow_put': False,
|
||||
'is_visible': True},
|
||||
@ -111,7 +112,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'required': True},
|
||||
'external_fixed_ips': {
|
||||
'convert_list_to':
|
||||
attr.convert_kvp_list_to_dict,
|
||||
converters.convert_kvp_list_to_dict,
|
||||
'type:fixed_ips': None,
|
||||
'default': None,
|
||||
'required': False,
|
||||
|
@ -13,8 +13,9 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
|
||||
from neutron.api import extensions
|
||||
from neutron.api.v2 import attributes as attrs
|
||||
from neutron.extensions import l3
|
||||
|
||||
|
||||
@ -29,9 +30,10 @@ EXTENDED_ATTRIBUTES_2_0 = {
|
||||
{'type:dict_or_nodata':
|
||||
{'network_id': {'type:uuid': None, 'required': True},
|
||||
'enable_snat': {'type:boolean': None, 'required': False,
|
||||
'convert_to': attrs.convert_to_boolean},
|
||||
'convert_to':
|
||||
converters.convert_to_boolean},
|
||||
'external_fixed_ips': {
|
||||
'convert_list_to': attrs.convert_kvp_list_to_dict,
|
||||
'convert_list_to': converters.convert_kvp_list_to_dict,
|
||||
'type:fixed_ips': None,
|
||||
'default': None,
|
||||
'required': False}
|
||||
|
@ -12,12 +12,12 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions
|
||||
|
||||
from neutron._i18n import _
|
||||
from neutron.api import extensions
|
||||
from neutron.api.v2 import attributes
|
||||
from neutron.common import constants as n_const
|
||||
|
||||
HA_INFO = 'ha'
|
||||
@ -26,7 +26,7 @@ EXTENDED_ATTRIBUTES_2_0 = {
|
||||
HA_INFO: {'allow_post': True, 'allow_put': True,
|
||||
'default': constants.ATTR_NOT_SPECIFIED, 'is_visible': True,
|
||||
'enforce_policy': True,
|
||||
'convert_to': attributes.convert_to_boolean_if_not_none}
|
||||
'convert_to': converters.convert_to_boolean_if_not_none}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -14,6 +14,7 @@
|
||||
|
||||
import abc
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import exceptions as nexception
|
||||
import six
|
||||
|
||||
@ -57,7 +58,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'is_visible': True},
|
||||
'shared': {'allow_post': True, 'allow_put': False,
|
||||
'is_visible': True, 'default': False,
|
||||
'convert_to': attr.convert_to_boolean}
|
||||
'convert_to': converters.convert_to_boolean}
|
||||
},
|
||||
'metering_label_rules': {
|
||||
'id': {'allow_post': False, 'allow_put': False,
|
||||
@ -71,7 +72,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'validate': {'type:values': ['ingress', 'egress']}},
|
||||
'excluded': {'allow_post': True, 'allow_put': False,
|
||||
'is_visible': True, 'default': False,
|
||||
'convert_to': attr.convert_to_boolean},
|
||||
'convert_to': converters.convert_to_boolean},
|
||||
'remote_ip_prefix': {'allow_post': True, 'allow_put': False,
|
||||
'is_visible': True, 'required_by_policy': True,
|
||||
'validate': {'type:subnet': None}},
|
||||
|
@ -13,13 +13,14 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions as nexception
|
||||
import webob.exc
|
||||
|
||||
from neutron._i18n import _
|
||||
from neutron.api import extensions
|
||||
from neutron.api.v2 import attributes as attr
|
||||
from neutron.extensions import providernet as pnet
|
||||
|
||||
SEGMENTS = 'segments'
|
||||
@ -39,7 +40,7 @@ def _convert_and_validate_segments(segments, valid_values=None):
|
||||
segment.setdefault(pnet.PHYSICAL_NETWORK, constants.ATTR_NOT_SPECIFIED)
|
||||
segmentation_id = segment.get(pnet.SEGMENTATION_ID)
|
||||
if segmentation_id:
|
||||
segment[pnet.SEGMENTATION_ID] = attr.convert_to_int(
|
||||
segment[pnet.SEGMENTATION_ID] = converters.convert_to_int(
|
||||
segmentation_id)
|
||||
else:
|
||||
segment[pnet.SEGMENTATION_ID] = constants.ATTR_NOT_SPECIFIED
|
||||
@ -67,7 +68,7 @@ def check_duplicate_segments(segments, is_partial_func=None):
|
||||
raise SegmentsContainDuplicateEntry()
|
||||
|
||||
|
||||
attr.validators['type:convert_segments'] = (
|
||||
validators.validators['type:convert_segments'] = (
|
||||
_convert_and_validate_segments)
|
||||
|
||||
|
||||
@ -75,7 +76,7 @@ EXTENDED_ATTRIBUTES_2_0 = {
|
||||
'networks': {
|
||||
SEGMENTS: {'allow_post': True, 'allow_put': True,
|
||||
'validate': {'type:convert_segments': None},
|
||||
'convert_list_to': attr.convert_kvp_list_to_dict,
|
||||
'convert_list_to': converters.convert_kvp_list_to_dict,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'enforce_policy': True,
|
||||
'is_visible': True},
|
||||
|
@ -12,12 +12,12 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions as nexception
|
||||
|
||||
from neutron._i18n import _
|
||||
from neutron.api import extensions
|
||||
from neutron.api.v2 import attributes
|
||||
|
||||
|
||||
DEFAULT_PORT_SECURITY = True
|
||||
@ -37,14 +37,14 @@ PORTSECURITY = 'port_security_enabled'
|
||||
EXTENDED_ATTRIBUTES_2_0 = {
|
||||
'networks': {
|
||||
PORTSECURITY: {'allow_post': True, 'allow_put': True,
|
||||
'convert_to': attributes.convert_to_boolean,
|
||||
'convert_to': converters.convert_to_boolean,
|
||||
'enforce_policy': True,
|
||||
'default': DEFAULT_PORT_SECURITY,
|
||||
'is_visible': True},
|
||||
},
|
||||
'ports': {
|
||||
PORTSECURITY: {'allow_post': True, 'allow_put': True,
|
||||
'convert_to': attributes.convert_to_boolean,
|
||||
'convert_to': converters.convert_to_boolean,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'enforce_policy': True,
|
||||
'is_visible': True},
|
||||
|
@ -13,12 +13,13 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions as n_exc
|
||||
|
||||
from neutron._i18n import _
|
||||
from neutron.api import extensions
|
||||
from neutron.api.v2 import attributes
|
||||
|
||||
|
||||
NETWORK_TYPE = 'provider:network_type'
|
||||
@ -44,7 +45,7 @@ EXTENDED_ATTRIBUTES_2_0 = {
|
||||
'enforce_policy': True,
|
||||
'is_visible': True},
|
||||
SEGMENTATION_ID: {'allow_post': True, 'allow_put': True,
|
||||
'convert_to': attributes.convert_to_int,
|
||||
'convert_to': converters.convert_to_int,
|
||||
'enforce_policy': True,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'is_visible': True},
|
||||
@ -58,7 +59,7 @@ def _raise_if_updates_provider_attributes(attrs):
|
||||
This method is used for plugins that do not support
|
||||
updating provider networks.
|
||||
"""
|
||||
if any(attributes.is_attr_set(attrs.get(a)) for a in ATTRIBUTES):
|
||||
if any(validators.is_attr_set(attrs.get(a)) for a in ATTRIBUTES):
|
||||
msg = _("Plugin does not support updating provider attributes")
|
||||
raise n_exc.InvalidInput(error_message=msg)
|
||||
|
||||
|
@ -16,6 +16,7 @@
|
||||
import abc
|
||||
import itertools
|
||||
|
||||
from neutron_lib.api import converters
|
||||
import six
|
||||
|
||||
from neutron.api import extensions
|
||||
@ -54,7 +55,7 @@ RESOURCE_ATTRIBUTE_MAP = {
|
||||
'validate': {'type:string': None}},
|
||||
'shared': {'allow_post': True, 'allow_put': True,
|
||||
'is_visible': True, 'default': False,
|
||||
'convert_to': attr.convert_to_boolean},
|
||||
'convert_to': converters.convert_to_boolean},
|
||||
'tenant_id': {'allow_post': True, 'allow_put': False,
|
||||
'required_by_policy': True,
|
||||
'is_visible': True},
|
||||
@ -86,7 +87,7 @@ SUB_RESOURCE_ATTRIBUTE_MAP = {
|
||||
'parameters': dict(QOS_RULE_COMMON_FIELDS,
|
||||
**{'dscp_mark': {
|
||||
'allow_post': True, 'allow_put': True,
|
||||
'convert_to': attr.convert_to_int,
|
||||
'convert_to': converters.convert_to_int,
|
||||
'is_visible': True, 'default': None,
|
||||
'validate': {'type:values': common_constants.
|
||||
VALID_DSCP_MARKS}}})
|
||||
|
@ -13,6 +13,7 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import exceptions as n_exc
|
||||
from oslo_config import cfg
|
||||
from oslo_utils import importutils
|
||||
@ -20,7 +21,6 @@ import webob
|
||||
|
||||
from neutron._i18n import _
|
||||
from neutron.api import extensions
|
||||
from neutron.api.v2 import attributes
|
||||
from neutron.api.v2 import base
|
||||
from neutron.api.v2 import resource
|
||||
from neutron.common import constants as const
|
||||
@ -56,7 +56,7 @@ class QuotaSetsController(wsgi.Controller):
|
||||
attr_dict[quota_resource] = {
|
||||
'allow_post': False,
|
||||
'allow_put': True,
|
||||
'convert_to': attributes.convert_to_int,
|
||||
'convert_to': converters.convert_to_int,
|
||||
'validate': {'type:range': [-1, const.DB_INTEGER_MAX_VALUE]},
|
||||
'is_visible': True}
|
||||
self._update_extended_attributes = False
|
||||
|
@ -16,6 +16,7 @@
|
||||
import abc
|
||||
|
||||
import netaddr
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants as const
|
||||
from neutron_lib import exceptions as nexception
|
||||
from oslo_config import cfg
|
||||
@ -211,7 +212,7 @@ def _validate_name_not_default(data, valid_values=None):
|
||||
raise SecurityGroupDefaultAlreadyExists()
|
||||
|
||||
|
||||
attr.validators['type:name_not_default'] = _validate_name_not_default
|
||||
validators.validators['type:name_not_default'] = _validate_name_not_default
|
||||
|
||||
# TODO(amotoki): const.IP_PROTOCOL_MAP now comes from neutron-lib,
|
||||
# so we cannot add PROTO_NAME_IPV6_ICMP_LEGACY to const.IP_PROTOCOL_MAP
|
||||
|
@ -12,10 +12,11 @@
|
||||
# under the License.
|
||||
|
||||
import abc
|
||||
import six
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import exceptions
|
||||
from oslo_log import log as logging
|
||||
import six
|
||||
import webob.exc
|
||||
|
||||
from neutron._i18n import _
|
||||
@ -61,7 +62,7 @@ def get_parent_resource_and_id(kwargs):
|
||||
|
||||
|
||||
def validate_tag(tag):
|
||||
msg = attributes._validate_string(tag, MAX_TAG_LEN)
|
||||
msg = validators.validate_string(tag, MAX_TAG_LEN)
|
||||
if msg:
|
||||
raise exceptions.InvalidInput(error_message=msg)
|
||||
|
||||
@ -69,7 +70,7 @@ def validate_tag(tag):
|
||||
def validate_tags(body):
|
||||
if 'tags' not in body:
|
||||
raise exceptions.InvalidInput(error_message="Invalid tags body.")
|
||||
msg = attributes.validate_list_of_unique_strings(body['tags'], MAX_TAG_LEN)
|
||||
msg = validators.validate_list_of_unique_strings(body['tags'], MAX_TAG_LEN)
|
||||
if msg:
|
||||
raise exceptions.InvalidInput(error_message=msg)
|
||||
|
||||
|
@ -12,6 +12,8 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions
|
||||
from oslo_config import cfg
|
||||
@ -19,7 +21,6 @@ from oslo_log import log as logging
|
||||
|
||||
from neutron._i18n import _, _LI
|
||||
from neutron.api import extensions
|
||||
from neutron.api.v2 import attributes
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
@ -33,7 +34,7 @@ VLANTRANSPARENT = 'vlan_transparent'
|
||||
EXTENDED_ATTRIBUTES_2_0 = {
|
||||
'networks': {
|
||||
VLANTRANSPARENT: {'allow_post': True, 'allow_put': False,
|
||||
'convert_to': attributes.convert_to_boolean,
|
||||
'convert_to': converters.convert_to_boolean,
|
||||
'default': constants.ATTR_NOT_SPECIFIED,
|
||||
'is_visible': True},
|
||||
},
|
||||
@ -50,7 +51,7 @@ def disable_extension_by_config(aliases):
|
||||
def get_vlan_transparent(network):
|
||||
return (network['vlan_transparent']
|
||||
if ('vlan_transparent' in network and
|
||||
attributes.is_attr_set(network['vlan_transparent']))
|
||||
validators.is_attr_set(network['vlan_transparent']))
|
||||
else False)
|
||||
|
||||
|
||||
|
@ -13,12 +13,12 @@
|
||||
import abc
|
||||
|
||||
import netaddr
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants
|
||||
from oslo_utils import uuidutils
|
||||
import six
|
||||
|
||||
from neutron._i18n import _
|
||||
from neutron.api.v2 import attributes
|
||||
from neutron.common import ipv6_utils
|
||||
from neutron.common import utils as common_utils
|
||||
from neutron.ipam import exceptions as ipam_exc
|
||||
@ -276,11 +276,11 @@ class SubnetRequestFactory(object):
|
||||
def get_request(cls, context, subnet, subnetpool):
|
||||
cidr = subnet.get('cidr')
|
||||
subnet_id = subnet.get('id', uuidutils.generate_uuid())
|
||||
is_any_subnetpool_request = not attributes.is_attr_set(cidr)
|
||||
is_any_subnetpool_request = not validators.is_attr_set(cidr)
|
||||
|
||||
if is_any_subnetpool_request:
|
||||
prefixlen = subnet['prefixlen']
|
||||
if not attributes.is_attr_set(prefixlen):
|
||||
if not validators.is_attr_set(prefixlen):
|
||||
prefixlen = int(subnetpool['default_prefixlen'])
|
||||
|
||||
return AnySubnetRequest(
|
||||
|
@ -13,6 +13,7 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import exceptions as n_exc
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
@ -90,7 +91,7 @@ class QuotaController(utils.NeutronPecanController):
|
||||
attr_dict[quota_resource] = {
|
||||
'allow_post': False,
|
||||
'allow_put': True,
|
||||
'convert_to': attributes.convert_to_int,
|
||||
'convert_to': converters.convert_to_int,
|
||||
'validate': {
|
||||
'type:range': [-1, constants.DB_INTEGER_MAX_VALUE]},
|
||||
'is_visible': True}
|
||||
|
@ -13,11 +13,11 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
|
||||
from neutron._i18n import _LE, _LI
|
||||
from neutron.api.v2 import attributes
|
||||
from neutron.callbacks import events
|
||||
from neutron.callbacks import registry
|
||||
from neutron.callbacks import resources
|
||||
@ -42,7 +42,7 @@ class DNSExtensionDriver(api.ExtensionDriver):
|
||||
|
||||
def process_create_network(self, plugin_context, request_data, db_data):
|
||||
dns_domain = request_data.get(dns.DNSDOMAIN)
|
||||
if not attributes.is_attr_set(dns_domain):
|
||||
if not validators.is_attr_set(dns_domain):
|
||||
return
|
||||
|
||||
if dns_domain:
|
||||
@ -52,7 +52,7 @@ class DNSExtensionDriver(api.ExtensionDriver):
|
||||
|
||||
def process_update_network(self, plugin_context, request_data, db_data):
|
||||
new_value = request_data.get(dns.DNSDOMAIN)
|
||||
if not attributes.is_attr_set(new_value):
|
||||
if not validators.is_attr_set(new_value):
|
||||
return
|
||||
|
||||
current_dns_domain = db_data.get(dns.DNSDOMAIN)
|
||||
|
@ -13,10 +13,10 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from oslo_log import log as logging
|
||||
|
||||
from neutron._i18n import _LI
|
||||
from neutron.api.v2 import attributes as attrs
|
||||
from neutron.common import utils
|
||||
from neutron.db import common_db_mixin
|
||||
from neutron.db import portsecurity_db_common as ps_db_common
|
||||
@ -75,7 +75,7 @@ class PortSecurityExtensionDriver(api.ExtensionDriver,
|
||||
if port.get('device_owner') and utils.is_port_trusted(port):
|
||||
return False
|
||||
|
||||
if attrs.is_attr_set(port.get(psec.PORTSECURITY)):
|
||||
if validators.is_attr_set(port.get(psec.PORTSECURITY)):
|
||||
port_security_enabled = port[psec.PORTSECURITY]
|
||||
else:
|
||||
port_security_enabled = self._get_network_security_binding(
|
||||
|
@ -13,6 +13,7 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions as exc
|
||||
from oslo_config import cfg
|
||||
@ -22,7 +23,6 @@ import six
|
||||
import stevedore
|
||||
|
||||
from neutron._i18n import _, _LE, _LI, _LW
|
||||
from neutron.api.v2 import attributes
|
||||
from neutron.extensions import external_net
|
||||
from neutron.extensions import multiprovidernet as mpnet
|
||||
from neutron.extensions import portbindings
|
||||
@ -92,7 +92,7 @@ class TypeManager(stevedore.named.NamedExtensionManager):
|
||||
segmentation_id) = (self._get_attribute(segment, attr)
|
||||
for attr in provider.ATTRIBUTES)
|
||||
|
||||
if attributes.is_attr_set(network_type):
|
||||
if validators.is_attr_set(network_type):
|
||||
segment = {api.NETWORK_TYPE: network_type,
|
||||
api.PHYSICAL_NETWORK: physical_network,
|
||||
api.SEGMENTATION_ID: segmentation_id}
|
||||
@ -103,15 +103,15 @@ class TypeManager(stevedore.named.NamedExtensionManager):
|
||||
raise exc.InvalidInput(error_message=msg)
|
||||
|
||||
def _process_provider_create(self, network):
|
||||
if any(attributes.is_attr_set(network.get(attr))
|
||||
if any(validators.is_attr_set(network.get(attr))
|
||||
for attr in provider.ATTRIBUTES):
|
||||
# Verify that multiprovider and provider attributes are not set
|
||||
# at the same time.
|
||||
if attributes.is_attr_set(network.get(mpnet.SEGMENTS)):
|
||||
if validators.is_attr_set(network.get(mpnet.SEGMENTS)):
|
||||
raise mpnet.SegmentsSetInConjunctionWithProviders()
|
||||
segment = self._get_provider_segment(network)
|
||||
return [self._process_provider_segment(segment)]
|
||||
elif attributes.is_attr_set(network.get(mpnet.SEGMENTS)):
|
||||
elif validators.is_attr_set(network.get(mpnet.SEGMENTS)):
|
||||
segments = [self._process_provider_segment(s)
|
||||
for s in network[mpnet.SEGMENTS]]
|
||||
mpnet.check_duplicate_segments(segments, self.is_partial_segment)
|
||||
@ -133,10 +133,10 @@ class TypeManager(stevedore.named.NamedExtensionManager):
|
||||
def network_matches_filters(self, network, filters):
|
||||
if not filters:
|
||||
return True
|
||||
if any(attributes.is_attr_set(network.get(attr))
|
||||
if any(validators.is_attr_set(network.get(attr))
|
||||
for attr in provider.ATTRIBUTES):
|
||||
segments = [self._get_provider_segment(network)]
|
||||
elif attributes.is_attr_set(network.get(mpnet.SEGMENTS)):
|
||||
elif validators.is_attr_set(network.get(mpnet.SEGMENTS)):
|
||||
segments = self._get_attribute(network, mpnet.SEGMENTS)
|
||||
else:
|
||||
return True
|
||||
|
@ -14,6 +14,7 @@
|
||||
# under the License.
|
||||
|
||||
from eventlet import greenthread
|
||||
from neutron_lib.api import validators
|
||||
from neutron_lib import constants as const
|
||||
from neutron_lib import exceptions as exc
|
||||
from oslo_concurrency import lockutils
|
||||
@ -261,13 +262,13 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
|
||||
host = attrs.get(portbindings.HOST_ID) or ''
|
||||
|
||||
original_host = binding.host
|
||||
if (attributes.is_attr_set(host) and
|
||||
if (validators.is_attr_set(host) and
|
||||
original_host != host):
|
||||
binding.host = host
|
||||
changes = True
|
||||
|
||||
vnic_type = attrs and attrs.get(portbindings.VNIC_TYPE)
|
||||
if (attributes.is_attr_set(vnic_type) and
|
||||
if (validators.is_attr_set(vnic_type) and
|
||||
binding.vnic_type != vnic_type):
|
||||
binding.vnic_type = vnic_type
|
||||
changes = True
|
||||
@ -1335,7 +1336,7 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
|
||||
attrs = port[attributes.PORT]
|
||||
|
||||
host = attrs and attrs.get(portbindings.HOST_ID)
|
||||
host_set = attributes.is_attr_set(host)
|
||||
host_set = validators.is_attr_set(host)
|
||||
|
||||
if not host_set:
|
||||
LOG.error(_LE("No Host supplied to bind DVR Port %s"), id)
|
||||
|
@ -13,918 +13,16 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import string
|
||||
|
||||
import mock
|
||||
import netaddr
|
||||
from neutron_lib.api import converters
|
||||
from neutron_lib import constants
|
||||
from neutron_lib import exceptions as n_exc
|
||||
from oslo_utils import uuidutils
|
||||
import testtools
|
||||
import webob.exc
|
||||
|
||||
from neutron._i18n import _
|
||||
from neutron.api.v2 import attributes
|
||||
from neutron import context
|
||||
from neutron.tests import base
|
||||
from neutron.tests import tools
|
||||
|
||||
|
||||
class TestAttributes(base.BaseTestCase):
|
||||
|
||||
def _construct_dict_and_constraints(self):
|
||||
"""Constructs a test dictionary and a definition of constraints.
|
||||
:return: A (dictionary, constraint) tuple
|
||||
"""
|
||||
constraints = {'key1': {'type:values': ['val1', 'val2'],
|
||||
'required': True},
|
||||
'key2': {'type:string': None,
|
||||
'required': False},
|
||||
'key3': {'type:dict': {'k4': {'type:string': None,
|
||||
'required': True}},
|
||||
'required': True}}
|
||||
|
||||
dictionary = {'key1': 'val1',
|
||||
'key2': 'a string value',
|
||||
'key3': {'k4': 'a string value'}}
|
||||
|
||||
return dictionary, constraints
|
||||
|
||||
def test_is_attr_set(self):
|
||||
data = constants.ATTR_NOT_SPECIFIED
|
||||
self.assertIs(attributes.is_attr_set(data), False)
|
||||
|
||||
data = None
|
||||
self.assertIs(attributes.is_attr_set(data), False)
|
||||
|
||||
data = "I'm set"
|
||||
self.assertIs(attributes.is_attr_set(data), True)
|
||||
|
||||
def test_validate_values(self):
|
||||
msg = attributes._validate_values(4, [4, 6])
|
||||
self.assertIsNone(msg)
|
||||
|
||||
msg = attributes._validate_values(4, (4, 6))
|
||||
self.assertIsNone(msg)
|
||||
|
||||
msg = attributes._validate_values(7, [4, 6])
|
||||
self.assertEqual("'7' is not in [4, 6]", msg)
|
||||
|
||||
msg = attributes._validate_values(7, (4, 6))
|
||||
self.assertEqual("'7' is not in (4, 6)", msg)
|
||||
|
||||
def test_validate_not_empty_string(self):
|
||||
msg = attributes._validate_not_empty_string(' ', None)
|
||||
self.assertEqual(u"' ' Blank strings are not permitted", msg)
|
||||
|
||||
def test_validate_not_empty_string_or_none(self):
|
||||
msg = attributes._validate_not_empty_string_or_none(' ', None)
|
||||
self.assertEqual(u"' ' Blank strings are not permitted", msg)
|
||||
|
||||
msg = attributes._validate_not_empty_string_or_none(None, None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
def test_validate_string_or_none(self):
|
||||
msg = attributes._validate_not_empty_string_or_none('test', None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
msg = attributes._validate_not_empty_string_or_none(None, None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
def test_validate_string(self):
|
||||
msg = attributes._validate_string(None, None)
|
||||
self.assertEqual("'None' is not a valid string", msg)
|
||||
|
||||
# 0 == len(data) == max_len
|
||||
msg = attributes._validate_string("", 0)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# 0 == len(data) < max_len
|
||||
msg = attributes._validate_string("", 9)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# 0 < len(data) < max_len
|
||||
msg = attributes._validate_string("123456789", 10)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# 0 < len(data) == max_len
|
||||
msg = attributes._validate_string("123456789", 9)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# 0 < max_len < len(data)
|
||||
msg = attributes._validate_string("1234567890", 9)
|
||||
self.assertEqual("'1234567890' exceeds maximum length of 9", msg)
|
||||
|
||||
msg = attributes._validate_string("123456789", None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
def test_validate_list_of_unique_strings(self):
|
||||
data = "TEST"
|
||||
msg = attributes.validate_list_of_unique_strings(data, None)
|
||||
self.assertEqual("'TEST' is not a list", msg)
|
||||
|
||||
data = ["TEST01", "TEST02", "TEST01"]
|
||||
msg = attributes.validate_list_of_unique_strings(data, None)
|
||||
self.assertEqual(
|
||||
"Duplicate items in the list: 'TEST01, TEST02, TEST01'", msg)
|
||||
|
||||
data = ["12345678", "123456789"]
|
||||
msg = attributes.validate_list_of_unique_strings(data, 8)
|
||||
self.assertEqual("'123456789' exceeds maximum length of 8", msg)
|
||||
|
||||
data = ["TEST01", "TEST02", "TEST03"]
|
||||
msg = attributes.validate_list_of_unique_strings(data, None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
def test_validate_no_whitespace(self):
|
||||
data = 'no_white_space'
|
||||
result = attributes._validate_no_whitespace(data)
|
||||
self.assertEqual(data, result)
|
||||
|
||||
self.assertRaises(n_exc.InvalidInput,
|
||||
attributes._validate_no_whitespace,
|
||||
'i have whitespace')
|
||||
|
||||
self.assertRaises(n_exc.InvalidInput,
|
||||
attributes._validate_no_whitespace,
|
||||
'i\thave\twhitespace')
|
||||
|
||||
for ws in string.whitespace:
|
||||
self.assertRaises(n_exc.InvalidInput,
|
||||
attributes._validate_no_whitespace,
|
||||
'%swhitespace-at-head' % ws)
|
||||
self.assertRaises(n_exc.InvalidInput,
|
||||
attributes._validate_no_whitespace,
|
||||
'whitespace-at-tail%s' % ws)
|
||||
|
||||
def test_validate_range(self):
|
||||
msg = attributes._validate_range(1, [1, 9])
|
||||
self.assertIsNone(msg)
|
||||
|
||||
msg = attributes._validate_range(5, [1, 9])
|
||||
self.assertIsNone(msg)
|
||||
|
||||
msg = attributes._validate_range(9, [1, 9])
|
||||
self.assertIsNone(msg)
|
||||
|
||||
msg = attributes._validate_range(1, (1, 9))
|
||||
self.assertIsNone(msg)
|
||||
|
||||
msg = attributes._validate_range(5, (1, 9))
|
||||
self.assertIsNone(msg)
|
||||
|
||||
msg = attributes._validate_range(9, (1, 9))
|
||||
self.assertIsNone(msg)
|
||||
|
||||
msg = attributes._validate_range(0, [1, 9])
|
||||
self.assertEqual("'0' is too small - must be at least '1'", msg)
|
||||
|
||||
msg = attributes._validate_range(10, (1, 9))
|
||||
self.assertEqual("'10' is too large - must be no larger than '9'", msg)
|
||||
|
||||
msg = attributes._validate_range("bogus", (1, 9))
|
||||
self.assertEqual("'bogus' is not an integer", msg)
|
||||
|
||||
msg = attributes._validate_range(10, (attributes.UNLIMITED,
|
||||
attributes.UNLIMITED))
|
||||
self.assertIsNone(msg)
|
||||
|
||||
msg = attributes._validate_range(10, (1, attributes.UNLIMITED))
|
||||
self.assertIsNone(msg)
|
||||
|
||||
msg = attributes._validate_range(1, (attributes.UNLIMITED, 9))
|
||||
self.assertIsNone(msg)
|
||||
|
||||
msg = attributes._validate_range(-1, (0, attributes.UNLIMITED))
|
||||
self.assertEqual("'-1' is too small - must be at least '0'", msg)
|
||||
|
||||
msg = attributes._validate_range(10, (attributes.UNLIMITED, 9))
|
||||
self.assertEqual("'10' is too large - must be no larger than '9'", msg)
|
||||
|
||||
def _test_validate_mac_address(self, validator, allow_none=False):
|
||||
mac_addr = "ff:16:3e:4f:00:00"
|
||||
msg = validator(mac_addr)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
mac_addr = "ffa:16:3e:4f:00:00"
|
||||
msg = validator(mac_addr)
|
||||
err_msg = "'%s' is not a valid MAC address"
|
||||
self.assertEqual(err_msg % mac_addr, msg)
|
||||
|
||||
for invalid_mac_addr in constants.INVALID_MAC_ADDRESSES:
|
||||
msg = validator(invalid_mac_addr)
|
||||
self.assertEqual(err_msg % invalid_mac_addr, msg)
|
||||
|
||||
mac_addr = "123"
|
||||
msg = validator(mac_addr)
|
||||
self.assertEqual(err_msg % mac_addr, msg)
|
||||
|
||||
mac_addr = None
|
||||
msg = validator(mac_addr)
|
||||
if allow_none:
|
||||
self.assertIsNone(msg)
|
||||
else:
|
||||
self.assertEqual(err_msg % mac_addr, msg)
|
||||
|
||||
mac_addr = "ff:16:3e:4f:00:00\r"
|
||||
msg = validator(mac_addr)
|
||||
self.assertEqual(err_msg % mac_addr, msg)
|
||||
|
||||
def test_validate_mac_address(self):
|
||||
self._test_validate_mac_address(attributes._validate_mac_address)
|
||||
|
||||
def test_validate_mac_address_or_none(self):
|
||||
self._test_validate_mac_address(
|
||||
attributes._validate_mac_address_or_none, allow_none=True)
|
||||
|
||||
def test_validate_ip_address(self):
|
||||
ip_addr = '1.1.1.1'
|
||||
msg = attributes._validate_ip_address(ip_addr)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
ip_addr = '1111.1.1.1'
|
||||
msg = attributes._validate_ip_address(ip_addr)
|
||||
self.assertEqual("'%s' is not a valid IP address" % ip_addr, msg)
|
||||
|
||||
# Depending on platform to run UTs, this case might or might not be
|
||||
# an equivalent to test_validate_ip_address_bsd.
|
||||
ip_addr = '1' * 59
|
||||
msg = attributes._validate_ip_address(ip_addr)
|
||||
self.assertEqual("'%s' is not a valid IP address" % ip_addr, msg)
|
||||
|
||||
ip_addr = '1.1.1.1 has whitespace'
|
||||
msg = attributes._validate_ip_address(ip_addr)
|
||||
self.assertEqual("'%s' is not a valid IP address" % ip_addr, msg)
|
||||
|
||||
ip_addr = '111.1.1.1\twhitespace'
|
||||
msg = attributes._validate_ip_address(ip_addr)
|
||||
self.assertEqual("'%s' is not a valid IP address" % ip_addr, msg)
|
||||
|
||||
ip_addr = '111.1.1.1\nwhitespace'
|
||||
msg = attributes._validate_ip_address(ip_addr)
|
||||
self.assertEqual("'%s' is not a valid IP address" % ip_addr, msg)
|
||||
|
||||
for ws in string.whitespace:
|
||||
ip_addr = '%s111.1.1.1' % ws
|
||||
msg = attributes._validate_ip_address(ip_addr)
|
||||
self.assertEqual("'%s' is not a valid IP address" % ip_addr, msg)
|
||||
|
||||
for ws in string.whitespace:
|
||||
ip_addr = '111.1.1.1%s' % ws
|
||||
msg = attributes._validate_ip_address(ip_addr)
|
||||
self.assertEqual("'%s' is not a valid IP address" % ip_addr, msg)
|
||||
|
||||
def test_validate_ip_address_with_leading_zero(self):
|
||||
ip_addr = '1.1.1.01'
|
||||
expected_msg = ("'%(data)s' is not an accepted IP address, "
|
||||
"'%(ip)s' is recommended")
|
||||
msg = attributes._validate_ip_address(ip_addr)
|
||||
self.assertEqual(expected_msg % {"data": ip_addr, "ip": '1.1.1.1'},
|
||||
msg)
|
||||
|
||||
ip_addr = '1.1.1.011'
|
||||
msg = attributes._validate_ip_address(ip_addr)
|
||||
self.assertEqual(expected_msg % {"data": ip_addr, "ip": '1.1.1.11'},
|
||||
msg)
|
||||
|
||||
ip_addr = '1.1.1.09'
|
||||
msg = attributes._validate_ip_address(ip_addr)
|
||||
self.assertEqual(expected_msg % {"data": ip_addr, "ip": '1.1.1.9'},
|
||||
msg)
|
||||
|
||||
ip_addr = "fe80:0:0:0:0:0:0:0001"
|
||||
msg = attributes._validate_ip_address(ip_addr)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
def test_validate_ip_address_bsd(self):
|
||||
# NOTE(yamamoto): On NetBSD and OS X, netaddr.IPAddress() accepts
|
||||
# '1' * 59 as a valid address. The behaviour is inherited from
|
||||
# libc behaviour there. This test ensures that our validator reject
|
||||
# such addresses on such platforms by mocking netaddr to emulate
|
||||
# the behaviour.
|
||||
ip_addr = '1' * 59
|
||||
with mock.patch('netaddr.IPAddress') as ip_address_cls:
|
||||
msg = attributes._validate_ip_address(ip_addr)
|
||||
ip_address_cls.assert_called_once_with(ip_addr,
|
||||
flags=netaddr.core.ZEROFILL)
|
||||
self.assertEqual("'%s' is not a valid IP address" % ip_addr, msg)
|
||||
|
||||
def test_validate_ip_pools(self):
|
||||
pools = [[{'end': '10.0.0.254'}],
|
||||
[{'start': '10.0.0.254'}],
|
||||
[{'start': '1000.0.0.254',
|
||||
'end': '1.1.1.1'}],
|
||||
[{'start': '10.0.0.2', 'end': '10.0.0.254',
|
||||
'forza': 'juve'}],
|
||||
[{'start': '10.0.0.2', 'end': '10.0.0.254'},
|
||||
{'end': '10.0.0.254'}],
|
||||
[None],
|
||||
None]
|
||||
for pool in pools:
|
||||
msg = attributes._validate_ip_pools(pool)
|
||||
self.assertIsNotNone(msg)
|
||||
|
||||
pools = [[{'end': '10.0.0.254', 'start': '10.0.0.2'},
|
||||
{'start': '11.0.0.2', 'end': '11.1.1.1'}],
|
||||
[{'start': '11.0.0.2', 'end': '11.0.0.100'}]]
|
||||
for pool in pools:
|
||||
msg = attributes._validate_ip_pools(pool)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
invalid_ip = '10.0.0.2\r'
|
||||
pools = [[{'end': '10.0.0.254', 'start': invalid_ip}]]
|
||||
for pool in pools:
|
||||
msg = attributes._validate_ip_pools(pool)
|
||||
self.assertEqual("'%s' is not a valid IP address" % invalid_ip,
|
||||
msg)
|
||||
|
||||
def test_validate_fixed_ips(self):
|
||||
fixed_ips = [
|
||||
{'data': [{'subnet_id': '00000000-ffff-ffff-ffff-000000000000',
|
||||
'ip_address': '1111.1.1.1'}],
|
||||
'error_msg': "'1111.1.1.1' is not a valid IP address"},
|
||||
{'data': [{'subnet_id': 'invalid',
|
||||
'ip_address': '1.1.1.1'}],
|
||||
'error_msg': "'invalid' is not a valid UUID"},
|
||||
{'data': None,
|
||||
'error_msg': "Invalid data format for fixed IP: 'None'"},
|
||||
{'data': "1.1.1.1",
|
||||
'error_msg': "Invalid data format for fixed IP: '1.1.1.1'"},
|
||||
{'data': ['00000000-ffff-ffff-ffff-000000000000', '1.1.1.1'],
|
||||
'error_msg': "Invalid data format for fixed IP: "
|
||||
"'00000000-ffff-ffff-ffff-000000000000'"},
|
||||
{'data': [['00000000-ffff-ffff-ffff-000000000000', '1.1.1.1']],
|
||||
'error_msg': "Invalid data format for fixed IP: "
|
||||
"'['00000000-ffff-ffff-ffff-000000000000', "
|
||||
"'1.1.1.1']'"},
|
||||
{'data': [{'subnet_id': '00000000-0fff-ffff-ffff-000000000000',
|
||||
'ip_address': '1.1.1.1'},
|
||||
{'subnet_id': '00000000-ffff-ffff-ffff-000000000000',
|
||||
'ip_address': '1.1.1.1'}],
|
||||
'error_msg': "Duplicate IP address '1.1.1.1'"}]
|
||||
for fixed in fixed_ips:
|
||||
msg = attributes._validate_fixed_ips(fixed['data'])
|
||||
self.assertEqual(fixed['error_msg'], msg)
|
||||
|
||||
fixed_ips = [[{'subnet_id': '00000000-ffff-ffff-ffff-000000000000',
|
||||
'ip_address': '1.1.1.1'}],
|
||||
[{'subnet_id': '00000000-0fff-ffff-ffff-000000000000',
|
||||
'ip_address': '1.1.1.1'},
|
||||
{'subnet_id': '00000000-ffff-ffff-ffff-000000000000',
|
||||
'ip_address': '1.1.1.2'}]]
|
||||
for fixed in fixed_ips:
|
||||
msg = attributes._validate_fixed_ips(fixed)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
def test_validate_nameservers(self):
|
||||
ns_pools = [['1.1.1.2', '1.1.1.2'],
|
||||
['www.hostname.com', 'www.hostname.com'],
|
||||
['1000.0.0.1'],
|
||||
['www.hostname.com'],
|
||||
['www.great.marathons.to.travel'],
|
||||
['valid'],
|
||||
['77.hostname.com'],
|
||||
['1' * 59],
|
||||
['www.internal.hostname.com'],
|
||||
None]
|
||||
|
||||
for ns in ns_pools:
|
||||
msg = attributes._validate_nameservers(ns, None)
|
||||
self.assertIsNotNone(msg)
|
||||
|
||||
ns_pools = [['100.0.0.2'],
|
||||
['1.1.1.1', '1.1.1.2']]
|
||||
|
||||
for ns in ns_pools:
|
||||
msg = attributes._validate_nameservers(ns, None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
def test_validate_hostroutes(self):
|
||||
hostroute_pools = [[{'destination': '100.0.0.0/24'}],
|
||||
[{'nexthop': '10.0.2.20'}],
|
||||
[{'nexthop': '10.0.2.20',
|
||||
'forza': 'juve',
|
||||
'destination': '100.0.0.0/8'}],
|
||||
[{'nexthop': '1110.0.2.20',
|
||||
'destination': '100.0.0.0/8'}],
|
||||
[{'nexthop': '10.0.2.20',
|
||||
'destination': '100.0.0.0'}],
|
||||
[{'nexthop': '10.0.2.20',
|
||||
'destination': '100.0.0.0/8'},
|
||||
{'nexthop': '10.0.2.20',
|
||||
'destination': '100.0.0.0/8'}],
|
||||
[None],
|
||||
None]
|
||||
for host_routes in hostroute_pools:
|
||||
msg = attributes._validate_hostroutes(host_routes, None)
|
||||
self.assertIsNotNone(msg)
|
||||
|
||||
hostroute_pools = [[{'destination': '100.0.0.0/24',
|
||||
'nexthop': '10.0.2.20'}],
|
||||
[{'nexthop': '10.0.2.20',
|
||||
'destination': '100.0.0.0/8'},
|
||||
{'nexthop': '10.0.2.20',
|
||||
'destination': '101.0.0.0/8'}]]
|
||||
for host_routes in hostroute_pools:
|
||||
msg = attributes._validate_hostroutes(host_routes, None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
def test_validate_ip_address_or_none(self):
|
||||
ip_addr = None
|
||||
msg = attributes._validate_ip_address_or_none(ip_addr)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
ip_addr = '1.1.1.1'
|
||||
msg = attributes._validate_ip_address_or_none(ip_addr)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
ip_addr = '1111.1.1.1'
|
||||
msg = attributes._validate_ip_address_or_none(ip_addr)
|
||||
self.assertEqual("'%s' is not a valid IP address" % ip_addr, msg)
|
||||
|
||||
def test_uuid_pattern(self):
|
||||
data = 'garbage'
|
||||
msg = attributes._validate_regex(data, constants.UUID_PATTERN)
|
||||
self.assertIsNotNone(msg)
|
||||
|
||||
data = '00000000-ffff-ffff-ffff-000000000000'
|
||||
msg = attributes._validate_regex(data, constants.UUID_PATTERN)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
def test_mac_pattern(self):
|
||||
# Valid - 3 octets
|
||||
base_mac = "fa:16:3e:00:00:00"
|
||||
msg = attributes._validate_regex(base_mac,
|
||||
attributes.MAC_PATTERN)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# Valid - 4 octets
|
||||
base_mac = "fa:16:3e:4f:00:00"
|
||||
msg = attributes._validate_regex(base_mac,
|
||||
attributes.MAC_PATTERN)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# Invalid - not unicast
|
||||
base_mac = "01:16:3e:4f:00:00"
|
||||
msg = attributes._validate_regex(base_mac,
|
||||
attributes.MAC_PATTERN)
|
||||
self.assertIsNotNone(msg)
|
||||
|
||||
# Invalid - invalid format
|
||||
base_mac = "a:16:3e:4f:00:00"
|
||||
msg = attributes._validate_regex(base_mac,
|
||||
attributes.MAC_PATTERN)
|
||||
self.assertIsNotNone(msg)
|
||||
|
||||
# Invalid - invalid format
|
||||
base_mac = "ffa:16:3e:4f:00:00"
|
||||
msg = attributes._validate_regex(base_mac,
|
||||
attributes.MAC_PATTERN)
|
||||
self.assertIsNotNone(msg)
|
||||
|
||||
# Invalid - invalid format
|
||||
base_mac = "01163e4f0000"
|
||||
msg = attributes._validate_regex(base_mac,
|
||||
attributes.MAC_PATTERN)
|
||||
self.assertIsNotNone(msg)
|
||||
|
||||
# Invalid - invalid format
|
||||
base_mac = "01-16-3e-4f-00-00"
|
||||
msg = attributes._validate_regex(base_mac,
|
||||
attributes.MAC_PATTERN)
|
||||
self.assertIsNotNone(msg)
|
||||
|
||||
# Invalid - invalid format
|
||||
base_mac = "00:16:3:f:00:00"
|
||||
msg = attributes._validate_regex(base_mac,
|
||||
attributes.MAC_PATTERN)
|
||||
self.assertIsNotNone(msg)
|
||||
|
||||
# Invalid - invalid format
|
||||
base_mac = "12:3:4:5:67:89ab"
|
||||
msg = attributes._validate_regex(base_mac,
|
||||
attributes.MAC_PATTERN)
|
||||
self.assertIsNotNone(msg)
|
||||
|
||||
def _test_validate_subnet(self, validator, allow_none=False):
|
||||
# Valid - IPv4
|
||||
cidr = "10.0.2.0/24"
|
||||
msg = validator(cidr, None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# Valid - IPv6 without final octets
|
||||
cidr = "fe80::/24"
|
||||
msg = validator(cidr, None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# Valid - IPv6 with final octets
|
||||
cidr = "fe80::/24"
|
||||
msg = validator(cidr, None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# Valid - uncompressed ipv6 address
|
||||
cidr = "fe80:0:0:0:0:0:0:0/128"
|
||||
msg = validator(cidr, None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# Valid - ipv6 address with multiple consecutive zero
|
||||
cidr = "2001:0db8:0:0:1::1/128"
|
||||
msg = validator(cidr, None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# Valid - ipv6 address with multiple consecutive zero
|
||||
cidr = "2001:0db8::1:0:0:1/128"
|
||||
msg = validator(cidr, None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# Valid - ipv6 address with multiple consecutive zero
|
||||
cidr = "2001::0:1:0:0:1100/120"
|
||||
msg = validator(cidr, None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# Invalid - abbreviated ipv4 address
|
||||
cidr = "10/24"
|
||||
msg = validator(cidr, None)
|
||||
error = _("'%(data)s' isn't a recognized IP subnet cidr,"
|
||||
" '%(cidr)s' is recommended") % {"data": cidr,
|
||||
"cidr": "10.0.0.0/24"}
|
||||
self.assertEqual(error, msg)
|
||||
|
||||
# Invalid - IPv4 missing mask
|
||||
cidr = "10.0.2.0"
|
||||
msg = validator(cidr, None)
|
||||
error = _("'%(data)s' isn't a recognized IP subnet cidr,"
|
||||
" '%(cidr)s' is recommended") % {"data": cidr,
|
||||
"cidr": "10.0.2.0/32"}
|
||||
self.assertEqual(error, msg)
|
||||
|
||||
# Valid - IPv4 with non-zero masked bits is ok
|
||||
for i in range(1, 255):
|
||||
cidr = "192.168.1.%s/24" % i
|
||||
msg = validator(cidr, None)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
# Invalid - IPv6 without final octets, missing mask
|
||||
cidr = "fe80::"
|
||||
msg = validator(cidr, None)
|
||||
error = _("'%(data)s' isn't a recognized IP subnet cidr,"
|
||||
" '%(cidr)s' is recommended") % {"data": cidr,
|
||||
"cidr": "fe80::/128"}
|
||||
self.assertEqual(error, msg)
|
||||
|
||||
# Invalid - IPv6 with final octets, missing mask
|
||||
cidr = "fe80::0"
|
||||
msg = validator(cidr, None)
|
||||
error = _("'%(data)s' isn't a recognized IP subnet cidr,"
|
||||
" '%(cidr)s' is recommended") % {"data": cidr,
|
||||
"cidr": "fe80::/128"}
|
||||
self.assertEqual(error, msg)
|
||||
|
||||
# Invalid - Address format error
|
||||
cidr = 'invalid'
|
||||
msg = validator(cidr, None)
|
||||
error = "'%s' is not a valid IP subnet" % cidr
|
||||
self.assertEqual(error, msg)
|
||||
|
||||
cidr = None
|
||||
msg = validator(cidr, None)
|
||||
if allow_none:
|
||||
self.assertIsNone(msg)
|
||||
else:
|
||||
error = "'%s' is not a valid IP subnet" % cidr
|
||||
self.assertEqual(error, msg)
|
||||
|
||||
# Invalid - IPv4 with trailing CR
|
||||
cidr = "10.0.2.0/24\r"
|
||||
msg = validator(cidr, None)
|
||||
error = "'%s' is not a valid IP subnet" % cidr
|
||||
self.assertEqual(error, msg)
|
||||
|
||||
def test_validate_subnet(self):
|
||||
self._test_validate_subnet(attributes._validate_subnet)
|
||||
|
||||
def test_validate_subnet_or_none(self):
|
||||
self._test_validate_subnet(attributes._validate_subnet_or_none,
|
||||
allow_none=True)
|
||||
|
||||
def _test_validate_regex(self, validator, allow_none=False):
|
||||
pattern = '[hc]at'
|
||||
|
||||
data = None
|
||||
msg = validator(data, pattern)
|
||||
if allow_none:
|
||||
self.assertIsNone(msg)
|
||||
else:
|
||||
self.assertEqual("'None' is not a valid input", msg)
|
||||
|
||||
data = 'bat'
|
||||
msg = validator(data, pattern)
|
||||
self.assertEqual("'%s' is not a valid input" % data, msg)
|
||||
|
||||
data = 'hat'
|
||||
msg = validator(data, pattern)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
data = 'cat'
|
||||
msg = validator(data, pattern)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
def test_validate_regex(self):
|
||||
self._test_validate_regex(attributes._validate_regex)
|
||||
|
||||
def test_validate_regex_or_none(self):
|
||||
self._test_validate_regex(attributes._validate_regex_or_none,
|
||||
allow_none=True)
|
||||
|
||||
def test_validate_uuid(self):
|
||||
invalid_uuids = [None,
|
||||
123,
|
||||
'123',
|
||||
't5069610-744b-42a7-8bd8-ceac1a229cd4',
|
||||
'e5069610-744bb-42a7-8bd8-ceac1a229cd4']
|
||||
for uuid in invalid_uuids:
|
||||
msg = attributes._validate_uuid(uuid)
|
||||
error = "'%s' is not a valid UUID" % uuid
|
||||
self.assertEqual(error, msg)
|
||||
|
||||
msg = attributes._validate_uuid('00000000-ffff-ffff-ffff-000000000000')
|
||||
self.assertIsNone(msg)
|
||||
|
||||
def test__validate_list_of_items(self):
|
||||
# check not a list
|
||||
items = [None,
|
||||
123,
|
||||
'e5069610-744b-42a7-8bd8-ceac1a229cd4',
|
||||
'12345678123456781234567812345678',
|
||||
{'uuid': 'e5069610-744b-42a7-8bd8-ceac1a229cd4'}]
|
||||
for item in items:
|
||||
msg = attributes._validate_list_of_items(mock.Mock(), item)
|
||||
error = "'%s' is not a list" % item
|
||||
self.assertEqual(error, msg)
|
||||
|
||||
# check duplicate items in a list
|
||||
duplicate_items = ['e5069610-744b-42a7-8bd8-ceac1a229cd4',
|
||||
'f3eeab00-8367-4524-b662-55e64d4cacb5',
|
||||
'e5069610-744b-42a7-8bd8-ceac1a229cd4']
|
||||
msg = attributes._validate_list_of_items(mock.Mock(), duplicate_items)
|
||||
error = ("Duplicate items in the list: "
|
||||
"'%s'" % ', '.join(duplicate_items))
|
||||
self.assertEqual(error, msg)
|
||||
|
||||
# check valid lists
|
||||
valid_lists = [[],
|
||||
[1, 2, 3],
|
||||
['a', 'b', 'c']]
|
||||
for list_obj in valid_lists:
|
||||
msg = attributes._validate_list_of_items(
|
||||
mock.Mock(return_value=None), list_obj)
|
||||
self.assertIsNone(msg)
|
||||
|
||||
    def test_validate_dict_type(self):
        for value in (None, True, '1', []):
            self.assertEqual("'%s' is not a dictionary" % value,
                             attributes._validate_dict(value))

    def test_validate_dict_without_constraints(self):
        msg = attributes._validate_dict({})
        self.assertIsNone(msg)

        # Validate a dictionary without constraints.
        msg = attributes._validate_dict({'key': 'value'})
        self.assertIsNone(msg)

    def test_validate_a_valid_dict_with_constraints(self):
        dictionary, constraints = self._construct_dict_and_constraints()

        msg = attributes._validate_dict(dictionary, constraints)
        self.assertIsNone(msg, 'Validation of a valid dictionary failed.')

    def test_validate_dict_with_invalid_validator(self):
        dictionary, constraints = self._construct_dict_and_constraints()

        constraints['key1'] = {'type:unsupported': None, 'required': True}
        msg = attributes._validate_dict(dictionary, constraints)
        self.assertEqual("Validator 'type:unsupported' does not exist.", msg)

    def test_validate_dict_not_required_keys(self):
        dictionary, constraints = self._construct_dict_and_constraints()

        del dictionary['key2']
        msg = attributes._validate_dict(dictionary, constraints)
        self.assertIsNone(msg, 'Field that was not required by the specs was'
                               'required by the validator.')

    def test_validate_dict_required_keys(self):
        dictionary, constraints = self._construct_dict_and_constraints()

        del dictionary['key1']
        msg = attributes._validate_dict(dictionary, constraints)
        self.assertIn('Expected keys:', msg)

    def test_validate_dict_wrong_values(self):
        dictionary, constraints = self._construct_dict_and_constraints()

        dictionary['key1'] = 'UNSUPPORTED'
        msg = attributes._validate_dict(dictionary, constraints)
        self.assertIsNotNone(msg)

    def test_validate_dict_convert_boolean(self):
        dictionary, constraints = self._construct_dict_and_constraints()

        constraints['key_bool'] = {
            'type:boolean': None,
            'required': False,
            'convert_to': attributes.convert_to_boolean}
        dictionary['key_bool'] = 'true'
        msg = attributes._validate_dict(dictionary, constraints)
        self.assertIsNone(msg)
        # Explicitly comparing with literal 'True' as assertTrue
        # succeeds also for 'true'
        self.assertIs(True, dictionary['key_bool'])

    def test_subdictionary(self):
        dictionary, constraints = self._construct_dict_and_constraints()

        del dictionary['key3']['k4']
        dictionary['key3']['k5'] = 'a string value'
        msg = attributes._validate_dict(dictionary, constraints)
        self.assertIn('Expected keys:', msg)

    def test_validate_dict_or_none(self):
        dictionary, constraints = self._construct_dict_and_constraints()

        # Check whether None is a valid value.
        msg = attributes._validate_dict_or_none(None, constraints)
        self.assertIsNone(msg, 'Validation of a None dictionary failed.')

        # Check validation of a regular dictionary.
        msg = attributes._validate_dict_or_none(dictionary, constraints)
        self.assertIsNone(msg, 'Validation of a valid dictionary failed.')

    def test_validate_dict_or_empty(self):
        dictionary, constraints = self._construct_dict_and_constraints()

        # Check whether an empty dictionary is valid.
        msg = attributes._validate_dict_or_empty({}, constraints)
        self.assertIsNone(msg, 'Validation of a None dictionary failed.')

        # Check validation of a regular dictionary.
        msg = attributes._validate_dict_or_none(dictionary, constraints)
        self.assertIsNone(msg, 'Validation of a valid dictionary failed.')

    def test_validate_non_negative(self):
        for value in (-1, '-2'):
            self.assertEqual("'%s' should be non-negative" % value,
                             attributes._validate_non_negative(value))

        for value in (0, 1, '2', True, False):
            msg = attributes._validate_non_negative(value)
            self.assertIsNone(msg)

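The validator checks removed above (subnet, regex, UUID, list and dict validation) now live in neutron_lib.api.validators. A minimal sketch of the same contract through the library, assuming the public (underscore-free) names that neutron-lib exposes; 'key_bool' below is illustrative only:

from neutron_lib.api import converters
from neutron_lib.api import validators

# Validators return None on success and an error string on failure.
assert validators.validate_uuid('00000000-ffff-ffff-ffff-000000000000') is None
assert "not a valid IP subnet" in validators.validate_subnet('invalid')

# Dict validation consumes the same 'type:*' / 'required' / 'convert_to'
# constraint shape the tests above build.
key_specs = {'key_bool': {'type:boolean': None, 'required': False,
                          'convert_to': converters.convert_to_boolean}}
assert validators.validate_dict({'key_bool': 'true'}, key_specs) is None
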
class TestConvertToBoolean(base.BaseTestCase):

    def test_convert_to_boolean_bool(self):
        self.assertIs(attributes.convert_to_boolean(True), True)
        self.assertIs(attributes.convert_to_boolean(False), False)

    def test_convert_to_boolean_int(self):
        self.assertIs(attributes.convert_to_boolean(0), False)
        self.assertIs(attributes.convert_to_boolean(1), True)
        self.assertRaises(n_exc.InvalidInput,
                          attributes.convert_to_boolean,
                          7)

    def test_convert_to_boolean_str(self):
        self.assertIs(attributes.convert_to_boolean('True'), True)
        self.assertIs(attributes.convert_to_boolean('true'), True)
        self.assertIs(attributes.convert_to_boolean('False'), False)
        self.assertIs(attributes.convert_to_boolean('false'), False)
        self.assertIs(attributes.convert_to_boolean('0'), False)
        self.assertIs(attributes.convert_to_boolean('1'), True)
        self.assertRaises(n_exc.InvalidInput,
                          attributes.convert_to_boolean,
                          '7')


class TestConvertToInt(base.BaseTestCase):

    def test_convert_to_int_int(self):
        self.assertEqual(-1, attributes.convert_to_int(-1))
        self.assertEqual(0, attributes.convert_to_int(0))
        self.assertEqual(1, attributes.convert_to_int(1))

    def test_convert_to_int_if_not_none(self):
        self.assertEqual(-1, attributes.convert_to_int_if_not_none(-1))
        self.assertEqual(0, attributes.convert_to_int_if_not_none(0))
        self.assertEqual(1, attributes.convert_to_int_if_not_none(1))
        self.assertIsNone(attributes.convert_to_int_if_not_none(None))

    def test_convert_to_int_str(self):
        self.assertEqual(4, attributes.convert_to_int('4'))
        self.assertEqual(6, attributes.convert_to_int('6'))
        self.assertRaises(n_exc.InvalidInput,
                          attributes.convert_to_int,
                          'garbage')

    def test_convert_to_int_none(self):
        self.assertRaises(n_exc.InvalidInput,
                          attributes.convert_to_int,
                          None)

    def test_convert_none_to_empty_list_none(self):
        self.assertEqual([], attributes.convert_none_to_empty_list(None))

    def test_convert_none_to_empty_dict(self):
        self.assertEqual({}, attributes.convert_none_to_empty_dict(None))

    def test_convert_none_to_empty_list_value(self):
        values = ['1', 3, [], [1], {}, {'a': 3}]
        for value in values:
            self.assertEqual(
                value, attributes.convert_none_to_empty_list(value))

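The converter behaviour pinned down by these tests is unchanged by the move; a short sketch against the neutron_lib entry points this change switches to:

from neutron_lib.api import converters
from neutron_lib import exceptions as n_exc

assert converters.convert_to_boolean('true') is True
assert converters.convert_to_boolean(0) is False
assert converters.convert_to_int('4') == 4
try:
    converters.convert_to_int('garbage')   # rejected, as asserted above
except n_exc.InvalidInput:
    pass
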
class TestConvertToFloat(base.BaseTestCase):
    # NOTE: the routine being tested here is a plugin-specific extension
    # module. As the plugin split proceed towards its second phase this
    # test should either be remove, or the validation routine moved into
    # neutron.api.v2.attributes

    def test_convert_to_float_positve_value(self):
        self.assertEqual(
            1.111, attributes.convert_to_positive_float_or_none(1.111))
        self.assertEqual(1, attributes.convert_to_positive_float_or_none(1))
        self.assertEqual(0, attributes.convert_to_positive_float_or_none(0))

    def test_convert_to_float_negative_value(self):
        self.assertRaises(n_exc.InvalidInput,
                          attributes.convert_to_positive_float_or_none,
                          -1.11)

    def test_convert_to_float_string(self):
        self.assertEqual(4, attributes.convert_to_positive_float_or_none('4'))
        self.assertEqual(
            4.44, attributes.convert_to_positive_float_or_none('4.44'))
        self.assertRaises(n_exc.InvalidInput,
                          attributes.convert_to_positive_float_or_none,
                          'garbage')

    def test_convert_to_float_none_value(self):
        self.assertIsNone(attributes.convert_to_positive_float_or_none(None))

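A matching sketch for the positive-float converter; it is assumed here to be importable from neutron_lib.api.converters like the other converters touched by this change:

from neutron_lib.api import converters

assert converters.convert_to_positive_float_or_none('4.44') == 4.44
assert converters.convert_to_positive_float_or_none(None) is None
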
class TestConvertKvp(base.BaseTestCase):

    def test_convert_kvp_list_to_dict_succeeds_for_missing_values(self):
        result = attributes.convert_kvp_list_to_dict(['True'])
        self.assertEqual({}, result)

    def test_convert_kvp_list_to_dict_succeeds_for_multiple_values(self):
        result = attributes.convert_kvp_list_to_dict(
            ['a=b', 'a=c', 'a=c', 'b=a'])
        expected = {'a': tools.UnorderedList(['c', 'b']), 'b': ['a']}
        self.assertEqual(expected, result)

    def test_convert_kvp_list_to_dict_succeeds_for_values(self):
        result = attributes.convert_kvp_list_to_dict(['a=b', 'c=d'])
        self.assertEqual({'a': ['b'], 'c': ['d']}, result)

    def test_convert_kvp_str_to_list_fails_for_missing_key(self):
        with testtools.ExpectedException(n_exc.InvalidInput):
            attributes.convert_kvp_str_to_list('=a')

    def test_convert_kvp_str_to_list_fails_for_missing_equals(self):
        with testtools.ExpectedException(n_exc.InvalidInput):
            attributes.convert_kvp_str_to_list('a')

    def test_convert_kvp_str_to_list_succeeds_for_one_equals(self):
        result = attributes.convert_kvp_str_to_list('a=')
        self.assertEqual(['a', ''], result)

    def test_convert_kvp_str_to_list_succeeds_for_two_equals(self):
        result = attributes.convert_kvp_str_to_list('a=a=a')
        self.assertEqual(['a', 'a=a'], result)

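The key=value helpers split on the first '=' and group repeated keys, as the assertions above spell out; a minimal sketch via neutron_lib:

from neutron_lib.api import converters

assert converters.convert_kvp_str_to_list('a=a=a') == ['a', 'a=a']
assert converters.convert_kvp_list_to_dict(['a=b', 'c=d']) == {'a': ['b'],
                                                               'c': ['d']}
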
class TestConvertToList(base.BaseTestCase):

    def test_convert_to_empty_list(self):
        for item in (None, [], (), {}):
            self.assertEqual([], attributes.convert_to_list(item))

    def test_convert_to_list_string(self):
        for item in ('', 'foo'):
            self.assertEqual([item], attributes.convert_to_list(item))

    def test_convert_to_list_iterable(self):
        for item in ([None], [1, 2, 3], (1, 2, 3), set([1, 2, 3]), ['foo']):
            self.assertEqual(list(item), attributes.convert_to_list(item))

    def test_convert_to_list_non_iterable(self):
        for item in (True, False, 1, 1.2, object()):
            self.assertEqual([item], attributes.convert_to_list(item))

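convert_to_list normalizes None and empty containers to [], wraps strings and scalars, and copies other iterables; the neutron_lib version keeps that contract:

from neutron_lib.api import converters

assert converters.convert_to_list(None) == []
assert converters.convert_to_list('foo') == ['foo']
assert converters.convert_to_list((1, 2, 3)) == [1, 2, 3]
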
class TestResDict(base.BaseTestCase):
@ -986,7 +84,7 @@ class TestResDict(base.BaseTestCase):

        attr_info = {
            'key': {
                'convert_to': attributes.convert_to_int,
                'convert_to': converters.convert_to_int,
            },
        }
        self._test_convert_value(attr_info,
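
The hunk above is representative of the whole change: extension attribute maps keep the 'convert_to' hook, only the callable now comes from neutron_lib. A minimal sketch of such an entry ('key' is just the test's placeholder name):

from neutron_lib.api import converters

attr_info = {
    'key': {
        'convert_to': converters.convert_to_int,
    },
}
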
@ -16,6 +16,7 @@
import os

import mock
from neutron_lib.api import converters
from neutron_lib import constants
from neutron_lib import exceptions as n_exc
from oslo_config import cfg
@ -1593,7 +1594,7 @@ class FiltersTestCase(base.BaseTestCase):
        request = webob.Request.blank(path)
        attr_info = {
            'foo': {
                'convert_list_to': attributes.convert_kvp_list_to_dict,
                'convert_list_to': converters.convert_kvp_list_to_dict,
            }
        }
        expect_val = {'foo': {'key': ['2', '4']}, 'bar': ['3'], 'qux': ['1']}
@ -1603,7 +1604,7 @@ class FiltersTestCase(base.BaseTestCase):
    def test_attr_info_with_convert_to(self):
        path = '/?foo=4&bar=3&baz=2&qux=1'
        request = webob.Request.blank(path)
        attr_info = {'foo': {'convert_to': attributes.convert_to_int}}
        attr_info = {'foo': {'convert_to': converters.convert_to_int}}
        expect_val = {'foo': [4], 'bar': ['3'], 'baz': ['2'], 'qux': ['1']}
        actual_val = api_common.get_filters(request, attr_info)
        self.assertEqual(expect_val, actual_val)
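
The two FiltersTestCase hunks above exercise different hooks: 'convert_to' is applied to each query-string value, while 'convert_list_to' receives the whole list of values for a key, which is why ?foo=4 becomes [4] in one test and the key=value pairs collapse into a dict in the other. A sketch of the two shapes, with the converters taken from neutron_lib:

from neutron_lib.api import converters

per_value = {'foo': {'convert_to': converters.convert_to_int}}
whole_list = {'foo': {'convert_list_to': converters.convert_kvp_list_to_dict}}
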
@ -13,10 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from neutron_lib.api import validators
from oslo_config import cfg
from webob import exc as web_exc

from neutron.api.v2 import attributes as attr
from neutron.db import allowedaddresspairs_db as addr_pair_db
from neutron.db import db_base_plugin_v2
from neutron.db import portsecurity_db
@ -58,7 +58,7 @@ class AllowedAddressPairTestPlugin(portsecurity_db.PortSecurityDbMixin,
            neutron_db = super(AllowedAddressPairTestPlugin, self).create_port(
                context, port)
            p.update(neutron_db)
            if attr.is_attr_set(p.get(addr_pair.ADDRESS_PAIRS)):
            if validators.is_attr_set(p.get(addr_pair.ADDRESS_PAIRS)):
                self._process_create_allowed_address_pairs(
                    context, p,
                    p[addr_pair.ADDRESS_PAIRS])
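
validators.is_attr_set is the drop-in replacement for attr.is_attr_set used here: it reports whether a request actually supplied a value (None and ATTR_NOT_SPECIFIED count as unset). A minimal sketch:

from neutron_lib.api import validators
from neutron_lib import constants

assert not validators.is_attr_set(None)
assert not validators.is_attr_set(constants.ATTR_NOT_SPECIFIED)
assert validators.is_attr_set([])   # an empty list is still a supplied value
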
@ -13,9 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from neutron_lib.api import validators
from webob import exc

from neutron.api.v2 import attributes as attr
from neutron import context
from neutron.db import db_base_plugin_v2
from neutron.db import portsecurity_db
@ -99,7 +99,7 @@ class PortSecurityTestPlugin(db_base_plugin_v2.NeutronDbPluginV2,
            p[psec.PORTSECURITY] = port_security
            self._process_port_port_security_create(context, p, neutron_db)

            if (attr.is_attr_set(p.get(ext_sg.SECURITYGROUPS)) and
            if (validators.is_attr_set(p.get(ext_sg.SECURITYGROUPS)) and
                    not (port_security and has_ip)):
                raise psec.PortSecurityAndIPRequiredForSecurityGroups()

@ -16,6 +16,7 @@
import contextlib

import mock
from neutron_lib.api import validators
from neutron_lib import constants as const
from oslo_config import cfg
import oslo_db.exception as exc
@ -181,7 +182,7 @@ class SecurityGroupTestPlugin(db_base_plugin_v2.NeutronDbPluginV2,
    def create_port(self, context, port):
        tenant_id = port['port']['tenant_id']
        default_sg = self._ensure_default_security_group(context, tenant_id)
        if not attr.is_attr_set(port['port'].get(ext_sg.SECURITYGROUPS)):
        if not validators.is_attr_set(port['port'].get(ext_sg.SECURITYGROUPS)):
            port['port'][ext_sg.SECURITYGROUPS] = [default_sg]
        session = context.session
        with session.begin(subtransactions=True):
@ -16,12 +16,12 @@
# License for the specific language governing permissions and limitations
# under the License.

from neutron_lib.api import validators
import oslo_db.sqlalchemy.session
import sqlalchemy as sa
from sqlalchemy import orm

from neutron.api import extensions
from neutron.api.v2 import attributes
from neutron.db import model_base
from neutron.db import models_v2
from neutron.plugins.ml2 import driver_api
@ -146,7 +146,7 @@ class TestPortExtension(model_base.BASEV2):
class TestDBExtensionDriver(TestExtensionDriverBase):
    def _get_value(self, data, key):
        value = data[key]
        if not attributes.is_attr_set(value):
        if not validators.is_attr_set(value):
            value = ''
        return value