Fix used jsonschemas

We have a bunch of schemas in the wrong format. This patch fixes them and adds
a unit test to avoid such mistakes in the future.

Change-Id: I8bafb4b62a38eca3a5d5ec8945d3d4f9f42674ab
This commit is contained in:
Andrey Kurilin
2017-02-08 18:21:58 +02:00
parent a1a5e6a079
commit 4f4ce0385a
19 changed files with 278 additions and 80 deletions

View File

@@ -166,7 +166,10 @@ class OpenStackAPIVersions(context.Context):
"type": "object",
"properties": {
"version": {
"anyOf": [{"type": "string"}, {"type": "number"}]
"anyOf": [{"type": "string",
"description": "a string-like version."},
{"type": "number",
"description": "a number-like version."}]
},
"service_name": {
"type": "string"

View File

@@ -29,7 +29,7 @@ class VolumeTypeGenerator(context.Context):
CONFIG_SCHEMA = {
"type": "array",
"$schema": consts.JSON_SCHEMA,
"additionalProperties": False
"items": {"type": "string"}
}
@logging.log_task_wrapper(LOG.info, _("Enter context: `volume_types`"))

View File

@@ -37,7 +37,12 @@ class VolumeGenerator(context.Context):
"minimum": 1
},
"type": {
"type": ["string", "null"]
"oneOf": [{"type": "string",
"description": "a string-like type of volume to "
"create."},
{"type": "null",
"description": "Use default type for volume to "
"create."}]
},
"volumes_per_tenant": {
"type": "integer",

View File

@@ -29,8 +29,7 @@ class CleanupMixin(object):
"$schema": consts.JSON_SCHEMA,
"items": {
"type": "string",
},
"additionalProperties": False
}
}
def setup(self):

View File

@@ -52,12 +52,15 @@ class HeatDataplane(context.Context):
key_name: name of nova ssh keypair to use for "gate" node
"""
FILE_SCHEMA = {
"description": "",
"type": "string",
}
RESOURCE_SCHEMA = {
"description": "",
"type": "array",
"minItems": 2,
"maxItems": 2,
"items": {"type": "string"}
}
CONFIG_SCHEMA = {
"type": "object",

View File

@@ -49,8 +49,7 @@ class RoleGenerator(context.Context):
"$schema": consts.JSON_SCHEMA,
"items": {
"type": "string",
},
"additionalProperties": False
}
}
def __init__(self, ctx):

View File

@@ -58,7 +58,7 @@ class ClusterTemplateGenerator(context.Context):
"type": "integer"
},
"labels": {
"type": ["string"]
"type": "string"
},
"coe": {
"type": "string"

View File

@@ -38,12 +38,13 @@ class ClusterGenerator(context.Context):
"node_count": {
"type": "integer",
"minimum": 1,
"default": 1
},
},
"additionalProperties": False
}
DEFAULT_CONFIG = {"node_count": 1}
@logging.log_task_wrapper(LOG.info, _("Enter context: `Cluster`"))
def setup(self):
for user, tenant_id in rutils.iterate_per_tenants(

View File

@@ -38,19 +38,26 @@ class SecurityServices(context.Context):
"type": "object",
"$schema": rally_consts.JSON_SCHEMA,
"properties": {
# NOTE(vponomaryov): context arg 'security_services' is expected
# to be list of dicts with data for creation of security services.
# Example:
# security_services = [
# {'type': 'LDAP', 'dns_ip': 'foo_ip', 'server': 'bar_ip',
# 'domain': 'quuz_domain', 'user': 'ololo',
# 'password': 'fake_password'}
# ]
# Where 'type' is required key and should have one of following
# values: 'active_directory', 'kerberos' or 'ldap'.
# This context arg is used only if share networks are used and
# autocreated.
"security_services": {"type": "array"},
"security_services": {
"type": "array",
"description":
"It is expected to be list of dicts with data for creation"
" of security services.",
"items": {
"type": "object",
"properties": {"type": {"enum": ["active_directory",
"kerberos", "ldap"]}},
"required": ["type"],
"additionalProperties": True,
"description":
"Data for creation of security services. \n "
"Example:\n\n"
" .. code-block:: json\n\n"
" {'type': 'LDAP', 'dns_ip': 'foo_ip', \n"
" 'server': 'bar_ip', 'domain': 'quuz_domain',\n"
" 'user': 'ololo', 'password': 'fake_password'}\n"
}
},
},
"additionalProperties": False
}

View File

@@ -26,6 +26,12 @@ LOG = logging.getLogger(__name__)
@context.configure(name="keypair", order=310)
class Keypair(context.Context):
# NOTE(andreykurilin): "type" != "null", since we need to support backward
# compatibility(previously empty dict was valid) and I hope in near
# future, we will extend this context to accept keys.
CONFIG_SCHEMA = {"type": "object",
"additionalProperties": False}
def _generate_keypair(self, credential):
nova_client = osclients.Clients(credential).nova()
# NOTE(hughsaunders): If keypair exists, it should re-generate name.

View File

@@ -15,7 +15,6 @@
from rally.common.i18n import _
from rally.common import logging
from rally.common import utils as rutils
from rally import consts
from rally import osclients
from rally.plugins.openstack.cleanup import manager as resource_manager
from rally.plugins.openstack.scenarios.nova import utils as nova_utils
@@ -35,9 +34,9 @@ class ServerGenerator(context.Context):
CONFIG_SCHEMA = {
"type": "object",
"$schema": consts.JSON_SCHEMA,
"properties": {
"image": {
"description": "Name of image to boot server(s) from.",
"type": "object",
"properties": {
"name": {
@@ -46,6 +45,7 @@ class ServerGenerator(context.Context):
}
},
"flavor": {
"description": "Name of flavor to boot server(s) with.",
"type": "object",
"properties": {
"name": {
@@ -54,19 +54,23 @@ class ServerGenerator(context.Context):
}
},
"servers_per_tenant": {
"description": "Number of servers to boot in each Tenant.",
"type": "integer",
"minimum": 1
},
"auto_assign_nic": {
"description": "True if NICs should be assigned.",
"type": "boolean",
},
"nics": {
"type": "array",
"properties": {
"net-id": {
"type": "string"
}
}
"description": "List of networks to attach to server.",
"items": {"oneOf": [
{"type": "object",
"properties": {"net-id": {"type": "string"}},
"description": "Network ID in a format like OpenStack API"
" expects to see."},
{"type": "string", "description": "Network ID."}]}
}
},
"required": ["image", "flavor"],

View File

@@ -52,9 +52,11 @@ class SaharaImage(context.Context):
}
},
"oneOf": [
{"required": ["image_url", "username", "plugin_name",
{"description": "Create an image.",
"required": ["image_url", "username", "plugin_name",
"hadoop_version"]},
{"required": ["image_uuid"]}
{"description": "Use an existing image.",
"required": ["image_uuid"]}
],
"additionalProperties": False
}

View File

@@ -35,14 +35,15 @@ class ImageCommandCustomizerContext(custom_image.BaseCustomImageGenerator):
CONFIG_SCHEMA["definitions"] = {
"stringOrStringList": {
"anyOf": [
{"type": "string"},
{"type": "string", "description": "just a string"},
{
"type": "array",
"type": "array", "description": "just a list of strings",
"items": {"type": "string"}
}
]
},
"scriptFile": {
"type": "object",
"properties": {
"script_file": {"$ref": "#/definitions/stringOrStringList"},
"interpreter": {"$ref": "#/definitions/stringOrStringList"},
@@ -52,6 +53,7 @@ class ImageCommandCustomizerContext(custom_image.BaseCustomImageGenerator):
"additionalProperties": False,
},
"scriptInline": {
"type": "object",
"properties": {
"script_inline": {"type": "string"},
"interpreter": {"$ref": "#/definitions/stringOrStringList"},
@@ -61,6 +63,7 @@ class ImageCommandCustomizerContext(custom_image.BaseCustomImageGenerator):
"additionalProperties": False,
},
"commandPath": {
"type": "object",
"properties": {
"remote_path": {"$ref": "#/definitions/stringOrStringList"},
"local_path": {"type": "string"},
@@ -70,7 +73,6 @@ class ImageCommandCustomizerContext(custom_image.BaseCustomImageGenerator):
"additionalProperties": False,
},
"commandDict": {
"type": "object",
"oneOf": [
{"$ref": "#/definitions/scriptFile"},
{"$ref": "#/definitions/scriptInline"},

View File

@@ -36,34 +36,37 @@ class AuditTemplateGenerator(context.Context):
CONFIG_SCHEMA = {
"type": "object",
"$schema": consts.JSON_SCHEMA,
"fill_strategy": {"enum": ["round_robin", "random", None]},
"params": {
"type": "array",
"minItems": 1,
"uniqueItems": True,
"items": {
"type": "object",
"properties": {
"goal": {
"type": "object",
"properties": {
"name": {
"type": "string"
"properties": {
"audit_templates_per_admin": {"type": "integer", "minimum": 1},
"fill_strategy": {"enum": ["round_robin", "random", None]},
"params": {
"type": "array",
"minItems": 1,
"uniqueItems": True,
"items": {
"type": "object",
"properties": {
"goal": {
"type": "object",
"properties": {
"name": {
"type": "string"
}
}
}
},
"strategy": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"strategy": {
"type": "object",
"properties": {
"name": {
"type": "string"
}
}
}
},
},
},
},
}
},
"additionalProperties": True,
"additionalProperties": False,
"required": ["params"]
}

View File

@@ -46,7 +46,7 @@ LOG = logging.getLogger(__name__)
@validation.required_services(consts.Service.NOVA, consts.Service.CINDER)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova", "cinder"],
"keypair": {}, "allow_ssh": {}},
"keypair": {}, "allow_ssh": None},
name="VMTasks.boot_runcommand_delete")
class BootRuncommandDelete(vm_utils.VMScenario):
@@ -80,8 +80,9 @@ class BootRuncommandDelete(vm_utils.VMScenario):
`local_path' is given). Uploading an interpreter is possible
but requires that `remote_path' and `interpreter' path do match.
Examples:
Examples::
.. code-block:: python
# Run a `local_script.pl' file sending it to a remote
# Perl interpreter
@@ -228,11 +229,12 @@ class RuncommandHeat(vm_utils.VMScenario):
Workload can be either file or resource:
.. code-block: json
.. code-block:: json
{"file": "/path/to/file.sh"}
{"resource": ["package.module", "workload.py"]}
Also it should contain "username" key.
Given file will be uploaded to `gate_node` and started. This script
@@ -415,7 +417,7 @@ EOF
@validation.required_services(consts.Service.NOVA, consts.Service.CINDER)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova", "cinder"],
"keypair": {}, "allow_ssh": {}},
"keypair": {}, "allow_ssh": None},
name="VMTasks.dd_load_test")
class DDLoadTest(BootRuncommandDelete):
@@ -425,4 +427,4 @@ class DDLoadTest(BootRuncommandDelete):
Example Script in rally-jobs/extra/install_benchmark.sh
"""
command["script_inline"] = BASH_DD_LOAD_TEST
return super(DDLoadTest, self).run(command=command, **kwargs)
return super(DDLoadTest, self).run(command=command, **kwargs)

View File

@@ -37,7 +37,7 @@ class CreateAuditTemplateAndDelete(utils.WatcherScenario):
:param goal: The goal audit template is based on
:param strategy: The strategy used to provide resource optimization
algorithm
algorithm
"""
audit_template = self._create_audit_template(goal, strategy)
@@ -59,12 +59,13 @@ class ListAuditTemplates(utils.WatcherScenario):
:param goal: Name of the goal
:param strategy: Name of the strategy
:param limit: The maximum number of results to return per
request, if:
1) limit > 0, the maximum number of audit templates to return.
2) limit == 0, return the entire list of audit_templates.
3) limit param is NOT specified (None), the number of items
returned respect the maximum imposed by the Watcher API
(see Watcher's api.max_limit option).
request, if:
1) limit > 0, the maximum number of audit templates to return.
2) limit == 0, return the entire list of audit_templates.
3) limit param is NOT specified (None), the number of items
returned respect the maximum imposed by the Watcher API
(see Watcher's api.max_limit option).
:param sort_key: Optional, field used for sorting.
:param sort_dir: Optional, direction of sorting, either 'asc' (the
default) or 'desc'.

View File

@@ -0,0 +1,168 @@
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from rally.common.plugin import plugin
from rally import plugins
from tests.unit import test
class ConfigSchemasTestCase(test.TestCase):
    """Validate the format of every plugin's CONFIG_SCHEMA.

    Walks all registered plugins and recursively checks each
    ``CONFIG_SCHEMA`` against a restricted subset of jsonschema:
    every node must declare a known ``type`` (or ``anyOf``/``oneOf``/
    ``enum``/``$ref``), use only the keywords allowed for that type,
    and describe each ``anyOf``/``oneOf`` case.
    """

    # Keyword whitelists per jsonschema "type". Any key outside the
    # matching set causes the check to fail for that schema node.
    OBJECT_TYPE_KEYS = {"$schema", "type", "description", "required",
                        "properties", "patternProperties",
                        "additionalProperties", "oneOf", "anyOf"}
    ARRAY_TYPE_KEYS = {"$schema", "type", "description", "items",
                       "uniqueItems", "minItems", "maxItems",
                       "additionalItems"}
    NUMBER_TYPE_KEYS = {"$schema", "type", "description", "minimum", "maximum",
                        "exclusiveMinimum"}
    STRING_TYPE_KEYS = {"$schema", "type", "description", "pattern"}

    def fail(self, p, schema, msg):
        """Fail the test with plugin name, module path and the bad schema.

        Overrides ``TestCase.fail`` so every failure message identifies
        which plugin's schema is invalid and shows the offending node.
        """
        super(ConfigSchemasTestCase, self).fail(
            "Config schema of plugin '%s' (%s) is invalid. %s "
            "(Schema: %s)" % (p.get_name(),
                              "%s.%s" % (p.__module__, p.__name__),
                              msg, schema))

    def _check_anyOf_or_oneOf(self, p, schema, definitions):
        """Check each case of an ``anyOf``/``oneOf`` node.

        Every case must carry a ``description`` (or be a ``$ref``).
        Each case is then re-checked as a full schema: the parent node
        is deep-copied, the ``anyOf``/``oneOf`` key removed, and the
        case's keys merged in before recursing via ``_check_item``.
        """
        if "anyOf" in schema or "oneOf" in schema:
            key = "anyOf" if "anyOf" in schema else "oneOf"
            for case in schema[key]:
                if "description" not in case and "$ref" not in case:
                    self.fail(p, schema, "Each case of '%s' should have "
                                         "description." % key)
                # Merge the case into a copy of the parent so the case is
                # validated with the parent's surrounding keywords applied.
                full_schema = copy.deepcopy(schema)
                full_schema.pop(key)
                for k, v in case.items():
                    full_schema[k] = v
                self._check_item(p, full_schema, definitions)

    def _check_object_type(self, p, schema, definitions):
        """Check a node with ``"type": "object"``.

        Only keys from OBJECT_TYPE_KEYS are allowed ("definitions" is
        tolerated for now), ``patternProperties`` and ``properties`` are
        mutually exclusive, and every property sub-schema is recursed.
        """
        unexpected_keys = set(schema.keys()) - self.OBJECT_TYPE_KEYS
        if "definitions" in unexpected_keys:
            # TODO(andreykurilin): do not use definitions since it is a hard
            #   task to parse and display them
            unexpected_keys -= {"definitions"}
        if unexpected_keys:
            self.fail(p, schema, ("Found unexpected key(s) for object type: "
                                  "%s." % ", ".join(unexpected_keys)))

        if "patternProperties" in schema:
            if "properties" in schema:
                self.fail(p, schema, "Usage both 'patternProperties' and "
                                     "'properties' in one time is restricted.")
            if not isinstance(schema["patternProperties"], dict):
                self.fail(p, schema, "Field 'patternProperties' should be a "
                                     "dict.")
            for pattern, description in schema["patternProperties"].items():
                self._check_item(p, description, definitions)

        if "properties" in schema:
            for property_name, description in schema["properties"].items():
                self._check_item(p, description, definitions)

    def _check_array_type(self, p, schema, definitions):
        """Check a node with ``"type": "array"``.

        Only keys from ARRAY_TYPE_KEYS are allowed, ``items`` is
        mandatory, and ``additionalItems`` is rejected when ``items``
        is a single schema (dict) because it would be meaningless.
        """
        unexpected_keys = set(schema.keys()) - self.ARRAY_TYPE_KEYS
        if "additionalProperties" in unexpected_keys:
            # Give a dedicated message for the most common mix-up:
            # object keywords on an array schema.
            self.fail(p, schema, "Array type doesn't support "
                                 "'additionalProperties' field.")
        if unexpected_keys:
            self.fail(p, schema, ("Found unexpected key(s) for array type: "
                                  "%s." % ", ".join(unexpected_keys)))
        if "items" not in schema:
            self.fail(p, schema, "Expected items of array type should be "
                                 "described via 'items' field.")

        if isinstance(schema["items"], dict):
            self._check_item(p, schema["items"], definitions)
            if "additionalItems" in schema:
                self.fail(p, schema, "When items is a single schema, the "
                                     "`additionalItems` keyword is "
                                     "meaningless, and it should not be used.")
        elif isinstance(schema["items"], list):
            for item in schema["items"]:
                self._check_item(p, item, definitions)
        else:
            self.fail(p, schema, ("Field 'items' of array type should be a "
                                  "list or a dict, but not '%s'" %
                                  type(schema["items"])))

    def _check_string_type(self, p, schema):
        """Check a node with ``"type": "string"`` against STRING_TYPE_KEYS."""
        unexpected_keys = set(schema.keys()) - self.STRING_TYPE_KEYS
        if unexpected_keys:
            self.fail(p, schema, ("Found unexpected key(s) for string type: "
                                  "%s." % ", ".join(unexpected_keys)))

    def _check_number_type(self, p, schema):
        """Check an ``integer``/``number`` node against NUMBER_TYPE_KEYS."""
        unexpected_keys = set(schema.keys()) - self.NUMBER_TYPE_KEYS
        if unexpected_keys:
            self.fail(p, schema, ("Found unexpected key(s) for integer/number "
                                  "type: %s." % ", ".join(unexpected_keys)))

    def _check_simpliest_types(self, p, schema):
        """Check a ``boolean``/``null`` node: only type + description allowed."""
        unexpected_keys = set(schema.keys()) - {"type", "description"}
        if unexpected_keys:
            self.fail(p, schema, ("Found unexpected key(s) for boolean type: "
                                  "%s." % ", ".join(unexpected_keys)))

    def _check_item(self, p, schema, definitions):
        """Dispatch validation of one schema node by its declared type.

        Accepted node shapes: a ``type`` (object/array/string/number/
        integer/boolean/null), an ``anyOf``/``oneOf``, an ``enum``, or
        a ``$ref`` into the top-level ``definitions``. Anything else —
        including an empty dict — is rejected.
        """
        if "type" in schema or "anyOf" in schema or "oneOf" in schema:
            if "anyOf" in schema or "oneOf" in schema:
                self._check_anyOf_or_oneOf(p, schema, definitions)
            elif "type" in schema:
                if schema["type"] == "object":
                    self._check_object_type(p, schema, definitions)
                elif schema["type"] == "array":
                    self._check_array_type(p, schema, definitions)
                elif schema["type"] == "string":
                    self._check_string_type(p, schema)
                elif schema["type"] in ("number", "integer"):
                    self._check_number_type(p, schema)
                elif schema["type"] in ("boolean", "null"):
                    self._check_simpliest_types(p, schema)
                else:
                    self.fail(p, schema,
                              "Wrong type is used: %s" % schema["type"])
        elif "enum" in schema:
            # Enumerations are self-describing; nothing more to check.
            pass
        elif schema == {}:
            # NOTE(andreykurilin): an empty dict means that the user can
            #   transmit whatever he want in whatever he want format. It is
            #   not the case which we want to support.
            self.fail(p, schema, "Empty schema is not allowed.")
        elif "$ref" in schema:
            definition_name = schema["$ref"].replace("#/definitions/", "")
            if definition_name not in definitions:
                self.fail(p, schema,
                          "Definition '%s' is not found." % definition_name)
        else:
            self.fail(p, schema, "Wrong format.")

    @plugins.ensure_plugins_are_loaded
    def test_schema_is_valid(self):
        """Check CONFIG_SCHEMA of every loaded plugin that declares one."""
        for p in plugin.Plugin.get_all():
            if not hasattr(p, "CONFIG_SCHEMA"):
                continue

            # allow only top level definitions
            definitions = p.CONFIG_SCHEMA.get("definitions", {})
            for definition in definitions.values():
                self._check_item(p, definition, definitions)

            # check schema itself
            self._check_item(p, p.CONFIG_SCHEMA, definitions)

View File

@@ -55,7 +55,7 @@ class TaskSampleTestCase(test.TestCase):
with open(full_path) as task_file:
try:
task_config = yaml.safe_load(api.Task.render_template
task_config = yaml.safe_load(api._Task.render_template
(task_file.read()))
eng = engine.TaskEngine(task_config,
mock.MagicMock())
@@ -82,7 +82,7 @@ class TaskSampleTestCase(test.TestCase):
full_path = os.path.join(dirname, filename)
with open(full_path) as task_file:
try:
json.loads(api.Task.render_template(task_file.read()))
json.loads(api._Task.render_template(task_file.read()))
except Exception:
print(traceback.format_exc())
self.fail("Invalid JSON file: %s" % full_path)
@@ -122,10 +122,10 @@ class TaskSampleTestCase(test.TestCase):
if os.path.exists(yaml_path) and os.path.exists(json_path):
with open(json_path) as json_file:
json_config = yaml.safe_load(api.Task.render_template
json_config = yaml.safe_load(api._Task.render_template
(json_file.read()))
with open(yaml_path) as yaml_file:
yaml_config = yaml.safe_load(api.Task.render_template
yaml_config = yaml.safe_load(api._Task.render_template
(yaml_file.read()))
self.assertEqual(json_config, yaml_config,
"Sample task configs are not equal:"

View File

@@ -17,7 +17,6 @@ import ddt
import mock
import six
from rally import consts as rally_consts
from rally.plugins.openstack.context.manila import consts
from rally.plugins.openstack.context.manila import manila_security_services
from tests.unit import test
@@ -90,12 +89,6 @@ class SecurityServicesTestCase(test.ScenarioTestCase):
self.assertEqual(inst.config.get("foo"), "bar")
self.assertFalse(inst.config.get("security_services"))
self.assertIn(
rally_consts.JSON_SCHEMA, inst.CONFIG_SCHEMA.get("$schema"))
self.assertEqual(False, inst.CONFIG_SCHEMA.get("additionalProperties"))
self.assertEqual("object", inst.CONFIG_SCHEMA.get("type"))
props = inst.CONFIG_SCHEMA.get("properties", {})
self.assertEqual({"type": "array"}, props.get("security_services"))
self.assertEqual(445, inst.get_order())
self.assertEqual(CONTEXT_NAME, inst.get_name())