Lift overly strict restrictions on cross-deps role names

Allow a user to specify an arbitrary string
  as the role name for cross-deps; the string may be
  a regexp or a TASK_ROLE_PATTERN string

  Log a warning when a task is not assigned
  via the role/roles/groups fields

  Set default logging level to INFO

Change-Id: I42c2490cf22f53892a189165698d1acd56ee4c74
Closes-bug: #1557997
This commit is contained in:
Vladimir Kuklin 2016-04-29 18:14:10 +03:00
parent 9737039e70
commit ca8240ff61
8 changed files with 192 additions and 33 deletions

View File

@ -33,6 +33,10 @@ class ValidationError(FuelPluginException):
pass pass
class TaskFieldError(ValidationError):
pass
class FileIsEmpty(ValidationError): class FileIsEmpty(ValidationError):
def __init__(self, file_path): def __init__(self, file_path):
super(FileIsEmpty, self).__init__( super(FileIsEmpty, self).__init__(

View File

@ -20,7 +20,7 @@ import logging
def configure_logger(debug=False): def configure_logger(debug=False):
logger = logging.getLogger('fuel_plugin_builder') logger = logging.getLogger('fuel_plugin_builder')
logger.setLevel(logging.CRITICAL) logger.setLevel(logging.INFO)
if debug: if debug:
logger.setLevel(logging.DEBUG) logger.setLevel(logging.DEBUG)

View File

@ -13,7 +13,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import jsonschema
import mock import mock
from fuel_plugin_builder import errors from fuel_plugin_builder import errors
@ -37,6 +37,7 @@ class TestBaseValidator(BaseTestCase):
self.validator = NewValidator(self.plugin_path) self.validator = NewValidator(self.plugin_path)
self.data = {'data': 'data1'} self.data = {'data': 'data1'}
self.schema = self.make_schema(['data'], {'data': {'type': 'string'}}) self.schema = self.make_schema(['data'], {'data': {'type': 'string'}})
self.format_checker = jsonschema.FormatChecker
@classmethod @classmethod
def make_schema(cls, required, properties): def make_schema(cls, required, properties):
@ -54,7 +55,7 @@ class TestBaseValidator(BaseTestCase):
'file_path') 'file_path')
schema_mock.validate.assert_called_once_with( schema_mock.validate.assert_called_once_with(
self.data, self.data,
self.schema) self.schema, format_checker=self.format_checker)
def test_validate_schema_raises_error(self): def test_validate_schema_raises_error(self):
schema = self.make_schema(['key'], {'key': {'type': 'string'}}) schema = self.make_schema(['key'], {'key': {'type': 'string'}})

View File

@ -290,7 +290,7 @@ class TestValidatorV4(TestValidatorV3):
mock_data = [{ mock_data = [{
'id': 'plugin_task', 'id': 'plugin_task',
'type': 'puppet', 'type': 'puppet',
'group': ['controller'], 'groups': ['controller'],
'reexecute_on': ['bla']}] 'reexecute_on': ['bla']}]
err_msg = "File '/tmp/plugin_path/deployment_tasks.yaml', " \ err_msg = "File '/tmp/plugin_path/deployment_tasks.yaml', " \
"'bla' is not one of" "'bla' is not one of"
@ -299,8 +299,9 @@ class TestValidatorV4(TestValidatorV3):
err_msg, self.validator.check_deployment_tasks_schema) err_msg, self.validator.check_deployment_tasks_schema)
@mock.patch('fuel_plugin_builder.validators.validator_v4.utils') @mock.patch('fuel_plugin_builder.validators.validator_v4.utils')
@mock.patch('fuel_plugin_builder.validators.validator_v4.logger')
def test_role_attribute_is_required_for_deployment_task_types( def test_role_attribute_is_required_for_deployment_task_types(
self, utils_mock, *args): self, logger_mock, utils_mock, *args):
deployment_tasks_data = [ deployment_tasks_data = [
{ {
'id': 'plugin_name', 'id': 'plugin_name',
@ -332,31 +333,98 @@ class TestValidatorV4(TestValidatorV3):
} }
] ]
err_msg = "File '/tmp/plugin_path/deployment_tasks.yaml', " \
"'role' is a required property, value path '0'"
for task in deployment_tasks_data: for task in deployment_tasks_data:
self.check_raised_exception( utils_mock.parse_yaml.return_value = [task]
utils_mock, [task], logger_mock.warn.reset_mock()
err_msg, self.validator.check_deployment_tasks) self.validator.check_deployment_tasks()
self.assertEqual(logger_mock.warn.call_count, 1)
# This is the section of tests inherited from the v3 validator # This is the section of tests inherited from the v3 validator
# where decorators is re-defined for module v4 # where decorators is re-defined for module v4
@mock.patch('fuel_plugin_builder.validators.validator_v4.utils')
def test_check_deployment_task_role(self, utils_mock, *args):
super(TestValidatorV4, self).test_check_deployment_task_role(
utils_mock)
@mock.patch('fuel_plugin_builder.validators.validator_v4.utils') @mock.patch('fuel_plugin_builder.validators.validator_v4.utils')
@mock.patch('fuel_plugin_builder.validators.base.utils.exists') @mock.patch('fuel_plugin_builder.validators.base.utils.exists')
def test_check_tasks_no_file(self, exists_mock, utils_mock, *args): def test_check_tasks_no_file(self, exists_mock, utils_mock, *args):
super(TestValidatorV4, self).test_check_deployment_task_role( super(TestValidatorV4, self).test_check_deployment_task_role(
exists_mock, utils_mock) exists_mock, utils_mock)
@mock.patch('fuel_plugin_builder.validators.validator_v4.utils')
def test_check_deployment_task_role(self, utils_mock, *args):
utils_mock.parse_yaml.return_value = [
{'id': 'plugin_name', 'type': 'group', 'groups': ['a', 'b']},
{'id': 'plugin_name', 'type': 'group', 'groups': '*'},
{'id': 'plugin_name', 'type': 'puppet', 'role': ['a', 'b']},
{'id': 'plugin_name', 'type': 'puppet', 'role': '*'},
{'id': 'plugin_name', 'type': 'shell', 'roles': ['a', 'b']},
{'id': 'plugin_name', 'type': 'shell', 'roles': '*'},
{'id': 'plugin_name', 'type': 'skipped', 'role': '/test/'},
{'id': 'plugin_name', 'type': 'stage'},
{'id': 'plugin_name', 'type': 'reboot', 'groups': 'contrail'},
{
'id': 'plugin_name',
'type': 'copy_files',
'role': '*',
'parameters': {
'files': [
{'src': 'some_source', 'dst': 'some_destination'}]}
},
{
'id': 'plugin_name',
'type': 'sync',
'role': 'plugin_name',
'parameters': {
'src': 'some_source', 'dst': 'some_destination'}
},
{
'id': 'plugin_name',
'type': 'upload_file',
'role': '/^.*plugin\w+name$/',
'parameters': {
'path': 'some_path', 'data': 'some_data'}
},
]
self.validator.check_deployment_tasks()
@mock.patch('fuel_plugin_builder.validators.validator_v4.utils') @mock.patch('fuel_plugin_builder.validators.validator_v4.utils')
def test_check_deployment_task_role_failed(self, utils_mock, *args): def test_check_deployment_task_role_failed(self, utils_mock, *args):
super(TestValidatorV4, self).test_check_deployment_task_role_failed( mock_data = [{
utils_mock) 'id': 'plugin_name',
'type': 'group',
'role': ['plugin_n@me']}]
err_msg = "field should"
self.check_raised_exception(
utils_mock, mock_data,
err_msg, self.validator.check_deployment_tasks)
@mock.patch('fuel_plugin_builder.validators.validator_v4.utils')
def test_check_deployment_task_required_missing(self, utils_mock, *args):
mock_data = [{
'groups': 'plugin_name',
'type': 'puppet'}]
err_msg = 'required'
self.check_raised_exception(
utils_mock, mock_data,
err_msg, self.validator.check_deployment_tasks)
@mock.patch('fuel_plugin_builder.validators.validator_v4.utils')
def test_check_deployment_task_required_roles_missing_is_ok(
self, utils_mock, *args):
utils_mock.parse_yaml.return_value = [{
'id': 'plugin_name',
'type': 'stage'}]
self.validator.check_deployment_tasks()
@mock.patch('fuel_plugin_builder.validators.validator_v4.utils')
def test_check_deployment_task_role_regexp_failed(self, utils_mock, *args):
mock_data = [{
'id': 'plugin_name',
'type': 'group',
'role': '/[0-9]++/'}]
err_msg = "field should.*multiple repeat"
self.check_raised_exception(
utils_mock, mock_data,
err_msg, self.validator.check_deployment_tasks)
@mock.patch('fuel_plugin_builder.validators.validator_v4.utils') @mock.patch('fuel_plugin_builder.validators.validator_v4.utils')
def test_check_group_type_deployment_task_does_not_contain_manifests( def test_check_group_type_deployment_task_does_not_contain_manifests(

View File

@ -35,15 +35,16 @@ class BaseValidator(object):
def basic_version(self): def basic_version(self):
pass pass
def __init__(self, plugin_path): def __init__(self, plugin_path, format_checker=jsonschema.FormatChecker):
self.plugin_path = plugin_path self.plugin_path = plugin_path
self.format_checker = format_checker
def validate_schema(self, data, schema, file_path, value_path=None): def validate_schema(self, data, schema, file_path, value_path=None):
logger.debug( logger.debug(
'Start schema validation for %s file, %s', file_path, schema) 'Start schema validation for %s file, %s', file_path, schema)
try: try:
jsonschema.validate(data, schema) jsonschema.validate(data, schema,
format_checker=self.format_checker)
except jsonschema.exceptions.ValidationError as exc: except jsonschema.exceptions.ValidationError as exc:
raise errors.ValidationError( raise errors.ValidationError(
self._make_error_message(exc, file_path, value_path)) self._make_error_message(exc, file_path, value_path))
@ -104,6 +105,7 @@ class BaseValidator(object):
@abc.abstractmethod @abc.abstractmethod
def validate(self): def validate(self):
"""Performs validation """Performs validation
""" """
def check_schemas(self): def check_schemas(self):
@ -169,9 +171,11 @@ class BaseValidator(object):
def check_compatibility(self): def check_compatibility(self):
"""Json schema doesn't have any conditions, so we have """Json schema doesn't have any conditions, so we have
to make sure here, that this validation schema can be used to make sure here, that this validation schema can be used
for described fuel releases for described fuel releases
""" """
meta = utils.parse_yaml(self.meta_path) meta = utils.parse_yaml(self.meta_path)
for fuel_release in meta['fuel_version']: for fuel_release in meta['fuel_version']:
if StrictVersion(fuel_release) < StrictVersion(self.basic_version): if StrictVersion(fuel_release) < StrictVersion(self.basic_version):

View File

@ -0,0 +1,53 @@
# -*- coding: utf-8 -*-
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from sre_constants import error as sre_error
import jsonschema
import six
from fuel_plugin_builder import errors
class FormatChecker(jsonschema.FormatChecker):
def __init__(self, role_patterns=(), *args, **kwargs):
super(FormatChecker, self).__init__()
@self.checks('fuel_task_role_format')
def _validate_role(instance):
sre_msg = None
if isinstance(instance, six.string_types):
if instance.startswith('/') and instance.endswith('/'):
try:
re.compile(instance[1:-1])
return True
except sre_error as e:
sre_msg = str(e)
else:
for role_pattern in role_patterns:
if re.match(role_pattern, instance):
return True
err_msg = "Role field should be either a valid " \
"regexp enclosed by " \
"slashes or a string of '{0}' or an " \
"array of those. " \
"Got '{1}' instead.".format(", ".join(role_patterns),
instance)
if sre_msg:
err_msg += "SRE error: \"{0}\"".format(sre_msg)
raise errors.TaskFieldError(err_msg)

View File

@ -27,16 +27,26 @@ COMPATIBLE_COMPONENT_NAME_PATTERN = \
'^({0}):([0-9a-z_-]+:)*([0-9a-z_-]+|(\*)?)$'.format(COMPONENTS_TYPES_STR) '^({0}):([0-9a-z_-]+:)*([0-9a-z_-]+|(\*)?)$'.format(COMPONENTS_TYPES_STR)
TASK_NAME_PATTERN = TASK_ROLE_PATTERN = '^[0-9a-zA-Z_-]+$' TASK_NAME_PATTERN = TASK_ROLE_PATTERN = '^[0-9a-zA-Z_-]+$|^\*$'
NETWORK_ROLE_PATTERN = '^[0-9a-z_-]+$' NETWORK_ROLE_PATTERN = '^[0-9a-z_-]+$'
FILE_PERMISSIONS_PATTERN = '^[0-7]{4}$' FILE_PERMISSIONS_PATTERN = '^[0-7]{4}$'
TASK_VERSION_PATTERN = '^\d+.\d+.\d+$' TASK_VERSION_PATTERN = '^\d+.\d+.\d+$'
STAGE_PATTERN = '^(post_deployment|pre_deployment)' \ STAGE_PATTERN = '^(post_deployment|pre_deployment)' \
'(/[-+]?([0-9]*\.[0-9]+|[0-9]+))?$' '(/[-+]?([0-9]*\.[0-9]+|[0-9]+))?$'
ROLE_ALIASES = ('roles', 'groups', 'role')
TASK_OBLIGATORY_FIELDS = ['id', 'type']
ROLELESS_TASKS = ('stage')
class SchemaV4(SchemaV3): class SchemaV4(SchemaV3):
def __init__(self):
super(SchemaV4, self).__init__()
self.role_pattern = TASK_ROLE_PATTERN
self.roleless_tasks = ROLELESS_TASKS
self.role_aliases = ROLE_ALIASES
@property @property
def _task_relation(self): def _task_relation(self):
return { return {
@ -58,13 +68,13 @@ class SchemaV4(SchemaV3):
'oneOf': [ 'oneOf': [
{ {
'type': 'string', 'type': 'string',
'enum': ['*', 'master', 'self'] 'format': 'fuel_task_role_format'
}, },
{ {
'type': 'array', 'type': 'array',
'items': { 'items': {
'type': 'string', 'type': 'string',
'pattern': TASK_ROLE_PATTERN 'format': 'fuel_task_role_format'
} }
} }
] ]
@ -95,7 +105,8 @@ class SchemaV4(SchemaV3):
} }
} }
def _gen_task_schema(self, task_types, required=None, parameters=None): def _gen_task_schema(self, task_types, required=None,
parameters=None):
"""Generate deployment task schema using prototype. """Generate deployment task schema using prototype.
:param task_types: task types :param task_types: task types
@ -119,11 +130,12 @@ class SchemaV4(SchemaV3):
} }
parameters.setdefault("properties", {}) parameters.setdefault("properties", {})
parameters["properties"].setdefault("strategy", self._task_strategy) parameters["properties"].setdefault("strategy", self._task_strategy)
task_specific_req_fields = list(set(TASK_OBLIGATORY_FIELDS +
(required or [])))
return { return {
'$schema': 'http://json-schema.org/draft-04/schema#', '$schema': 'http://json-schema.org/draft-04/schema#',
'type': 'object', 'type': 'object',
'required': list(set(['id', 'type'] + (required or []))), 'required': task_specific_req_fields,
'properties': { 'properties': {
'type': {'enum': task_types}, 'type': {'enum': task_types},
'id': { 'id': {
@ -132,6 +144,8 @@ class SchemaV4(SchemaV3):
'version': { 'version': {
'type': 'string', "pattern": TASK_VERSION_PATTERN}, 'type': 'string', "pattern": TASK_VERSION_PATTERN},
'role': self._task_role, 'role': self._task_role,
'groups': self._task_role,
'roles': self._task_role,
'required_for': self.task_group, 'required_for': self.task_group,
'requires': self.task_group, 'requires': self.task_group,
'cross-depends': { 'cross-depends': {
@ -148,7 +162,7 @@ class SchemaV4(SchemaV3):
'pattern': TASK_ROLE_PATTERN}}, 'pattern': TASK_ROLE_PATTERN}},
'reexecute_on': self._task_reexecute, 'reexecute_on': self._task_reexecute,
'parameters': parameters or {}, 'parameters': parameters or {},
} },
} }
@property @property
@ -180,7 +194,7 @@ class SchemaV4(SchemaV3):
def copy_files_task(self): def copy_files_task(self):
return self._gen_task_schema( return self._gen_task_schema(
"copy_files", "copy_files",
['role', 'parameters'], ['parameters'],
{ {
'type': 'object', 'type': 'object',
'required': ['files'], 'required': ['files'],
@ -203,7 +217,7 @@ class SchemaV4(SchemaV3):
@property @property
def group_task(self): def group_task(self):
return self._gen_task_schema("group", ['role']) return self._gen_task_schema("group", [])
@property @property
def puppet_task(self): def puppet_task(self):
@ -242,7 +256,7 @@ class SchemaV4(SchemaV3):
def shell_task(self): def shell_task(self):
return self._gen_task_schema( return self._gen_task_schema(
"shell", "shell",
['role'], [],
{ {
'type': 'object', 'type': 'object',
'required': ['cmd'], 'required': ['cmd'],
@ -271,7 +285,7 @@ class SchemaV4(SchemaV3):
def sync_task(self): def sync_task(self):
return self._gen_task_schema( return self._gen_task_schema(
"sync", "sync",
['role', 'parameters'], ['parameters'],
{ {
'type': 'object', 'type': 'object',
'required': ['src', 'dst'], 'required': ['src', 'dst'],
@ -287,7 +301,7 @@ class SchemaV4(SchemaV3):
def upload_file_task(self): def upload_file_task(self):
return self._gen_task_schema( return self._gen_task_schema(
"upload_file", "upload_file",
['role', 'parameters'], ['parameters'],
{ {
'type': 'object', 'type': 'object',
'required': ['path', 'data'], 'required': ['path', 'data'],

View File

@ -19,9 +19,11 @@ from os.path import join as join_path
from fuel_plugin_builder import errors from fuel_plugin_builder import errors
from fuel_plugin_builder import utils from fuel_plugin_builder import utils
from fuel_plugin_builder.validators.formatchecker import FormatChecker
from fuel_plugin_builder.validators.schemas import SchemaV4 from fuel_plugin_builder.validators.schemas import SchemaV4
from fuel_plugin_builder.validators import ValidatorV3 from fuel_plugin_builder.validators import ValidatorV3
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -30,7 +32,8 @@ class ValidatorV4(ValidatorV3):
schema = SchemaV4() schema = SchemaV4()
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super(ValidatorV4, self).__init__(*args, **kwargs) super(ValidatorV4, self).__init__(format_checker=FormatChecker(
role_patterns=[self.schema.role_pattern]), *args, **kwargs)
self.components_path = join_path(self.plugin_path, 'components.yaml') self.components_path = join_path(self.plugin_path, 'components.yaml')
@property @property
@ -89,6 +92,18 @@ class ValidatorV4(ValidatorV3):
error_msg = 'There is no such task type:' \ error_msg = 'There is no such task type:' \
'{0}'.format(deployment_task['type']) '{0}'.format(deployment_task['type'])
raise errors.ValidationError(error_msg) raise errors.ValidationError(error_msg)
if deployment_task['type'] not in self.schema.roleless_tasks:
for role_alias in self.schema.role_aliases:
deployment_role = deployment_task.get(role_alias)
if deployment_role:
break
else:
logger.warn(
'Task {0} does not contain {1} fields. That '
'may lead to tasks being unassigned to nodes.'.
format(deployment_task['id'], '/'.
join(self.schema.role_aliases)))
self.validate_schema( self.validate_schema(
deployment_task, deployment_task,
schemas[deployment_task['type']], schemas[deployment_task['type']],