Harden v2 DSL schema for validation
Currently, syntax errors in the DSL do not surface until runtime, in the middle of running a workflow. This change hardens the v2 model and schema so that obvious errors in the DSL are caught before the definition is created or updated. It is also a prerequisite for the API endpoint that validates workflow/workbook DSL.

Change-Id: I0efbfcc3a42446951fd3550892768a668c6283d1
Implements: blueprint api-validate-dsl
parent e84f091e1e
commit 82fb7ee5f6
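A rough sketch of the intended effect (the definition below is illustrative; the parser entry point and exception type are the ones touched in this change): a schema error is now raised when the definition is parsed, i.e. at create/update time, instead of surfacing mid-execution.

# Sketch only: early validation of a broken v2 definition.
from mistral import exceptions as exc
from mistral.workbook import parser as spec_parser

BROKEN_WF = """
version: '2.0'

flow:
  type: direct
  tasks:
    task1:
      action: std.echo output="Hi"
      workflow: wf1    # 'action' and 'workflow' can't both be given
"""

try:
    spec_parser.get_workflow_list_spec_from_yaml(BROKEN_WF)
except exc.DSLParsingException as e:
    # With the hardened schema this fires at upload time (HTTP 400),
    # not in the middle of a running workflow.
    print("Validation failed: %s" % e)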
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
#
# Copyright 2013 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -102,11 +101,11 @@ class DSLParsingException(MistralException):
    http_code = 400


-class InvalidModelException(MistralException):
+class InvalidModelException(DSLParsingException):
    http_code = 400
    message = "Wrong entity definition"


class InvalidResultException(MistralException):
    http_code = 400
    message = "Unable to parse result"
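The significant change here is the base class: InvalidModelException now derives from DSLParsingException, so any caller that already maps DSLParsingException to an HTTP 400 response also covers schema/model validation failures. A minimal sketch of the effect (class bodies abbreviated, http_code values as in this file):

# Sketch: the re-parented exception is now caught by DSLParsingException handlers.
class MistralException(Exception):
    http_code = 500          # illustrative default

class DSLParsingException(MistralException):
    http_code = 400

class InvalidModelException(DSLParsingException):   # previously MistralException
    http_code = 400
    message = "Wrong entity definition"

try:
    raise InvalidModelException("Invalid DSL: 'base' is a required property")
except DSLParsingException as e:
    print(e.http_code)       # 400 - handled as a DSL parsing error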
@@ -1,15 +0,0 @@ (file removed)
---
version: '2.0'

name: my_wb

workflows:
  wf1:
    type: reverse
    tasks:
      task1:
        action: std.echo output="Hey"

      task2:
        action: std.echo output="Hi!"
        requires: [task1]

mistral/tests/resources/workbook/v2/my_workbook.yaml (new file, 117 lines)
@@ -0,0 +1,117 @@
version: '2.0'

name: my_workbook
description: This is a test workbook
tags: [test, v2]

actions:
  action1:
    description: This is a test ad-hoc action
    tags: [test, v2]
    base: std.echo
    base-input:
      output: Hello <% $.name %>!
    output: <% $ %>

  action2:
    description: This is a test ad-hoc action with base params
    tags: [test, v2]
    base: std.echo output="Echo output"
    output: <% $ %>

workflows:
  wf1:
    description: This is a test workflow
    tags: [test, v2]
    type: reverse

    input:
      - name

    tasks:
      task1:
        description: This is a test task
        action: action1 name=<% $.name %>
        policies:
          wait-before: 2
          wait-after: 5
          retry:
            count: 10
            delay: 30
            break-on: <% $.my_val = 10 %>
          concurrency: 3

      task2:
        requires: [task1]
        action: std.echo output="Thanks <% $.name %>!"

  wf2:
    tags: [test, v2]
    type: direct

    task-defaults:
      policies:
        retry:
          count: 10
          delay: 30
          break-on: <% $.my_val = 10 %>
      on-error:
        - fail: <% $.my_val = 0 %>
      on-success:
        - pause
      on-complete:
        - succeed

    tasks:
      task3:
        workflow: wf1 name="John Doe" age=32 param1=null param2=false
        on-error:
          - task4: <% $.my_val = 1 %>
        on-success:
          - task5: <% $.my_val = 2 %>
        on-complete:
          - task6: <% $.my_val = 3 %>

      task4:
        action: std.echo output="Task 4 echo"

      task5:
        action: std.echo output="Task 5 echo"

      task6:
        action: std.echo output="Task 6 echo"

      task7:
        with-items: vm_info in <% $.vms %>
        workflow: wf2 is_true=true object_list=[1, null, "str"] is_string="50"
        on-complete:
          - task9
          - task10

      task8:
        with-items:
          - itemX in <% $.arrayI %>
          - itemY in <% $.arrayJ %>
        workflow: wf2 expr_list=["<% $.v %>", "<% $.k %>"] expr=<% $.value %>
        target: nova
        on-complete:
          - task9
          - task10

      task9:
        join: all
        action: std.echo output="Task 9 echo"

      task10:
        join: 2
        action: std.echo output="Task 10 echo"

      task11:
        join: one
        action: std.echo output="Task 11 echo"

      task12:
        action: std.http url="http://site.com?q=<% $.query %>" params=""

      task13:
        description: No-op task
@ -1,6 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -53,6 +52,20 @@ UPDATED_WORKBOOK_DB['definition'] = UPDATED_WORKBOOK_DEF
|
||||
UPDATED_WORKBOOK = copy.copy(WORKBOOK)
|
||||
UPDATED_WORKBOOK['definition'] = UPDATED_WORKBOOK_DEF
|
||||
|
||||
INVALID_WB_DEFINITION = """
---
version: '2.0'
name: 'book'

workflows:
  flow:
    type: direct
    tasks:
      task1:
        action: std.echo output="Hi"
        workflow: wf1
"""
|
||||
|
||||
MOCK_WORKBOOK = mock.MagicMock(return_value=WORKBOOK_DB)
|
||||
MOCK_WORKBOOKS = mock.MagicMock(return_value=[WORKBOOK_DB])
|
||||
MOCK_UPDATED_WORKBOOK = mock.MagicMock(return_value=UPDATED_WORKBOOK_DB)
|
||||
@ -98,6 +111,18 @@ class TestWorkbooksController(base.FunctionalTest):
|
||||
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
def test_put_invalid(self):
|
||||
resp = self.app.put(
|
||||
'/v2/workbooks',
|
||||
INVALID_WB_DEFINITION,
|
||||
headers={'Content-Type': 'text/plain'},
|
||||
expect_errors=True
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_int, 400)
|
||||
self.assertIn("Task properties 'action' and 'workflow' "
|
||||
"can't be specified both", resp.body)
|
||||
|
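The same behaviour can be exercised against a live deployment; a hypothetical client call (host and port are placeholders, the endpoint and payload follow the test above):

# Hypothetical client-side check; mirrors test_put_invalid above.
import requests

resp = requests.put(
    'http://localhost:8989/v2/workbooks',        # host/port are placeholders
    data=INVALID_WB_DEFINITION,                  # the invalid definition above
    headers={'Content-Type': 'text/plain'}
)

assert resp.status_code == 400
assert "can't be specified both" in resp.text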
||||
@mock.patch.object(workbooks, "create_workbook_v2", MOCK_WORKBOOK)
|
||||
def test_post(self):
|
||||
resp = self.app.post(
|
||||
@ -120,6 +145,18 @@ class TestWorkbooksController(base.FunctionalTest):
|
||||
|
||||
self.assertEqual(resp.status_int, 409)
|
||||
|
||||
def test_post_invalid(self):
|
||||
resp = self.app.post(
|
||||
'/v2/workbooks',
|
||||
INVALID_WB_DEFINITION,
|
||||
headers={'Content-Type': 'text/plain'},
|
||||
expect_errors=True
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_int, 400)
|
||||
self.assertIn("Task properties 'action' and 'workflow' "
|
||||
"can't be specified both", resp.body)
|
||||
|
||||
@mock.patch.object(db_api, "delete_workbook", MOCK_DELETE)
|
||||
def test_delete(self):
|
||||
resp = self.app.delete('/v2/workbooks/123')
|
||||
|
@ -1,6 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -73,6 +72,19 @@ UPDATED_WF_DB['definition'] = UPDATED_WF_DEFINITION
|
||||
UPDATED_WF = copy.copy(WF)
|
||||
UPDATED_WF['definition'] = UPDATED_WF_DEFINITION
|
||||
|
||||
INVALID_WF_DEFINITION = """
---
version: '2.0'

flow:
  type: direct

  tasks:
    task1:
      action: std.echo output="Hi"
      workflow: wf1
"""
|
||||
|
||||
MOCK_WF = mock.MagicMock(return_value=WF_DB)
|
||||
MOCK_WFS = mock.MagicMock(return_value=[WF_DB])
|
||||
MOCK_UPDATED_WF = mock.MagicMock(return_value=UPDATED_WF_DB)
|
||||
@ -124,6 +136,18 @@ class TestWorkflowsController(base.FunctionalTest):
|
||||
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
def test_put_invalid(self):
|
||||
resp = self.app.put(
|
||||
'/v2/workflows',
|
||||
INVALID_WF_DEFINITION,
|
||||
headers={'Content-Type': 'text/plain'},
|
||||
expect_errors=True
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_int, 400)
|
||||
self.assertIn("Task properties 'action' and 'workflow' "
|
||||
"can't be specified both", resp.body)
|
||||
|
||||
@mock.patch.object(db_api, "create_workflow_definition")
|
||||
def test_post(self, mock_mtd):
|
||||
mock_mtd.return_value = WF_DB
|
||||
@ -155,6 +179,18 @@ class TestWorkflowsController(base.FunctionalTest):
|
||||
|
||||
self.assertEqual(resp.status_int, 409)
|
||||
|
||||
def test_post_invalid(self):
|
||||
resp = self.app.post(
|
||||
'/v2/workflows',
|
||||
INVALID_WF_DEFINITION,
|
||||
headers={'Content-Type': 'text/plain'},
|
||||
expect_errors=True
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_int, 400)
|
||||
self.assertIn("Task properties 'action' and 'workflow' "
|
||||
"can't be specified both", resp.body)
|
||||
|
||||
@mock.patch.object(db_api, "delete_workflow_definition", MOCK_DELETE)
|
||||
def test_delete(self):
|
||||
resp = self.app.delete('/v2/workflows/123')
|
||||
|
mistral/tests/unit/workbook/v2/base.py (new file, 121 lines)
@ -0,0 +1,121 @@
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
import yaml
|
||||
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.tests import base
|
||||
from mistral import utils
|
||||
from mistral.workbook import parser as spec_parser
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WorkflowSpecValidationTestCase(base.BaseTest):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(WorkflowSpecValidationTestCase, self).__init__(*args, **kwargs)
|
||||
|
||||
# The relative resource path is ./mistral/tests/resources/workbook/v2.
|
||||
self._resource_path = 'workbook/v2'
|
||||
|
||||
self._spec_parser = spec_parser.get_workflow_list_spec_from_yaml
|
||||
|
||||
self._dsl_blank = {
|
||||
'version': '2.0',
|
||||
'test': {
|
||||
'type': 'direct'
|
||||
}
|
||||
}
|
||||
|
||||
self._dsl_tasks = {
|
||||
'get': {
|
||||
'action': 'std.http',
|
||||
'input': {
|
||||
'url': 'http://www.openstack.org'
|
||||
}
|
||||
},
|
||||
'echo': {
|
||||
'action': 'std.echo',
|
||||
'input': {
|
||||
'output': 'This is a test.'
|
||||
}
|
||||
},
|
||||
'email': {
|
||||
'action': 'std.email',
|
||||
'input': {
|
||||
'from_addr': 'mistral@example.com',
|
||||
'to_addrs': ['admin@example.com'],
|
||||
'subject': 'Test',
|
||||
'body': 'This is a test.',
|
||||
'smtp_server': 'localhost',
|
||||
'smtp_password': 'password'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def _parse_dsl_spec(self, dsl_file=None, add_tasks=False,
|
||||
changes=None, expect_error=False):
|
||||
if dsl_file and add_tasks:
|
||||
raise Exception('The add_tasks option is not a valid '
|
||||
'combination with the dsl_file option.')
|
||||
|
||||
if dsl_file:
|
||||
dsl_yaml = base.get_resource(self._resource_path + '/' + dsl_file)
|
||||
|
||||
if changes:
|
||||
dsl_dict = yaml.safe_load(dsl_yaml)
|
||||
utils.merge_dicts(dsl_dict, changes)
|
||||
dsl_yaml = yaml.safe_dump(dsl_dict, default_flow_style=False)
|
||||
else:
|
||||
dsl_dict = copy.deepcopy(self._dsl_blank)
|
||||
|
||||
if add_tasks:
|
||||
dsl_dict['test']['tasks'] = copy.deepcopy(self._dsl_tasks)
|
||||
|
||||
if changes:
|
||||
utils.merge_dicts(dsl_dict, changes)
|
||||
|
||||
dsl_yaml = yaml.safe_dump(dsl_dict,
|
||||
default_flow_style=False)
|
||||
|
||||
if not expect_error:
|
||||
return self._spec_parser(dsl_yaml)
|
||||
else:
|
||||
return self.assertRaises(exc.DSLParsingException,
|
||||
self._spec_parser,
|
||||
dsl_yaml)
|
||||
|
||||
|
||||
class WorkbookSpecValidationTestCase(WorkflowSpecValidationTestCase):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(WorkbookSpecValidationTestCase, self).__init__(*args, **kwargs)
|
||||
|
||||
self._spec_parser = spec_parser.get_workbook_spec_from_yaml
|
||||
|
||||
self._dsl_blank = {
|
||||
'version': '2.0',
|
||||
'name': 'test_wb'
|
||||
}
|
||||
|
||||
def _parse_dsl_spec(self, dsl_file=None,
|
||||
changes=None, expect_error=False):
|
||||
return super(WorkbookSpecValidationTestCase, self)._parse_dsl_spec(
|
||||
dsl_file=dsl_file, add_tasks=False, changes=changes,
|
||||
expect_error=expect_error)
|
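The helper above starts from _dsl_blank, optionally merges in the canned _dsl_tasks, deep-merges the per-test changes overlay, and dumps the result back to YAML before handing it to the spec parser. A rough sketch of what one overlay expands to (values illustrative):

# Illustrative only: the effective document for add_tasks=False and
# changes={'test': {'tasks': {'task1': {'action': 'std.noop',
#                                       'workflow': 'test.wf'}}}}.
import yaml

dsl_dict = {
    'version': '2.0',
    'test': {
        'type': 'direct',
        'tasks': {
            'task1': {
                'action': 'std.noop',
                'workflow': 'test.wf'   # both keys set -> expected to fail validation
            }
        }
    }
}

# This YAML string is what self._spec_parser() receives in the tests below.
print(yaml.safe_dump(dsl_dict, default_flow_style=False))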
mistral/tests/unit/workbook/v2/test_actions.py (new file, 113 lines)
@ -0,0 +1,113 @@
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.tests.unit.workbook.v2 import base
|
||||
from mistral import utils
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ActionSpecValidation(base.WorkbookSpecValidationTestCase):
|
||||
|
||||
def test_base_required(self):
|
||||
actions = {'actions': {'a1': {}}}
|
||||
|
||||
exception = self._parse_dsl_spec(changes=actions,
|
||||
expect_error=True)
|
||||
|
||||
self.assertIn("'base' is a required property", exception.message)
|
||||
|
||||
def test_base(self):
|
||||
tests = [
|
||||
({'actions': {'a1': {'base': ''}}}, True),
|
||||
({'actions': {'a1': {'base': None}}}, True),
|
||||
({'actions': {'a1': {'base': 12345}}}, True),
|
||||
({'actions': {'a1': {'base': 'std.noop'}}}, False),
|
||||
({'actions': {'a1': {'base': 'std.echo output="foo"'}}}, False),
|
||||
({'actions': {'a1': {'base': 'std.echo output="<% $.x %>"'}}},
|
||||
False),
|
||||
]
|
||||
|
||||
for actions, expect_error in tests:
|
||||
self._parse_dsl_spec(changes=actions,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_base_input(self):
|
||||
tests = [
|
||||
({'base-input': {}}, True),
|
||||
({'base-input': None}, True),
|
||||
({'base-input': {'k1': 'v1', 'k2': '<% $.v2 %>'}}, False)
|
||||
]
|
||||
|
||||
actions = {
|
||||
'a1': {
|
||||
'base': 'foobar'
|
||||
}
|
||||
}
|
||||
|
||||
for base_inputs, expect_error in tests:
|
||||
overlay = {'actions': copy.deepcopy(actions)}
|
||||
utils.merge_dicts(overlay['actions']['a1'], base_inputs)
|
||||
self._parse_dsl_spec(changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_input(self):
|
||||
tests = [
|
||||
({'input': ''}, True),
|
||||
({'input': []}, True),
|
||||
({'input': ['']}, True),
|
||||
({'input': None}, True),
|
||||
({'input': ['k1', 'k2']}, False),
|
||||
({'input': ['k1', 12345]}, True)
|
||||
]
|
||||
|
||||
actions = {
|
||||
'a1': {
|
||||
'base': 'foobar'
|
||||
}
|
||||
}
|
||||
|
||||
for inputs, expect_error in tests:
|
||||
overlay = {'actions': copy.deepcopy(actions)}
|
||||
utils.merge_dicts(overlay['actions']['a1'], inputs)
|
||||
self._parse_dsl_spec(changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_output(self):
|
||||
tests = [
|
||||
({'output': None}, False),
|
||||
({'output': False}, False),
|
||||
({'output': 12345}, False),
|
||||
({'output': 0.12345}, False),
|
||||
({'output': 'foobar'}, False),
|
||||
({'output': '<% $.x %>'}, False),
|
||||
({'output': ['v1']}, False),
|
||||
({'output': {'k1': 'v1'}}, False)
|
||||
]
|
||||
|
||||
actions = {
|
||||
'a1': {
|
||||
'base': 'foobar'
|
||||
}
|
||||
}
|
||||
|
||||
for outputs, expect_error in tests:
|
||||
overlay = {'actions': copy.deepcopy(actions)}
|
||||
utils.merge_dicts(overlay['actions']['a1'], outputs)
|
||||
self._parse_dsl_spec(changes=overlay,
|
||||
expect_error=expect_error)
|
@ -1,4 +1,5 @@
|
||||
# Copyright 2015 - Huawei Technologies Co. Ltd
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -13,8 +14,15 @@
|
||||
# limitations under the License.
|
||||
|
||||
from mistral import exceptions
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.tests import base
|
||||
from mistral.tests.unit.workbook.v2 import base as v2_base
|
||||
from mistral import utils
|
||||
from mistral.workbook.v2 import tasks
|
||||
from mistral.workbook.v2 import workflows
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TaskSpecListTest(base.BaseTest):
|
||||
@ -31,3 +39,231 @@ class TaskSpecListTest(base.BaseTest):
|
||||
)
|
||||
|
||||
self.assertIn("Can not find task list specification", str(exc))
|
||||
|
||||
|
||||
class TaskSpecValidation(v2_base.WorkflowSpecValidationTestCase):
|
||||
|
||||
def test_type_injection(self):
|
||||
tests = [
|
||||
({'type': 'direct'}, False),
|
||||
({'type': 'reverse'}, False)
|
||||
]
|
||||
|
||||
for wf_type, expect_error in tests:
|
||||
overlay = {'test': wf_type}
|
||||
wfs_spec = self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
if not expect_error:
|
||||
self.assertIsInstance(wfs_spec, workflows.WorkflowListSpec)
|
||||
self.assertEqual(1, len(wfs_spec.get_workflows()))
|
||||
|
||||
wf_spec = wfs_spec.get_workflows()[0]
|
||||
|
||||
self.assertEqual(wf_type['type'], wf_spec.get_type())
|
||||
|
||||
for task in wf_spec.get_tasks():
|
||||
self.assertEqual(task._data['type'], wf_type['type'])
|
||||
|
||||
def test_action_or_workflow(self):
|
||||
tests = [
|
||||
({'action': 'std.noop'}, False),
|
||||
({'action': 'std.http url="openstack.org"'}, False),
|
||||
({'action': 'std.http url="openstack.org" timeout=10'}, False),
|
||||
({'action': 'std.http url=<% $.url %>'}, False),
|
||||
({'action': 'std.http url=<% $.url %> timeout=<% $.t %>'}, False),
|
||||
({'workflow': 'test.wf'}, False),
|
||||
({'workflow': 'test.wf k1="v1"'}, False),
|
||||
({'workflow': 'test.wf k1="v1" k2="v2"'}, False),
|
||||
({'workflow': 'test.wf k1=<% $.v1 %>'}, False),
|
||||
({'workflow': 'test.wf k1=<% $.v1 %> k2=<% $.v2 %>'}, False),
|
||||
({'action': 'std.noop', 'workflow': 'test.wf'}, True),
|
||||
({'action': 123}, True),
|
||||
({'workflow': 123}, True),
|
||||
({'action': ''}, True),
|
||||
({'workflow': ''}, True),
|
||||
({'action': None}, True),
|
||||
({'workflow': None}, True)
|
||||
]
|
||||
|
||||
for task, expect_error in tests:
|
||||
overlay = {'test': {'tasks': {'task1': task}}}
|
||||
self._parse_dsl_spec(add_tasks=False,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_inputs(self):
|
||||
tests = [
|
||||
({'input': ''}, True),
|
||||
({'input': {}}, True),
|
||||
({'input': None}, True),
|
||||
({'input': {'k1': 'v1'}}, False),
|
||||
({'input': {'k1': '<% $.v1 %>'}}, False)
|
||||
]
|
||||
|
||||
for task_input, expect_error in tests:
|
||||
overlay = {'test': {'tasks': {'task1': {'action': 'test.mock'}}}}
|
||||
utils.merge_dicts(overlay['test']['tasks']['task1'], task_input)
|
||||
self._parse_dsl_spec(add_tasks=False,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_with_items(self):
|
||||
tests = [
|
||||
({'with-items': ''}, True),
|
||||
({'with-items': []}, True),
|
||||
({'with-items': ['']}, True),
|
||||
({'with-items': None}, True),
|
||||
({'with-items': 12345}, True),
|
||||
({'with-items': 'x in y'}, True),
|
||||
({'with-items': '<% $.y %>'}, True),
|
||||
({'with-items': 'x in <% $.y %>'}, False),
|
||||
({'with-items': ['x in <% $.y %>']}, False),
|
||||
({'with-items': ['x in <% $.y %>', 'i in <% $.j %>']}, False)
|
||||
]
|
||||
|
||||
for with_item, expect_error in tests:
|
||||
overlay = {'test': {'tasks': {'get': with_item}}}
|
||||
self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_publish(self):
|
||||
tests = [
|
||||
({'publish': ''}, True),
|
||||
({'publish': {}}, True),
|
||||
({'publish': None}, True),
|
||||
({'publish': {'k1': 'v1'}}, False),
|
||||
({'publish': {'k1': '<% $.v1 %>'}}, False)
|
||||
]
|
||||
|
||||
for output, expect_error in tests:
|
||||
overlay = {'test': {'tasks': {'task1': {'action': 'test.mock'}}}}
|
||||
utils.merge_dicts(overlay['test']['tasks']['task1'], output)
|
||||
self._parse_dsl_spec(add_tasks=False,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_policies(self):
|
||||
tests = [
|
||||
({'policies': {'retry': {'count': 3, 'delay': 1}}}, False),
|
||||
({'policies': {'retry': {'count': '<% 3 %>', 'delay': 1}}},
|
||||
False),
|
||||
({'policies': {'retry': {'count': 3, 'delay': '<% 1 %>'}}},
|
||||
False),
|
||||
({'policies': {'retry': {'count': -3, 'delay': 1}}}, True),
|
||||
({'policies': {'retry': {'count': 3, 'delay': -1}}}, True),
|
||||
({'policies': {'retry': {'count': '3', 'delay': 1}}}, True),
|
||||
({'policies': {'retry': {'count': 3, 'delay': '1'}}}, True),
|
||||
({'policies': {'retry': None}}, True),
|
||||
({'policies': {'wait-before': 1}}, False),
|
||||
({'policies': {'wait-before': '<% 1 %>'}}, False),
|
||||
({'policies': {'wait-before': -1}}, True),
|
||||
({'policies': {'wait-before': 1.0}}, True),
|
||||
({'policies': {'wait-before': '1'}}, True),
|
||||
({'policies': {'wait-after': 1}}, False),
|
||||
({'policies': {'wait-after': '<% 1 %>'}}, False),
|
||||
({'policies': {'wait-after': -1}}, True),
|
||||
({'policies': {'wait-after': 1.0}}, True),
|
||||
({'policies': {'wait-after': '1'}}, True),
|
||||
({'policies': {'timeout': 300}}, False),
|
||||
({'policies': {'timeout': '<% 300 %>'}}, False),
|
||||
({'policies': {'timeout': -300}}, True),
|
||||
({'policies': {'timeout': 300.0}}, True),
|
||||
({'policies': {'timeout': '300'}}, True),
|
||||
({'policies': {'pause-before': False}}, False),
|
||||
({'policies': {'pause-before': '<% False %>'}}, False),
|
||||
({'policies': {'pause-before': 'False'}}, True),
|
||||
({'policies': {'concurrency': 10}}, False),
|
||||
({'policies': {'concurrency': '<% 10 %>'}}, False),
|
||||
({'policies': {'concurrency': -10}}, True),
|
||||
({'policies': {'concurrency': 10.0}}, True),
|
||||
({'policies': {'concurrency': '10'}}, True)
|
||||
]
|
||||
|
||||
for policy, expect_error in tests:
|
||||
overlay = {'test': {'tasks': {'get': policy}}}
|
||||
self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_direct_transition(self):
|
||||
tests = [
|
||||
({'on-success': ['email']}, False),
|
||||
({'on-success': [{'email': '<% 1 %>'}]}, False),
|
||||
({'on-success': [{'email': '<% 1 %>'}, 'echo']}, False),
|
||||
({'on-success': 'email'}, True),
|
||||
({'on-success': None}, True),
|
||||
({'on-success': ['']}, True),
|
||||
({'on-success': []}, True),
|
||||
({'on-success': ['email', 'email']}, True),
|
||||
({'on-success': ['email', 12345]}, True),
|
||||
({'on-error': ['email']}, False),
|
||||
({'on-error': [{'email': '<% 1 %>'}]}, False),
|
||||
({'on-error': [{'email': '<% 1 %>'}, 'echo']}, False),
|
||||
({'on-error': 'email'}, True),
|
||||
({'on-error': None}, True),
|
||||
({'on-error': ['']}, True),
|
||||
({'on-error': []}, True),
|
||||
({'on-error': ['email', 'email']}, True),
|
||||
({'on-error': ['email', 12345]}, True),
|
||||
({'on-complete': ['email']}, False),
|
||||
({'on-complete': [{'email': '<% 1 %>'}]}, False),
|
||||
({'on-complete': [{'email': '<% 1 %>'}, 'echo']}, False),
|
||||
({'on-complete': 'email'}, True),
|
||||
({'on-complete': None}, True),
|
||||
({'on-complete': ['']}, True),
|
||||
({'on-complete': []}, True),
|
||||
({'on-complete': ['email', 'email']}, True),
|
||||
({'on-complete': ['email', 12345]}, True)
|
||||
]
|
||||
|
||||
for transition, expect_error in tests:
|
||||
overlay = {'test': {'tasks': {}}}
|
||||
utils.merge_dicts(overlay['test']['tasks'], {'get': transition})
|
||||
self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_join(self):
|
||||
tests = [
|
||||
({'join': ''}, True),
|
||||
({'join': None}, True),
|
||||
({'join': 'all'}, False),
|
||||
({'join': 'one'}, False),
|
||||
({'join': 0}, False),
|
||||
({'join': 3}, False),
|
||||
({'join': '3'}, True),
|
||||
({'join': -3}, True)
|
||||
]
|
||||
|
||||
on_success = {'on-success': ['email']}
|
||||
|
||||
for join, expect_error in tests:
|
||||
overlay = {'test': {'tasks': {}}}
|
||||
utils.merge_dicts(overlay['test']['tasks'], {'get': on_success})
|
||||
utils.merge_dicts(overlay['test']['tasks'], {'echo': on_success})
|
||||
utils.merge_dicts(overlay['test']['tasks'], {'email': join})
|
||||
self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_requires(self):
|
||||
tests = [
|
||||
({'requires': ''}, True),
|
||||
({'requires': []}, True),
|
||||
({'requires': ['']}, True),
|
||||
({'requires': None}, True),
|
||||
({'requires': 12345}, True),
|
||||
({'requires': ['echo']}, False),
|
||||
({'requires': ['echo', 'get']}, False)
|
||||
]
|
||||
|
||||
for require, expect_error in tests:
|
||||
overlay = {'test': {'tasks': {}}}
|
||||
utils.merge_dicts(overlay['test'], {'type': 'reverse'})
|
||||
utils.merge_dicts(overlay['test']['tasks'], {'email': require})
|
||||
self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
@ -1,6 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -14,234 +13,22 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from mistral import exceptions
|
||||
from mistral.tests import base
|
||||
from mistral.workbook import parser as spec_parser
|
||||
from mistral.workbook.v2 import tasks
|
||||
import copy
|
||||
|
||||
VALID_WB = """
|
||||
---
|
||||
version: '2.0'
|
||||
import yaml
|
||||
|
||||
name: my_workbook
|
||||
description: This is a test workbook
|
||||
tags: [test, v2]
|
||||
|
||||
actions:
|
||||
action1:
|
||||
description: This is a test ad-hoc action
|
||||
tags: [test, v2]
|
||||
base: std.echo
|
||||
base-input:
|
||||
output: Hello <% $.name %>!
|
||||
output: <% $ %>
|
||||
|
||||
action2:
|
||||
description: This is a test ad-hoc action with base params
|
||||
tags: [test, v2]
|
||||
base: std.echo output="Echo output"
|
||||
output: <% $ %>
|
||||
|
||||
workflows:
|
||||
wf1:
|
||||
description: This is a test workflow
|
||||
tags: [test, v2]
|
||||
type: reverse
|
||||
|
||||
input:
|
||||
- name
|
||||
|
||||
tasks:
|
||||
task1:
|
||||
description: This is a test task
|
||||
action: action1 name=<% $.name %>
|
||||
policies:
|
||||
wait-before: 2
|
||||
wait-after: 5
|
||||
retry:
|
||||
count: 10
|
||||
delay: 30
|
||||
break-on: <% $.my_val = 10 %>
|
||||
concurrency: 3
|
||||
|
||||
task2:
|
||||
requires: [task1]
|
||||
action: std.echo output="Thanks <% $.name %>!"
|
||||
|
||||
wf2:
|
||||
tags: [test, v2]
|
||||
type: direct
|
||||
|
||||
task-defaults:
|
||||
policies:
|
||||
retry:
|
||||
count: 10
|
||||
delay: 30
|
||||
break-on: <% $.my_val = 10 %>
|
||||
on-error:
|
||||
- fail: <% $.my_val = 0 %>
|
||||
on-success:
|
||||
- pause
|
||||
on-complete:
|
||||
- succeed
|
||||
|
||||
tasks:
|
||||
task3:
|
||||
workflow: wf1 name="John Doe" age=32 param1=null param2=false
|
||||
on-error:
|
||||
- task4: <% $.my_val = 1 %>
|
||||
on-success:
|
||||
- task5: <% $.my_val = 2 %>
|
||||
on-complete:
|
||||
- task6: <% $.my_val = 3 %>
|
||||
|
||||
task4:
|
||||
action: std.echo output="Task 4 echo"
|
||||
|
||||
task5:
|
||||
action: std.echo output="Task 5 echo"
|
||||
|
||||
task6:
|
||||
action: std.echo output="Task 6 echo"
|
||||
|
||||
task7:
|
||||
with-items: vm_info in <% $.vms %>
|
||||
workflow: wf2 is_true=true object_list=[1, null, "str"] is_string="50"
|
||||
on-complete:
|
||||
- task9
|
||||
- task10
|
||||
|
||||
task8:
|
||||
with-items:
|
||||
- itemX in <% $.arrayI %>
|
||||
- itemY in <% $.arrayJ %>
|
||||
workflow: wf2 expr_list=["<% $.v %>", "<% $.k %>"] expr=<% $.value %>
|
||||
target: nova
|
||||
on-complete:
|
||||
- task9
|
||||
- task10
|
||||
|
||||
task9:
|
||||
join: all
|
||||
action: std.echo output="Task 9 echo"
|
||||
|
||||
task10:
|
||||
join: 2
|
||||
action: std.echo output="Task 10 echo"
|
||||
|
||||
task11:
|
||||
join: one
|
||||
action: std.echo output="Task 11 echo"
|
||||
|
||||
task12:
|
||||
action: std.http url="http://site.com?q=<% $.query %>" params=""
|
||||
|
||||
task13:
|
||||
description: No-op task
|
||||
"""
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.tests.unit.workbook.v2 import base
|
||||
|
||||
|
||||
INVALID_WB = """
|
||||
version: 2.0
|
||||
|
||||
name: wb
|
||||
|
||||
workflows:
|
||||
wf1:
|
||||
type: direct
|
||||
|
||||
tasks:
|
||||
task1:
|
||||
action: std.echo output="Hey!"
|
||||
with-items:
|
||||
- vms 3
|
||||
|
||||
"""
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
INVALID_WF = """
|
||||
--
|
||||
version: 2.0
|
||||
|
||||
name: wb
|
||||
|
||||
workflows:
|
||||
wf1:
|
||||
type: direct
|
||||
|
||||
tasks:
|
||||
task1:
|
||||
action: std.echo output="Hey!"
|
||||
|
||||
"""
|
||||
|
||||
DIRECT_WF = """
|
||||
---
|
||||
version: '2.0'
|
||||
wf_direct:
|
||||
type: direct
|
||||
tasks:
|
||||
task1:
|
||||
action: std.noop
|
||||
on-complete:
|
||||
- task2
|
||||
task2:
|
||||
action: std.noop
|
||||
"""
|
||||
|
||||
BAD_DIRECT_WF = """
|
||||
---
|
||||
version: '2.0'
|
||||
wf_direct_bad:
|
||||
type: direct
|
||||
tasks:
|
||||
task1:
|
||||
action: std.noop
|
||||
task2:
|
||||
action: std.noop
|
||||
requires:
|
||||
- task1
|
||||
"""
|
||||
|
||||
REVERSE_WF = """
|
||||
---
|
||||
version: '2.0'
|
||||
wf_reverse:
|
||||
type: reverse
|
||||
tasks:
|
||||
task1:
|
||||
action: std.noop
|
||||
task2:
|
||||
action: std.noop
|
||||
requires:
|
||||
- task1
|
||||
"""
|
||||
|
||||
BAD_REVERSE_WF = """
|
||||
---
|
||||
version: '2.0'
|
||||
wf_reverse_bad:
|
||||
type: reverse
|
||||
tasks:
|
||||
task1:
|
||||
action: std.noop
|
||||
on-complete:
|
||||
- task2
|
||||
task2:
|
||||
action: std.noop
|
||||
"""
|
||||
|
||||
|
||||
# TODO(rakhmerov): Add more tests when v2 spec is complete.
|
||||
# TODO(rakhmerov): Add negative tests.
|
||||
|
||||
|
||||
class DSLv2ModelTest(base.BaseTest):
|
||||
def setUp(self):
|
||||
super(DSLv2ModelTest, self).setUp()
|
||||
class WorkbookSpecValidation(base.WorkbookSpecValidationTestCase):
|
||||
|
||||
def test_build_valid_workbook_spec(self):
|
||||
wb_spec = spec_parser.get_workbook_spec_from_yaml(VALID_WB)
|
||||
wb_spec = self._parse_dsl_spec(dsl_file='my_workbook.yaml')
|
||||
|
||||
# Workbook.
|
||||
act_specs = wb_spec.get_actions()
|
||||
@ -436,7 +223,7 @@ class DSLv2ModelTest(base.BaseTest):
|
||||
self.assertEqual('No-op task', task13_spec.get_description())
|
||||
|
||||
def test_adhoc_action_with_base_in_one_string(self):
|
||||
wb_spec = spec_parser.get_workbook_spec_from_yaml(VALID_WB)
|
||||
wb_spec = self._parse_dsl_spec(dsl_file='my_workbook.yaml')
|
||||
|
||||
act_specs = wb_spec.get_actions()
|
||||
action_spec = act_specs.get("action2")
|
||||
@ -445,24 +232,8 @@ class DSLv2ModelTest(base.BaseTest):
|
||||
self.assertEqual({'output': 'Echo output'},
|
||||
action_spec.get_base_input())
|
||||
|
||||
def test_invalid_with_items_spec(self):
|
||||
exc = self.assertRaises(
|
||||
exceptions.InvalidModelException,
|
||||
spec_parser.get_workbook_spec_from_yaml,
|
||||
INVALID_WB
|
||||
)
|
||||
self.assertIn("Wrong format of 'with-items'", str(exc))
|
||||
|
||||
def test_invalid_wf_spec(self):
|
||||
exc = self.assertRaises(
|
||||
exceptions.DSLParsingException,
|
||||
spec_parser.get_workflow_spec_from_yaml,
|
||||
INVALID_WF
|
||||
)
|
||||
self.assertIn("Definition could not be parsed", str(exc))
|
||||
|
||||
def test_to_dict(self):
|
||||
wb_spec = spec_parser.get_workbook_spec_from_yaml(VALID_WB)
|
||||
def test_spec_to_dict(self):
|
||||
wb_spec = self._parse_dsl_spec(dsl_file='my_workbook.yaml')
|
||||
|
||||
d = wb_spec.to_dict()
|
||||
|
||||
@ -470,38 +241,164 @@ class DSLv2ModelTest(base.BaseTest):
|
||||
self.assertEqual('2.0', d['workflows']['version'])
|
||||
self.assertEqual('2.0', d['workflows']['wf1']['version'])
|
||||
|
||||
def test_direct_workflow_task(self):
|
||||
wfs_spec = spec_parser.get_workflow_list_spec_from_yaml(DIRECT_WF)
|
||||
def test_version_required(self):
|
||||
dsl_dict = copy.deepcopy(self._dsl_blank)
|
||||
dsl_dict.pop('version', None)
|
||||
|
||||
self.assertEqual(1, len(wfs_spec.get_workflows()))
|
||||
self.assertEqual('wf_direct', wfs_spec.get_workflows()[0].get_name())
|
||||
self.assertEqual('direct', wfs_spec.get_workflows()[0].get_type())
|
||||
self.assertIsInstance(wfs_spec.get_workflows()[0].get_tasks(),
|
||||
tasks.DirectWfTaskSpecList)
|
||||
# TODO(m4dcoder): Check required property error when v1 is deprecated.
|
||||
# The version property is not required for v1 workbook whereas it is
|
||||
# a required property in v2. For backward compatibility, if no version
|
||||
# is not provided, the workbook spec parser defaults to v1 and the
|
||||
# required property exception is not triggered. However, a different
|
||||
# spec validation error returns due to drastically different schema
|
||||
# between workbook versions.
|
||||
self.assertRaises(exc.DSLParsingException,
|
||||
self._spec_parser,
|
||||
yaml.safe_dump(dsl_dict))
|
||||
|
||||
def test_direct_workflow_invalid_task(self):
|
||||
exception = self.assertRaises(
|
||||
exceptions.InvalidModelException,
|
||||
spec_parser.get_workflow_list_spec_from_yaml,
|
||||
BAD_DIRECT_WF
|
||||
)
|
||||
def test_version(self):
|
||||
tests = [
|
||||
({'version': None}, True),
|
||||
({'version': ''}, True),
|
||||
({'version': '1.0'}, True),
|
||||
({'version': '2.0'}, False),
|
||||
({'version': 2.0}, False),
|
||||
({'version': 2}, False)
|
||||
]
|
||||
|
||||
self.assertIn("Invalid DSL", exception.message)
|
||||
for version, expect_error in tests:
|
||||
self._parse_dsl_spec(changes=version,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_reverse_workflow_task(self):
|
||||
wfs_spec = spec_parser.get_workflow_list_spec_from_yaml(REVERSE_WF)
|
||||
def test_name_required(self):
|
||||
dsl_dict = copy.deepcopy(self._dsl_blank)
|
||||
dsl_dict.pop('name', None)
|
||||
|
||||
self.assertEqual(1, len(wfs_spec.get_workflows()))
|
||||
self.assertEqual('wf_reverse', wfs_spec.get_workflows()[0].get_name())
|
||||
self.assertEqual('reverse', wfs_spec.get_workflows()[0].get_type())
|
||||
self.assertIsInstance(wfs_spec.get_workflows()[0].get_tasks(),
|
||||
tasks.ReverseWfTaskSpecList)
|
||||
exception = self.assertRaises(exc.DSLParsingException,
|
||||
self._spec_parser,
|
||||
yaml.safe_dump(dsl_dict))
|
||||
|
||||
def test_reverse_workflow_invalid_task(self):
|
||||
exception = self.assertRaises(
|
||||
exceptions.InvalidModelException,
|
||||
spec_parser.get_workflow_list_spec_from_yaml,
|
||||
BAD_REVERSE_WF
|
||||
)
|
||||
self.assertIn("'name' is a required property", exception.message)
|
||||
|
||||
self.assertIn("Invalid DSL", exception.message)
|
||||
def test_name(self):
|
||||
tests = [
|
||||
({'name': ''}, True),
|
||||
({'name': None}, True),
|
||||
({'name': 12345}, True),
|
||||
({'name': 'foobar'}, False)
|
||||
]
|
||||
|
||||
for name, expect_error in tests:
|
||||
self._parse_dsl_spec(changes=name,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_description(self):
|
||||
tests = [
|
||||
({'description': ''}, True),
|
||||
({'description': None}, True),
|
||||
({'description': 12345}, True),
|
||||
({'description': 'This is a test workflow.'}, False)
|
||||
]
|
||||
|
||||
for description, expect_error in tests:
|
||||
self._parse_dsl_spec(changes=description,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_tags(self):
|
||||
tests = [
|
||||
({'tags': ''}, True),
|
||||
({'tags': ['']}, True),
|
||||
({'tags': None}, True),
|
||||
({'tags': 12345}, True),
|
||||
({'tags': ['foo', 'bar']}, False),
|
||||
({'tags': ['foobar', 'foobar']}, True)
|
||||
]
|
||||
|
||||
for tags, expect_error in tests:
|
||||
self._parse_dsl_spec(changes=tags,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_actions(self):
|
||||
actions = {
|
||||
'version': '2.0',
|
||||
'noop': {
|
||||
'base': 'std.noop'
|
||||
},
|
||||
'echo': {
|
||||
'base': 'std.echo'
|
||||
}
|
||||
}
|
||||
|
||||
tests = [
|
||||
({'actions': []}, True),
|
||||
({'actions': {}}, True),
|
||||
({'actions': None}, True),
|
||||
({'actions': {'version': None}}, True),
|
||||
({'actions': {'version': ''}}, True),
|
||||
({'actions': {'version': '1.0'}}, True),
|
||||
({'actions': {'version': '2.0'}}, False),
|
||||
({'actions': {'version': 2.0}}, False),
|
||||
({'actions': {'version': 2}}, False),
|
||||
({'actions': {'noop': actions['noop']}}, False),
|
||||
({'actions': {'version': '2.0', 'noop': 'std.noop'}}, True),
|
||||
({'actions': actions}, False)
|
||||
]
|
||||
|
||||
for adhoc_actions, expect_error in tests:
|
||||
self._parse_dsl_spec(changes=adhoc_actions,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_workflows(self):
|
||||
workflows = {
|
||||
'version': '2.0',
|
||||
'wf1': {
|
||||
'tasks': {
|
||||
'noop': {
|
||||
'action': 'std.noop'
|
||||
}
|
||||
}
|
||||
},
|
||||
'wf2': {
|
||||
'tasks': {
|
||||
'echo': {
|
||||
'action': 'std.echo output="This is a test."'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tests = [
|
||||
({'workflows': []}, True),
|
||||
({'workflows': {}}, True),
|
||||
({'workflows': None}, True),
|
||||
({'workflows': {'version': None}}, True),
|
||||
({'workflows': {'version': ''}}, True),
|
||||
({'workflows': {'version': '1.0'}}, True),
|
||||
({'workflows': {'version': '2.0'}}, False),
|
||||
({'workflows': {'version': 2.0}}, False),
|
||||
({'workflows': {'version': 2}}, False),
|
||||
({'workflows': {'wf1': workflows['wf1']}}, False),
|
||||
({'workflows': {'version': '2.0', 'wf1': 'wf1'}}, True),
|
||||
({'workflows': workflows}, False)
|
||||
]
|
||||
|
||||
for workflows, expect_error in tests:
|
||||
self._parse_dsl_spec(changes=workflows,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_triggers(self):
|
||||
tests = [
|
||||
({'triggers': []}, True),
|
||||
({'triggers': {}}, True),
|
||||
({'triggers': None}, True),
|
||||
({'triggers': {'version': None}}, True),
|
||||
({'triggers': {'version': ''}}, True),
|
||||
({'triggers': {'version': '1.0'}}, True),
|
||||
({'triggers': {'version': '2.0'}}, False),
|
||||
({'triggers': {'version': 2.0}}, False),
|
||||
({'triggers': {'version': 2}}, False)
|
||||
]
|
||||
|
||||
for triggers, expect_error in tests:
|
||||
self._parse_dsl_spec(changes=triggers,
|
||||
expect_error=expect_error)
|
mistral/tests/unit/workbook/v2/test_workflows.py (new file, 247 lines)
@ -0,0 +1,247 @@
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
import yaml
|
||||
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.tests.unit.workbook.v2 import base
|
||||
from mistral import utils
|
||||
from mistral.workbook.v2 import tasks
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WorkflowSpecValidation(base.WorkflowSpecValidationTestCase):
|
||||
|
||||
def test_workflow_types(self):
|
||||
tests = [
|
||||
({'type': 'direct'}, False),
|
||||
({'type': 'reverse'}, False),
|
||||
({'type': 'circular'}, True),
|
||||
({'type': None}, True)
|
||||
]
|
||||
|
||||
for wf_type, expect_error in tests:
|
||||
overlay = {'test': wf_type}
|
||||
self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_direct_workflow(self):
|
||||
overlay = {'test': {'type': 'direct', 'tasks': {}}}
|
||||
join = {'join': 'all'}
|
||||
on_success = {'on-success': ['email']}
|
||||
|
||||
utils.merge_dicts(overlay['test']['tasks'], {'get': on_success})
|
||||
utils.merge_dicts(overlay['test']['tasks'], {'echo': on_success})
|
||||
utils.merge_dicts(overlay['test']['tasks'], {'email': join})
|
||||
|
||||
wfs_spec = self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=False)
|
||||
|
||||
self.assertEqual(1, len(wfs_spec.get_workflows()))
|
||||
self.assertEqual('test', wfs_spec.get_workflows()[0].get_name())
|
||||
self.assertEqual('direct', wfs_spec.get_workflows()[0].get_type())
|
||||
self.assertIsInstance(wfs_spec.get_workflows()[0].get_tasks(),
|
||||
tasks.DirectWfTaskSpecList)
|
||||
|
||||
def test_direct_workflow_invalid_task(self):
|
||||
overlay = {'test': {'type': 'direct', 'tasks': {}}}
|
||||
require = {'requires': ['echo', 'get']}
|
||||
|
||||
utils.merge_dicts(overlay['test']['tasks'], {'email': require})
|
||||
|
||||
self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=True)
|
||||
|
||||
def test_reverse_workflow(self):
|
||||
overlay = {'test': {'type': 'reverse', 'tasks': {}}}
|
||||
require = {'requires': ['echo', 'get']}
|
||||
|
||||
utils.merge_dicts(overlay['test']['tasks'], {'email': require})
|
||||
|
||||
wfs_spec = self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=False)
|
||||
|
||||
self.assertEqual(1, len(wfs_spec.get_workflows()))
|
||||
self.assertEqual('test', wfs_spec.get_workflows()[0].get_name())
|
||||
self.assertEqual('reverse', wfs_spec.get_workflows()[0].get_type())
|
||||
self.assertIsInstance(wfs_spec.get_workflows()[0].get_tasks(),
|
||||
tasks.ReverseWfTaskSpecList)
|
||||
|
||||
def test_reverse_workflow_invalid_task(self):
|
||||
overlay = {'test': {'type': 'reverse', 'tasks': {}}}
|
||||
join = {'join': 'all'}
|
||||
on_success = {'on-success': ['email']}
|
||||
|
||||
utils.merge_dicts(overlay['test']['tasks'], {'get': on_success})
|
||||
utils.merge_dicts(overlay['test']['tasks'], {'echo': on_success})
|
||||
utils.merge_dicts(overlay['test']['tasks'], {'email': join})
|
||||
|
||||
self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=True)
|
||||
|
||||
def test_version_required(self):
|
||||
dsl_dict = copy.deepcopy(self._dsl_blank)
|
||||
dsl_dict.pop('version', None)
|
||||
|
||||
exception = self.assertRaises(exc.DSLParsingException,
|
||||
self._spec_parser,
|
||||
yaml.safe_dump(dsl_dict))
|
||||
|
||||
self.assertIn("'version' is a required property", exception.message)
|
||||
|
||||
def test_version(self):
|
||||
tests = [
|
||||
({'version': None}, True),
|
||||
({'version': ''}, True),
|
||||
({'version': '2.0'}, False),
|
||||
({'version': 2.0}, False),
|
||||
({'version': 2}, False)
|
||||
]
|
||||
|
||||
for version, expect_error in tests:
|
||||
self._parse_dsl_spec(add_tasks=True,
|
||||
changes=version,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_inputs(self):
|
||||
tests = [
|
||||
({'input': ['var1', 'var2']}, False),
|
||||
({'input': ['var1', 'var1']}, True),
|
||||
({'input': [12345]}, True),
|
||||
({'input': [None]}, True),
|
||||
({'input': ['']}, True),
|
||||
({'input': None}, True),
|
||||
({'input': []}, True)
|
||||
]
|
||||
|
||||
for wf_input, expect_error in tests:
|
||||
overlay = {'test': wf_input}
|
||||
self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_outputs(self):
|
||||
tests = [
|
||||
({'output': {'k1': 'a', 'k2': 1, 'k3': True, 'k4': None}}, False),
|
||||
({'output': []}, True),
|
||||
({'output': 'whatever'}, True),
|
||||
({'output': None}, True),
|
||||
({'output': {}}, True)
|
||||
]
|
||||
|
||||
for wf_output, expect_error in tests:
|
||||
overlay = {'test': wf_output}
|
||||
self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_tasks_required(self):
|
||||
exception = self._parse_dsl_spec(add_tasks=False,
|
||||
expect_error=True)
|
||||
|
||||
self.assertIn("'tasks' is a required property", exception.message)
|
||||
|
||||
def test_tasks(self):
|
||||
tests = [
|
||||
({'tasks': {}}, True),
|
||||
({'tasks': None}, True),
|
||||
({'tasks': self._dsl_tasks}, False)
|
||||
]
|
||||
|
||||
for wf_tasks, expect_error in tests:
|
||||
overlay = {'test': wf_tasks}
|
||||
self._parse_dsl_spec(add_tasks=False,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
||||
|
||||
def test_task_defaults(self):
|
||||
tests = [
|
||||
({'on-success': ['email']}, False),
|
||||
({'on-success': [{'email': '<% 1 %>'}]}, False),
|
||||
({'on-success': [{'email': '<% 1 %>'}, 'echo']}, False),
|
||||
({'on-success': 'email'}, True),
|
||||
({'on-success': None}, True),
|
||||
({'on-success': ['']}, True),
|
||||
({'on-success': []}, True),
|
||||
({'on-success': ['email', 'email']}, True),
|
||||
({'on-success': ['email', 12345]}, True),
|
||||
({'on-error': ['email']}, False),
|
||||
({'on-error': [{'email': '<% 1 %>'}]}, False),
|
||||
({'on-error': [{'email': '<% 1 %>'}, 'echo']}, False),
|
||||
({'on-error': 'email'}, True),
|
||||
({'on-error': None}, True),
|
||||
({'on-error': ['']}, True),
|
||||
({'on-error': []}, True),
|
||||
({'on-error': ['email', 'email']}, True),
|
||||
({'on-error': ['email', 12345]}, True),
|
||||
({'on-complete': ['email']}, False),
|
||||
({'on-complete': [{'email': '<% 1 %>'}]}, False),
|
||||
({'on-complete': [{'email': '<% 1 %>'}, 'echo']}, False),
|
||||
({'on-complete': 'email'}, True),
|
||||
({'on-complete': None}, True),
|
||||
({'on-complete': ['']}, True),
|
||||
({'on-complete': []}, True),
|
||||
({'on-complete': ['email', 'email']}, True),
|
||||
({'on-complete': ['email', 12345]}, True),
|
||||
({'policies': {'retry': {'count': 3, 'delay': 1}}}, False),
|
||||
({'policies': {'retry': {'count': '<% 3 %>', 'delay': 1}}},
|
||||
False),
|
||||
({'policies': {'retry': {'count': 3, 'delay': '<% 1 %>'}}},
|
||||
False),
|
||||
({'policies': {'retry': {'count': -3, 'delay': 1}}}, True),
|
||||
({'policies': {'retry': {'count': 3, 'delay': -1}}}, True),
|
||||
({'policies': {'retry': {'count': '3', 'delay': 1}}}, True),
|
||||
({'policies': {'retry': {'count': 3, 'delay': '1'}}}, True),
|
||||
({'policies': {'retry': None}}, True),
|
||||
({'policies': {'wait-before': 1}}, False),
|
||||
({'policies': {'wait-before': '<% 1 %>'}}, False),
|
||||
({'policies': {'wait-before': -1}}, True),
|
||||
({'policies': {'wait-before': 1.0}}, True),
|
||||
({'policies': {'wait-before': '1'}}, True),
|
||||
({'policies': {'wait-after': 1}}, False),
|
||||
({'policies': {'wait-after': '<% 1 %>'}}, False),
|
||||
({'policies': {'wait-after': -1}}, True),
|
||||
({'policies': {'wait-after': 1.0}}, True),
|
||||
({'policies': {'wait-after': '1'}}, True),
|
||||
({'policies': {'timeout': 300}}, False),
|
||||
({'policies': {'timeout': '<% 300 %>'}}, False),
|
||||
({'policies': {'timeout': -300}}, True),
|
||||
({'policies': {'timeout': 300.0}}, True),
|
||||
({'policies': {'timeout': '300'}}, True),
|
||||
({'policies': {'pause-before': False}}, False),
|
||||
({'policies': {'pause-before': '<% False %>'}}, False),
|
||||
({'policies': {'pause-before': 'False'}}, True),
|
||||
({'policies': {'concurrency': 10}}, False),
|
||||
({'policies': {'concurrency': '<% 10 %>'}}, False),
|
||||
({'policies': {'concurrency': -10}}, True),
|
||||
({'policies': {'concurrency': 10.0}}, True),
|
||||
({'policies': {'concurrency': '10'}}, True)
|
||||
]
|
||||
|
||||
for default, expect_error in tests:
|
||||
overlay = {'test': {'task-defaults': {}}}
|
||||
utils.merge_dicts(overlay['test']['task-defaults'], default)
|
||||
self._parse_dsl_spec(add_tasks=True,
|
||||
changes=overlay,
|
||||
expect_error=expect_error)
|
@@ -1,4 +1,5 @@
# Copyright 2013 - Mirantis, Inc.
+# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

+import copy
import json
import jsonschema
import re
@@ -19,6 +21,8 @@ import six

from mistral import exceptions as exc
from mistral import expressions as expr
+from mistral import utils
+from mistral.workbook import types


CMD_PTRN = re.compile("^[\w\.]+[^=\s\"]*")
@@ -43,20 +47,39 @@ PARAMS_PTRN = re.compile("([\w]+)=(%s)" % "|".join(ALL))
class BaseSpec(object):
    # See http://json-schema.org
    _schema = {
-        "type": "object",
+        "type": "object"
    }

-    _yaql_schema = {
-        "definitions": {
-            "yaql": {
-                "type": "string",
-                "pattern": "^<%.*?%>\\s*$"
-            },
-        }
-    }
+    _meta_schema = {
+        "type": "object"
+    }
+
+    _definitions = {}

    _version = "1.0"

+    @classmethod
+    def get_schema(cls, includes=['meta', 'definitions']):
+        schema = copy.deepcopy(cls._schema)
+
+        schema['properties'] = utils.merge_dicts(
+            schema.get('properties', {}),
+            cls._meta_schema.get('properties', {}),
+            overwrite=False)
+
+        if includes and 'meta' in includes:
+            schema['required'] = list(
+                set(schema.get('required', []) +
+                    cls._meta_schema.get('required', [])))
+
+        if includes and 'definitions' in includes:
+            schema['definitions'] = utils.merge_dicts(
+                schema.get('definitions', {}),
+                cls._definitions,
+                overwrite=False)
+
+        return schema
+
    def __init__(self, data):
        self._data = data

@@ -64,7 +87,7 @@ class BaseSpec(object):

    def validate(self):
        try:
-            jsonschema.validate(self._data, self._schema)
+            jsonschema.validate(self._data, self.get_schema())
        except jsonschema.ValidationError as e:
            raise exc.InvalidModelException("Invalid DSL: %s" % e)

@@ -145,6 +168,41 @@ class BaseSpec(object):
        return "%s %s" % (self.__class__.__name__, self.to_dict())


+class BaseListSpec(BaseSpec):
+    item_class = None
+
+    _meta_schema = {
+        "type": "object",
+        "properties": {
+            "version": types.VERSION
+        },
+        "required": ["version"]
+    }
+
+    def __init__(self, data):
+        super(BaseListSpec, self).__init__(data)
+
+        self.items = []
+
+        for k, v in data.iteritems():
+            if k != 'version':
+                v['name'] = k
+                self._inject_version([k])
+                self.items.append(self.item_class(v))
+
+    def validate(self):
+        super(BaseListSpec, self).validate()
+
+        if len(self._data.keys()) < 2:
+            raise exc.InvalidModelException(
+                'At least one item must be in the list [data=%s].' %
+                self._data
+            )
+
+    def get_items(self):
+        return self.items
+
+
class BaseSpecList(object):
    item_class = None
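In other words, get_schema() overlays the subclass _schema with the shared _meta_schema properties and required keys and pulls in any reusable _definitions; validate() then runs jsonschema against that merged document. A toy illustration of the merge result and the resulting error text (this is not a real Mistral spec class, just the shape of the merged schema):

# Toy example of a merged schema and the jsonschema error it produces.
import jsonschema

merged_schema = {
    "type": "object",
    "properties": {
        "base": {"type": "string", "minLength": 1},   # from the subclass _schema
        "name": {"type": "string", "minLength": 1},   # merged from _meta_schema
        "version": {"type": "string"},                # merged from _meta_schema
    },
    "required": ["base", "name", "version"],          # union of both 'required' lists
}

try:
    jsonschema.validate({"name": "a1", "version": "2.0"}, merged_schema)
except jsonschema.ValidationError as e:
    # BaseSpec.validate() wraps this as InvalidModelException("Invalid DSL: ...").
    print(e.message)   # "'base' is a required property"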
@@ -1,4 +1,5 @@
# Copyright 2013 - Mirantis, Inc.
+# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -54,7 +55,7 @@ def _get_spec_version(spec_dict):
    if 'version' in spec_dict:
        ver = spec_dict['version']

-        if str(ver) not in ALL_VERSIONS:
+        if not ver or str(float(ver)) not in ALL_VERSIONS:
            raise exc.DSLParsingException('Unsupported DSL version: %s' % ver)

    return ver
@@ -136,7 +137,14 @@ def get_task_spec(spec_dict):
    if _get_spec_version(spec_dict) == V1_0:
        return tasks_v1.TaskSpec(spec_dict)
    else:
-        return tasks_v2.TaskSpec(spec_dict)
+        workflow_type = spec_dict.get('type')
+
+        if workflow_type == 'direct':
+            return tasks_v2.DirectWorkflowTaskSpec(spec_dict)
+        elif workflow_type == 'reverse':
+            return tasks_v2.ReverseWorkflowTaskSpec(spec_dict)
+        else:
+            raise Exception('Unsupported workflow type "%s".' % workflow_type)


def get_trigger_spec(spec_dict):
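The relaxed check normalises the version through str(float(ver)) so that '2.0', 2.0 and the bare integer 2 are all accepted, while an empty value short-circuits before the float conversion; a quick sketch (assuming ALL_VERSIONS contains '1.0' and '2.0'):

# Sketch of the version normalisation in _get_spec_version.
ALL_VERSIONS = ['1.0', '2.0']    # assumption: matches the parser's constant

def is_supported(ver):
    return bool(ver) and str(float(ver)) in ALL_VERSIONS

print(is_supported('2.0'))   # True
print(is_supported(2.0))     # True
print(is_supported(2))       # True  -> str(float(2)) == '2.0'
print(is_supported(''))      # False -> empty value rejected before float()
print(is_supported('3.0'))   # False -> parser raises DSLParsingException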
mistral/workbook/types.py (new file, 107 lines)
@@ -0,0 +1,107 @@
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

NONEMPTY_STRING = {
    "type": "string",
    "minLength": 1
}

UNIQUE_STRING_LIST = {
    "type": "array",
    "items": NONEMPTY_STRING,
    "uniqueItems": True,
    "minItems": 1
}

POSITIVE_INTEGER = {
    "type": "integer",
    "minimum": 0
}

POSITIVE_NUMBER = {
    "type": "number",
    "minimum": 0.0
}

YAQL = {
    "type": "string",
    "pattern": "^<%.*?%>\\s*$"
}

YAQL_CONDITION = {
    "type": "object",
    "minProperties": 1,
    "patternProperties": {
        "^\w+$": YAQL
    }
}

ANY = {
    "anyOf": [
        {"type": "array"},
        {"type": "boolean"},
        {"type": "integer"},
        {"type": "number"},
        {"type": "object"},
        {"type": "string"},
        YAQL
    ]
}

ANY_NULLABLE = {
    "anyOf": [
        {"type": "null"},
        {"type": "array"},
        {"type": "boolean"},
        {"type": "integer"},
        {"type": "number"},
        {"type": "object"},
        {"type": "string"},
        YAQL
    ]
}

NONEMPTY_DICT = {
    "type": "object",
    "minProperties": 1,
    "patternProperties": {
        "^\w+$": ANY_NULLABLE
    }
}

STRING_OR_YAQL_CONDITION = {
    "oneOf": [
        NONEMPTY_STRING,
        YAQL_CONDITION
    ]
}

UNIQUE_STRING_OR_YAQL_CONDITION_LIST = {
    "type": "array",
    "items": STRING_OR_YAQL_CONDITION,
    "uniqueItems": True,
    "minItems": 1
}

VERSION = {
    "anyOf": [
        NONEMPTY_STRING,
        POSITIVE_INTEGER,
        POSITIVE_NUMBER
    ]
}

WORKFLOW_TYPE = {
    "enum": ["reverse", "direct"]
}
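These fragments are plugged straight into the 'properties' sections of the v2 spec schemas (see the ActionSpec change below); a short sketch of how two of them behave under jsonschema:

# Quick check of two reusable fragments against jsonschema.
import jsonschema

NONEMPTY_STRING = {"type": "string", "minLength": 1}
YAQL = {"type": "string", "pattern": "^<%.*?%>\\s*$"}

jsonschema.validate('std.echo output="Hi"', NONEMPTY_STRING)   # ok
jsonschema.validate('<% $.my_val = 10 %>', YAQL)                # ok

try:
    jsonschema.validate('', NONEMPTY_STRING)                    # violates minLength
except jsonschema.ValidationError as e:
    print(e.message)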
@ -1,4 +1,5 @@
# Copyright 2014 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -12,9 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from mistral import exceptions as exc
from mistral import utils
from mistral.workbook import base
from mistral.workbook import types
from mistral.workbook.v2 import base


class ActionSpec(base.BaseSpec):
@ -22,21 +23,15 @@ class ActionSpec(base.BaseSpec):
_schema = {
"type": "object",
"properties": {
"version": {"type": "string"},
"name": {"type": "string"},
"description": {"type": "string"},
"tags": {"type": "array"},
"base": {"type": "string"},
"base-input": {"type": "object"},
"input": {"type": "array"},
"output": {"type": ["string", "object", "array", "null"]},
"base": types.NONEMPTY_STRING,
"base-input": types.NONEMPTY_DICT,
"input": types.UNIQUE_STRING_LIST,
"output": types.ANY_NULLABLE,
},
"required": ["version", "name", "base"],
"required": ["base"],
"additionalProperties": False
}

_version = '2.0'

def __init__(self, data):
super(ActionSpec, self).__init__(data)

@ -76,42 +71,10 @@ class ActionSpec(base.BaseSpec):

class ActionSpecList(base.BaseSpecList):
item_class = ActionSpec
_version = '2.0'


class ActionListSpec(base.BaseSpec):
# See http://json-schema.org
_schema = {
"type": "object",
"properties": {
"version": {"type": "string"},
},
"required": ["version"],
"additionalProperties": True
}

_version = '2.0'

def __init__(self, data):
super(ActionListSpec, self).__init__(data)

self._actions = []

for k, v in data.iteritems():
if k == 'version':
continue

v['name'] = k
self._inject_version([k])

self._actions.append(ActionSpec(v))

def validate(self):
if len(self._data.keys()) < 2:
raise exc.InvalidModelException(
'At least one action must be in action list [data=%s]' %
self._data
)
class ActionListSpec(base.BaseListSpec):
item_class = ActionSpec

def get_actions(self):
return self._actions
return self.get_items()
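The action schema now leans on the shared fragments: 'base' must be a non-empty string and is the only locally required key, while 'name' and 'version' are enforced by the v2 meta-schema introduced just below. A sketch of the effect, with simplified fragments restated locally (not the exact composed schema):

import jsonschema

action_body = {
    "type": "object",
    "properties": {
        "base": {"type": "string", "minLength": 1},            # types.NONEMPTY_STRING
        "base-input": {"type": "object", "minProperties": 1},  # simplified NONEMPTY_DICT
    },
    "required": ["base"],
}

jsonschema.validate({"base": 'std.echo output="Echo output"'}, action_body)  # ok

try:
    jsonschema.validate({"base": ""}, action_body)  # empty base caught at definition time
except jsonschema.ValidationError as e:
    print(e.message)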
39
mistral/workbook/v2/base.py
Normal file
@ -0,0 +1,39 @@
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from mistral.workbook import base
from mistral.workbook import types


class BaseSpec(base.BaseSpec):
_version = "2.0"

_meta_schema = {
"type": "object",
"properties": {
"name": types.NONEMPTY_STRING,
"version": types.VERSION,
"description": types.NONEMPTY_STRING,
"tags": types.UNIQUE_STRING_LIST
},
"required": ["name", "version"]
}


class BaseSpecList(base.BaseSpecList):
_version = "2.0"


class BaseListSpec(base.BaseListSpec):
_version = "2.0"
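The new v2 BaseSpec states the name/version/description/tags requirements once, so the per-entity schemas above and below no longer repeat them. A short sketch of what the meta-schema alone enforces, with the fragments restated locally:

import jsonschema

meta = {
    "type": "object",
    "properties": {
        "name": {"type": "string", "minLength": 1},
        "version": {"anyOf": [{"type": "string", "minLength": 1},
                              {"type": "integer", "minimum": 0},
                              {"type": "number", "minimum": 0.0}]},
    },
    "required": ["name", "version"],
}

jsonschema.validate({"name": "wf1", "version": "2.0"}, meta)  # ok
jsonschema.validate({"name": "wf1", "version": 2.0}, meta)    # numeric version is ok too
# {"name": "wf1"} alone now fails with "'version' is a required property".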
@ -1,4 +1,5 @@
# Copyright 2014 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -12,40 +13,36 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from mistral import utils
from mistral.workbook import base
from mistral.workbook import types
from mistral.workbook.v2 import base


class RetrySpec(base.BaseSpec):
# See http://json-schema.org
_retry_schema = {
_schema = {
"type": "object",
"properties": {
"count": {
"oneOf": [
{"$ref": "#/definitions/yaql"},
{"$ref": "#/definitions/positiveInteger"}
types.YAQL,
types.POSITIVE_INTEGER
]
},
"break-on": {"$ref": "#/definitions/yaql"},
"break-on": types.YAQL,
"delay": {
"oneOf": [
{"$ref": "#/definitions/yaql"},
{"$ref": "#/definitions/positiveInteger"}
types.YAQL,
types.POSITIVE_INTEGER
]
},
},
"required": ["count", "delay"],
"additionalProperties": False,
"definitions": {
"positiveInteger": {
"type": "integer",
"minimum": 0
}
}
"additionalProperties": False
}

_schema = utils.merge_dicts(_retry_schema, base.BaseSpec._yaql_schema)
@classmethod
def get_schema(cls, includes=['definitions']):
return super(RetrySpec, cls).get_schema(includes)

def __init__(self, data):
super(RetrySpec, self).__init__(data)
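A retry block must now carry 'count' and 'delay', each either a YAQL expression or a non-negative integer, and nothing else. Sketch with the pieces restated locally:

import jsonschema

yaql = {"type": "string", "pattern": "^<%.*?%>\\s*$"}
non_negative = {"type": "integer", "minimum": 0}

retry = {
    "type": "object",
    "properties": {
        "count": {"oneOf": [yaql, non_negative]},
        "delay": {"oneOf": [yaql, non_negative]},
        "break-on": yaql,
    },
    "required": ["count", "delay"],
    "additionalProperties": False,
}

jsonschema.validate({"count": 10, "delay": 30, "break-on": "<% $.my_val = 10 %>"}, retry)  # ok

try:
    jsonschema.validate({"count": 10}, retry)  # missing 'delay' is caught up front
except jsonschema.ValidationError as e:
    print(e.message)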
@ -1,4 +1,5 @@
# Copyright 2014 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -12,26 +13,30 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from mistral.workbook import base
from mistral.workbook import types
from mistral.workbook.v2 import base
from mistral.workbook.v2 import task_policies


class TaskDefaultsSpec(base.BaseSpec):
# See http://json-schema.org
_task_policies_schema = task_policies.TaskPoliciesSpec.get_schema(
includes=None)

_schema = {
"type": "object",
"properties": {
"version": {"type": "string"},
"policies": {"type": ["object", "null"]},
"on-complete": {"type": ["array", "null"]},
"on-success": {"type": ["array", "null"]},
"on-error": {"type": ["array", "null"]},
"policies": _task_policies_schema,
"on-complete": types.UNIQUE_STRING_OR_YAQL_CONDITION_LIST,
"on-success": types.UNIQUE_STRING_OR_YAQL_CONDITION_LIST,
"on-error": types.UNIQUE_STRING_OR_YAQL_CONDITION_LIST
},
"required": ["version"],
"additionalProperties": False
}

_version = '2.0'
@classmethod
def get_schema(cls, includes=['definitions']):
return super(TaskDefaultsSpec, cls).get_schema(includes)

def __init__(self, data):
super(TaskDefaultsSpec, self).__init__(data)
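For the on-complete/on-success/on-error clauses, UNIQUE_STRING_OR_YAQL_CONDITION_LIST admits plain names (task names or engine commands such as pause, succeed and fail) and single-key mappings of name to YAQL condition. A sketch of that list form, restated locally:

import jsonschema

yaql = {"type": "string", "pattern": "^<%.*?%>\\s*$"}

on_clause = {
    "type": "array",
    "uniqueItems": True,
    "minItems": 1,
    "items": {"oneOf": [
        {"type": "string", "minLength": 1},               # e.g. "pause" or "task4"
        {"type": "object", "minProperties": 1,
         "patternProperties": {"^\\w+$": yaql}},          # e.g. {"fail": "<% ... %>"}
    ]},
}

jsonschema.validate(["pause", {"fail": "<% $.my_val = 0 %>"}], on_clause)  # ok
# [] or [""] is now rejected when the definition is saved.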
@ -1,4 +1,5 @@
# Copyright 2014 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -12,58 +13,57 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from mistral import utils
from mistral.workbook import base
from mistral.workbook import types
from mistral.workbook.v2 import base
from mistral.workbook.v2 import retry_policy


class TaskPoliciesSpec(base.BaseSpec):
# See http://json-schema.org
_policies_schema = {
_retry_policy_schema = retry_policy.RetrySpec.get_schema(
includes=None)

_schema = {
"type": "object",
"properties": {
"retry": {"type": ["object", "null"]},
"retry": _retry_policy_schema,
"wait-before": {
"oneOf": [
{"$ref": "#/definitions/yaql"},
{"$ref": "#/definitions/positiveInteger"}
types.YAQL,
types.POSITIVE_INTEGER
]
},
"wait-after": {
"oneOf": [
{"$ref": "#/definitions/yaql"},
{"$ref": "#/definitions/positiveInteger"}
types.YAQL,
types.POSITIVE_INTEGER
]
},
"timeout": {
"oneOf": [
{"$ref": "#/definitions/yaql"},
{"$ref": "#/definitions/positiveInteger"}
types.YAQL,
types.POSITIVE_INTEGER
]
},
"pause-before": {
"oneOf": [
{"$ref": "#/definitions/yaql"},
types.YAQL,
{"type": "boolean"}
]
},
"concurrency": {
"oneOf": [
{"$ref": "#/definitions/yaql"},
{"$ref": "#/definitions/positiveInteger"}
types.YAQL,
types.POSITIVE_INTEGER
]
},
},
"additionalProperties": False,
"definitions": {
"positiveInteger": {
"type": "integer",
"minimum": 0
}
},
"additionalProperties": False
}

_schema = utils.merge_dicts(_policies_schema, base.BaseSpec._yaql_schema)
@classmethod
def get_schema(cls, includes=['definitions']):
return super(TaskPoliciesSpec, cls).get_schema(includes)

def __init__(self, data):
super(TaskPoliciesSpec, self).__init__(data)
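The per-task policies follow the same pattern: wait-before, wait-after, timeout and concurrency take a YAQL expression or a non-negative integer, and pause-before takes YAQL or a boolean. Sketch, restated locally:

import jsonschema

yaql = {"type": "string", "pattern": "^<%.*?%>\\s*$"}
non_negative = {"type": "integer", "minimum": 0}

policies = {
    "type": "object",
    "properties": {
        "wait-before": {"oneOf": [yaql, non_negative]},
        "wait-after": {"oneOf": [yaql, non_negative]},
        "timeout": {"oneOf": [yaql, non_negative]},
        "pause-before": {"oneOf": [yaql, {"type": "boolean"}]},
        "concurrency": {"oneOf": [yaql, non_negative]},
    },
    "additionalProperties": False,
}

jsonschema.validate({"wait-before": 2, "wait-after": 5, "concurrency": 3}, policies)  # ok
jsonschema.validate({"pause-before": "<% $.my_val > 0 %>"}, policies)                 # YAQL is ok too
# {"wait-before": -1} or a misspelled key is now rejected before execution.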
@ -1,4 +1,5 @@
# Copyright 2014 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -17,37 +18,45 @@ import re
import six

from mistral import exceptions as exc
from mistral import expressions as expr
from mistral import utils
from mistral.workbook import base
from mistral.workbook import types
from mistral.workbook.v2 import base
from mistral.workbook.v2 import task_policies


WITH_ITEMS_PTRN = re.compile(
"\s*([\w\d_\-]+)\s*in\s*(\[.+\]|%s)" % base.INLINE_YAQL
"\s*([\w\d_\-]+)\s*in\s*(\[.+\]|%s)" % expr.INLINE_YAQL_REGEXP
)


class TaskSpec(base.BaseSpec):
# See http://json-schema.org
_type = None

_task_policies_schema = task_policies.TaskPoliciesSpec.get_schema(
includes=None)

_schema = {
"type": "object",
"properties": {
"version": {"type": "string"},
"name": {"type": "string"},
"description": {"type": "string"},
"action": {"type": ["string", "null"]},
"workflow": {"type": ["string", "null"]},
"input": {"type": ["object", "null"]},
"with-items": {"type": ["string", "array", "null"]},
"publish": {"type": ["object", "null"]},
"policies": {"type": ["object", "null"]},
"target": {"type": ["string", "null"]},
"type": types.WORKFLOW_TYPE,
"action": types.NONEMPTY_STRING,
"workflow": types.NONEMPTY_STRING,
"input": types.NONEMPTY_DICT,
"with-items": {
"oneOf": [
types.NONEMPTY_STRING,
types.UNIQUE_STRING_LIST
]
},
"publish": types.NONEMPTY_DICT,
"policies": _task_policies_schema,
"target": types.NONEMPTY_STRING
},
"required": ["version", "name"],
"additionalProperties": False
}

_version = '2.0'

def __init__(self, data):
super(TaskSpec, self).__init__(data)

@ -64,6 +73,7 @@ class TaskSpec(base.BaseSpec):
)
self._target = data.get('target')

self._inject_type()
self._process_action_and_workflow()

def validate(self):
@ -104,6 +114,10 @@ class TaskSpec(base.BaseSpec):

return with_items

def _inject_type(self):
if self._type:
self._data['type'] = self._type

def _process_action_and_workflow(self):
params = {}

@ -123,6 +137,9 @@ class TaskSpec(base.BaseSpec):
def get_description(self):
return self._description

def get_type(self):
return self._type

def get_action_name(self):
return self._action if self._action else None

@ -146,18 +163,26 @@ class TaskSpec(base.BaseSpec):


class DirectWorkflowTaskSpec(TaskSpec):
_direct_props = {
_type = 'direct'

_direct_workflow_schema = {
"type": "object",
"properties": {
"join": {"type": ["string", "integer"]},
"on-complete": {"type": ["array", "null"]},
"on-success": {"type": ["array", "null"]},
"on-error": {"type": ["array", "null"]}
},
"additionalProperties": False
"type": {"enum": [_type]},
"join": {
"oneOf": [
{"enum": ["all", "one"]},
types.POSITIVE_INTEGER
]
},
"on-complete": types.UNIQUE_STRING_OR_YAQL_CONDITION_LIST,
"on-success": types.UNIQUE_STRING_OR_YAQL_CONDITION_LIST,
"on-error": types.UNIQUE_STRING_OR_YAQL_CONDITION_LIST
}
}

_schema = utils.merge_dicts(copy.deepcopy(TaskSpec._schema),
_direct_props)
_direct_workflow_schema)

def __init__(self, data):
super(DirectWorkflowTaskSpec, self).__init__(data)
@ -192,15 +217,18 @@ class DirectWorkflowTaskSpec(TaskSpec):


class ReverseWorkflowTaskSpec(TaskSpec):
_reverse_props = {
_type = 'reverse'

_reverse_workflow_schema = {
"type": "object",
"properties": {
"requires": {"type": ["string", "array", "null"]}
},
"additionalProperties": False
"type": {"enum": [_type]},
"requires": types.UNIQUE_STRING_LIST
}
}

_schema = utils.merge_dicts(copy.deepcopy(TaskSpec._schema),
_reverse_props)
_reverse_workflow_schema)

def __init__(self, data):
super(ReverseWorkflowTaskSpec, self).__init__(data)
@ -216,7 +244,6 @@ class ReverseWorkflowTaskSpec(TaskSpec):

class TaskSpecList(base.BaseSpecList):
item_class = TaskSpec
_version = '2.0'

@staticmethod
def get_class(wf_type):
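Each workflow flavor now gets its own task schema: a direct task may carry type, join and the on-* transition lists, while a reverse task only adds 'requires', which must be a unique list of task names rather than the old bare string. Illustrative task bodies in the shape the two schemas target (not taken from the change itself):

# Direct-workflow task: transitions and an optional join.
direct_task = {
    'version': '2.0',
    'name': 'task3',
    'workflow': 'wf1 name="John Doe"',
    'join': 'all',                                  # or "one", or a non-negative integer
    'on-success': [{'task5': '<% $.my_val = 2 %>'}],
    'on-error': [{'task4': '<% $.my_val = 1 %>'}],
}

# Reverse-workflow task: dependencies are expressed with 'requires'.
reverse_task = {
    'version': '2.0',
    'name': 'task2',
    'action': 'std.echo output="Thanks!"',
    'requires': ['task1'],                          # must now be a list of names
}

Both carry the 'name' and 'version' keys the shared meta-schema requires; in a full definition the parser fills those in from the surrounding mapping, as the name/version injection code elsewhere in this change shows.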
@ -1,4 +1,5 @@
# Copyright 2014 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -12,13 +13,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from mistral.workbook import base
from mistral.workbook.v2 import base


# TODO(rakhmerov): In progress.


class TriggerSpec(base.BaseSpec):
_version = '2.0'

def __init__(self, data):
super(TriggerSpec, self).__init__(data)
@ -27,4 +28,3 @@ class TriggerSpec(base.BaseSpec):

class TriggerSpecList(base.BaseSpecList):
item_class = TriggerSpec
_version = '2.0'
@ -1,4 +1,5 @@
# Copyright 2014 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -12,31 +13,53 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from mistral.workbook import base
from mistral.workbook.v2 import actions as act
from mistral.workbook.v2 import base
from mistral.workbook.v2 import triggers as tr
from mistral.workbook.v2 import workflows as wf


class WorkbookSpec(base.BaseSpec):
# See http://json-schema.org

_action_schema = act.ActionSpec.get_schema(includes=None)

_workflow_schema = wf.WorkflowSpec.get_schema(includes=None)

_trigger_schema = tr.TriggerSpec.get_schema(includes=None)

_schema = {
"type": "object",
"properties": {
"version": {"value": "2.0"},
"name": {"type": "string"},
"description": {"type": "string"},
"tags": {"type": "array"},
"actions": {"type": "object"},
"workflows": {"type": "object"},
"triggers": {"type": "object"}
"version": {"enum": ["2.0", 2.0]},
"actions": {
"type": "object",
"minProperties": 1,
"patternProperties": {
"version": {"enum": ["2.0", 2.0]},
"^(?!version)\w+$": _action_schema
}
},
"workflows": {
"type": "object",
"minProperties": 1,
"patternProperties": {
"version": {"enum": ["2.0", 2.0]},
"^(?!version)\w+$": _workflow_schema
}
},
"triggers": {
"type": "object",
"minProperties": 1,
"patternProperties": {
"version": {"enum": ["2.0", 2.0]},
"^(?!version)\w+$": _trigger_schema
}
}
},
"required": ["name"],
"additionalProperties": False
}

_version = '2.0'

def __init__(self, data):
super(WorkbookSpec, self).__init__(data)
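At the workbook level each of actions, workflows and triggers is a non-empty mapping whose keys are entity names, and the ^(?!version)\w+$ pattern keeps a stray 'version' key from being validated as an entity definition. A quick regex sketch of that guard:

import re

entity_key = re.compile(r"^(?!version)\w+$")

print(bool(entity_key.match("wf1")))       # True: validated against the workflow schema
print(bool(entity_key.match("version")))   # False: handled by the "version" enum instead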
@ -1,4 +1,5 @@
# Copyright 2014 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -13,37 +14,51 @@
# limitations under the License.

from mistral import exceptions as exc
from mistral.workbook import base
from mistral.workbook import types
from mistral.workbook.v2 import base
from mistral.workbook.v2 import task_defaults
from mistral.workbook.v2 import tasks


class WorkflowSpec(base.BaseSpec):
# See http://json-schema.org

_direct_task_schema = tasks.DirectWorkflowTaskSpec.get_schema(
includes=None)

_reverse_task_schema = tasks.ReverseWorkflowTaskSpec.get_schema(
includes=None)

_task_defaults_schema = task_defaults.TaskDefaultsSpec.get_schema(
includes=None)

_schema = {
"type": "object",
"properties": {
"version": {"type": "string"},
"name": {"type": "string"},
"description": {"type": "string"},
"tags": {"type": "array"},
"type": {"enum": ["reverse", "direct"]},
"task-defaults": {"type": "object"},
"input": {"type": ["array", "null"]},
"output": {"type": ["string", "object", "array", "null"]},
"tasks": {"type": "object"},
"type": types.WORKFLOW_TYPE,
"task-defaults": _task_defaults_schema,
"input": types.UNIQUE_STRING_LIST,
"output": types.NONEMPTY_DICT,
"tasks": {
"type": "object",
"minProperties": 1,
"patternProperties": {
"^\w+$": {
"anyOf": [
_direct_task_schema,
_reverse_task_schema
]
}
}
},
},
"required": ["version", "name", "tasks"],
"required": ["tasks"],
"additionalProperties": False
}

_version = '2.0'

def __init__(self, data):
super(WorkflowSpec, self).__init__(data)

self._inject_version(['task-defaults'])

self._name = data['name']
self._description = data.get('description')
self._tags = data.get('tags', [])
@ -95,51 +110,10 @@ class WorkflowSpec(base.BaseSpec):

class WorkflowSpecList(base.BaseSpecList):
item_class = WorkflowSpec
_version = '2.0'


class WorkflowListSpec(base.BaseSpec):
# See http://json-schema.org
_schema = {
"type": "object",
"properties": {
"version": {"type": "string"},
},
"required": ["version"],
"additionalProperties": True
}

_version = '2.0'

def __init__(self, data):
super(WorkflowListSpec, self).__init__(data)

self._workflows = []

for k, v in data.iteritems():
if k == 'version':
continue

if not isinstance(v, dict):
raise exc.InvalidModelException(
"Invalid workflow definition. Please make sure your "
"workflow matches a dictionary type and there is no "
"typo in keyword 'version'"
)

v['name'] = k
self._inject_version([k])

self._workflows.append(WorkflowSpec(v))

def validate(self):
super(WorkflowListSpec, self).validate()

if len(self._data.keys()) < 2:
raise exc.InvalidModelException(
'At least one workflow must be in workflow list [data=%s]' %
self._data
)
class WorkflowListSpec(base.BaseListSpec):
item_class = WorkflowSpec

def get_workflows(self):
return self._workflows
return self.get_items()
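Net effect for a workflow body: 'tasks' is the only locally required key (name and version come from the meta-schema), 'input' must be a unique list of parameter names, 'output' a non-empty mapping, and every entry under 'tasks' has to satisfy either the direct or the reverse task schema. An illustrative body in the shape the schema targets; the parser injects the mapping key as each entity's name before validation, as the list-spec code above shows:

# Illustrative only.
workflow_body = {
    'name': 'wf1',
    'version': '2.0',
    'type': 'reverse',
    'input': ['name'],                     # unique, non-empty parameter names
    'tasks': {
        'task1': {'action': 'std.echo output="Hey"'},
        'task2': {'action': 'std.echo output="Hi!"',
                  'requires': ['task1']},
    },
}
# A workflow with an empty 'tasks' mapping, a duplicated input name or an
# unknown top-level key is now rejected when the definition is created,
# not halfway through a run.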