Deleting all v1 related stuff
* Old engine * Endpoints * Tests * Fixed launch.cmd script * Fixed the bug with incorrect RPC objects creation in tests Change-Id: I8579d0878e8a295823124d3c3452f3c26151d43a
This commit is contained in:
parent
a95d2e430a
commit
b38328513a
@ -5,4 +5,3 @@ REST API
|
||||
:maxdepth: 2
|
||||
|
||||
v2
|
||||
v1
|
||||
|
@ -1,58 +0,0 @@
|
||||
V1 API (deprecated)
|
||||
===================
|
||||
|
||||
.. autotype:: mistral.api.controllers.resource.Link
|
||||
:members:
|
||||
|
||||
Workbooks
|
||||
---------
|
||||
|
||||
.. rest-controller:: mistral.api.controllers.v1.workbook:WorkbooksController
|
||||
:webprefix: /v1/workbooks
|
||||
|
||||
.. autotype:: mistral.api.controllers.v1.workbook.Workbooks
|
||||
:members:
|
||||
|
||||
.. autotype:: mistral.api.controllers.v1.workbook.Workbook
|
||||
:members:
|
||||
|
||||
Workbook Definitions
|
||||
--------------------
|
||||
|
||||
.. rest-controller:: mistral.api.controllers.v1.workbook_definition:WorkbookDefinitionController
|
||||
:webprefix: /v1/workbooks/definition
|
||||
|
||||
Executions
|
||||
----------
|
||||
.. rest-controller:: mistral.api.controllers.v1.execution:ExecutionsController
|
||||
:webprefix: /v1/workbooks/executions
|
||||
|
||||
.. autotype:: mistral.api.controllers.v1.execution.Executions
|
||||
:members:
|
||||
|
||||
.. autotype:: mistral.api.controllers.v1.execution.Execution
|
||||
:members:
|
||||
|
||||
Listeners
|
||||
---------
|
||||
.. rest-controller:: mistral.api.controllers.v1.listener:ListenersController
|
||||
:webprefix: /v1/workbooks/listeners
|
||||
|
||||
.. autotype:: mistral.api.controllers.v1.listener.Listeners
|
||||
:members:
|
||||
|
||||
.. autotype:: mistral.api.controllers.v1.listener.Listener
|
||||
:members:
|
||||
|
||||
|
||||
Tasks
|
||||
-----
|
||||
|
||||
.. rest-controller:: mistral.api.controllers.v1.task:TasksController
|
||||
:webprefix: /v1/workbooks/executions/tasks
|
||||
|
||||
.. autotype:: mistral.api.controllers.v1.task.Tasks
|
||||
:members:
|
||||
|
||||
.. autotype:: mistral.api.controllers.v1.task.Task
|
||||
:members:
|
@ -18,9 +18,7 @@ from oslo.config import cfg
|
||||
import pecan
|
||||
|
||||
from mistral.api import access_control
|
||||
from mistral.api.hooks import engine
|
||||
from mistral import context as ctx
|
||||
from mistral.db.v1 import api as db_api_v1
|
||||
from mistral.db.v2 import api as db_api_v2
|
||||
from mistral.services import periodic
|
||||
|
||||
@ -41,21 +39,19 @@ def get_pecan_config():
|
||||
return pecan.configuration.conf_from_dict(cfg_dict)
|
||||
|
||||
|
||||
def setup_app(config=None, transport=None):
|
||||
def setup_app(config=None):
|
||||
if not config:
|
||||
config = get_pecan_config()
|
||||
|
||||
app_conf = dict(config.app)
|
||||
|
||||
db_api_v1.setup_db()
|
||||
db_api_v2.setup_db()
|
||||
|
||||
periodic.setup(transport)
|
||||
periodic.setup()
|
||||
|
||||
app = pecan.make_app(
|
||||
app_conf.pop('root'),
|
||||
hooks=lambda: [ctx.ContextHook(),
|
||||
engine.EngineHook(transport=transport)],
|
||||
hooks=lambda: [ctx.ContextHook()],
|
||||
logging=getattr(config, 'logging', {}),
|
||||
**app_conf
|
||||
)
|
||||
|
@ -19,7 +19,6 @@ from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.api.controllers.v1 import root as v1_root
|
||||
from mistral.api.controllers.v2 import root as v2_root
|
||||
from mistral.openstack.common import log as logging
|
||||
|
||||
@ -42,29 +41,28 @@ class APIVersion(resource.Resource):
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(id='v1.0',
|
||||
status='CURRENT',
|
||||
link=resource.Link(target_name='v1',
|
||||
href='http://example.com:9777/v1'))
|
||||
return cls(
|
||||
id='v1.0',
|
||||
status='CURRENT',
|
||||
link=resource.Link(
|
||||
target_name='v1',
|
||||
href='http://example.com:9777/v1'
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class RootController(object):
|
||||
|
||||
v1 = v1_root.Controller()
|
||||
v2 = v2_root.Controller()
|
||||
|
||||
@wsme_pecan.wsexpose([APIVersion])
|
||||
def index(self):
|
||||
LOG.debug("Fetching API versions.")
|
||||
|
||||
host_url_v1 = '%s/%s' % (pecan.request.host_url, 'v1')
|
||||
api_v1 = APIVersion(id='v1.0',
|
||||
status='SUPPORTED',
|
||||
link=resource.Link(href=host_url_v1, target='v1'))
|
||||
|
||||
host_url_v2 = '%s/%s' % (pecan.request.host_url, 'v2')
|
||||
api_v2 = APIVersion(id='v2.0',
|
||||
status='CURRENT',
|
||||
link=resource.Link(href=host_url_v2, target='v2'))
|
||||
api_v2 = APIVersion(
|
||||
id='v2.0',
|
||||
status='CURRENT',
|
||||
link=resource.Link(href=host_url_v2, target='v2')
|
||||
)
|
||||
|
||||
return [api_v1, api_v2]
|
||||
return [api_v2]
|
||||
|
@ -1,202 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import pecan
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.api.controllers.v1 import task
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.utils import rest_utils
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Execution(resource.Resource):
|
||||
"""Execution resource."""
|
||||
|
||||
id = wtypes.text
|
||||
workbook_name = wtypes.text
|
||||
task = wtypes.text
|
||||
state = wtypes.text
|
||||
# Context is a JSON object but since WSME doesn't support arbitrary
|
||||
# dictionaries we have to use text type convert to json and back manually.
|
||||
context = wtypes.text
|
||||
|
||||
def to_dict(self):
|
||||
d = super(Execution, self).to_dict()
|
||||
|
||||
if d.get('context'):
|
||||
d['context'] = json.loads(d['context'])
|
||||
|
||||
return d
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, d):
|
||||
e = cls()
|
||||
|
||||
for key, val in d.items():
|
||||
if hasattr(e, key):
|
||||
# Nonetype check for dictionary must be explicit
|
||||
if key == 'context' and val is not None:
|
||||
val = json.dumps(val)
|
||||
setattr(e, key, val)
|
||||
|
||||
return e
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(
|
||||
id='1234',
|
||||
workbook_name='flow',
|
||||
task='doit',
|
||||
state='SUCCESS',
|
||||
context='{}'
|
||||
)
|
||||
|
||||
|
||||
class Executions(resource.Resource):
|
||||
"""A collection of Execution resources."""
|
||||
|
||||
executions = [Execution]
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(executions=[Execution.sample()])
|
||||
|
||||
|
||||
class ExecutionsController(rest.RestController):
|
||||
def _get(self, id):
|
||||
return Execution.from_dict(db_api.execution_get(id).to_dict())
|
||||
|
||||
def _put(self, id, execution):
|
||||
db_model = db_api.execution_update(id, execution.to_dict())
|
||||
|
||||
return Execution.from_dict(db_model.to_dict())
|
||||
|
||||
def _delete(self, id):
|
||||
db_api.execution_delete(id)
|
||||
|
||||
def _get_all(self, **kwargs):
|
||||
executions = [Execution.from_dict(db_model.to_dict()) for db_model
|
||||
in db_api.executions_get(**kwargs)]
|
||||
|
||||
return Executions(executions=executions)
|
||||
|
||||
|
||||
class WorkbookExecutionsController(ExecutionsController):
|
||||
tasks = task.WorkbookTasksController()
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Execution, wtypes.text, wtypes.text)
|
||||
def get(self, workbook_name, id):
|
||||
"""Return the specified Execution."""
|
||||
LOG.info("Fetch execution [workbook_name=%s, id=%s]" %
|
||||
(workbook_name, id))
|
||||
|
||||
return self._get(id)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Execution, wtypes.text, wtypes.text, body=Execution)
|
||||
def put(self, workbook_name, id, execution):
|
||||
"""Update the specified Execution."""
|
||||
LOG.info("Update execution [workbook_name=%s, id=%s, execution=%s]" %
|
||||
(workbook_name, id, execution))
|
||||
|
||||
return self._put(id, execution)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Execution, wtypes.text, body=Execution,
|
||||
status_code=201)
|
||||
def post(self, workbook_name, execution):
|
||||
"""Create a new Execution."""
|
||||
LOG.info("Create execution [workbook_name=%s, execution=%s]" %
|
||||
(workbook_name, execution))
|
||||
|
||||
if (db_api.workbook_get(workbook_name)
|
||||
and db_api.workbook_definition_get(workbook_name)):
|
||||
context = None
|
||||
|
||||
if execution.context:
|
||||
context = json.loads(execution.context)
|
||||
|
||||
engine = pecan.request.context['engine']
|
||||
|
||||
values = engine.start_workflow_execution(
|
||||
execution.workbook_name,
|
||||
execution.task,
|
||||
context
|
||||
)
|
||||
|
||||
return Execution.from_dict(values)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(None, wtypes.text, wtypes.text, status_code=204)
|
||||
def delete(self, workbook_name, id):
|
||||
"""Delete the specified Execution."""
|
||||
LOG.info("Delete execution [workbook_name=%s, id=%s]" %
|
||||
(workbook_name, id))
|
||||
|
||||
return self._delete(id)
|
||||
|
||||
@wsme_pecan.wsexpose(Executions, wtypes.text)
|
||||
def get_all(self, workbook_name):
|
||||
"""Return all Executions."""
|
||||
LOG.info("Fetch executions [workbook_name=%s]" % workbook_name)
|
||||
|
||||
if db_api.workbook_get(workbook_name):
|
||||
return self._get_all(workbook_name=workbook_name)
|
||||
|
||||
|
||||
class RootExecutionsController(ExecutionsController):
|
||||
tasks = task.ExecutionTasksController()
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Execution, wtypes.text)
|
||||
def get(self, id):
|
||||
"""Return the specified Execution."""
|
||||
LOG.info("Fetch execution [id=%s]" % id)
|
||||
|
||||
return self._get(id)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Execution, wtypes.text, body=Execution)
|
||||
def put(self, id, execution):
|
||||
"""Update the specified Execution."""
|
||||
LOG.info("Update execution [id=%s, execution=%s]" %
|
||||
(id, execution))
|
||||
|
||||
return self._put(id, execution)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
|
||||
def delete(self, id):
|
||||
"""Delete the specified Execution."""
|
||||
LOG.info("Delete execution [id=%s]" % id)
|
||||
|
||||
return self._delete(id)
|
||||
|
||||
@wsme_pecan.wsexpose(Executions)
|
||||
def get_all(self):
|
||||
"""Return all Executions."""
|
||||
LOG.info("Fetch executions")
|
||||
|
||||
return self._get_all()
|
@ -1,123 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.utils import rest_utils
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Event(resource.Resource):
|
||||
"""Event descriptor."""
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls()
|
||||
|
||||
|
||||
class TaskEvent(Event):
|
||||
type = "TASK_STATE"
|
||||
task = wtypes.text
|
||||
|
||||
|
||||
class ExecutionEvent(Event):
|
||||
type = "EXECUTION_STATE"
|
||||
workbook_name = wtypes.text
|
||||
|
||||
|
||||
class Listener(resource.Resource):
|
||||
"""Listener resource."""
|
||||
|
||||
id = wtypes.text
|
||||
description = wtypes.text
|
||||
workbook_name = wtypes.text
|
||||
webhook = wtypes.text
|
||||
events = [Event]
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(id='1234',
|
||||
workbook_name='flow',
|
||||
description='listener for my flow',
|
||||
webhook='http://example.com/here',
|
||||
events=[Event.sample()])
|
||||
|
||||
|
||||
class Listeners(resource.Resource):
|
||||
"""A collection of Listener resources."""
|
||||
|
||||
listeners = [Listener]
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(listeners=[Listener.sample()])
|
||||
|
||||
|
||||
class ListenersController(rest.RestController):
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Listener, wtypes.text, wtypes.text)
|
||||
def get(self, workbook_name, id):
|
||||
"""Return the specified listener."""
|
||||
LOG.info("Fetch listener [workbook_name=%s, id=%s]" %
|
||||
(workbook_name, id))
|
||||
|
||||
return Listener.from_dict(db_api.listener_get(workbook_name, id))
|
||||
|
||||
@wsme_pecan.wsexpose(Listener, wtypes.text, wtypes.text, body=Listener)
|
||||
def put(self, workbook_name, id, listener):
|
||||
"""Update the specified listener."""
|
||||
LOG.info("Update listener [workbook_name=%s, id=%s, listener=%s]" %
|
||||
(workbook_name, id, listener))
|
||||
|
||||
values = db_api.listener_update(workbook_name, id, listener.to_dict())
|
||||
|
||||
return Listener.from_dict(values)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Listener, wtypes.text, body=Listener, status_code=201)
|
||||
def post(self, workbook_name, listener):
|
||||
"""Create a new listener."""
|
||||
LOG.info("Create listener [workbook_name=%s, listener=%s]" %
|
||||
(workbook_name, listener))
|
||||
|
||||
values = db_api.listener_create(workbook_name, listener.to_dict())
|
||||
|
||||
return Listener.from_dict(values)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(None, wtypes.text, wtypes.text, status_code=204)
|
||||
def delete(self, workbook_name, id):
|
||||
"""Delete the specified listener."""
|
||||
LOG.info("Delete listener [workbook_name=%s, id=%s]" %
|
||||
(workbook_name, id))
|
||||
|
||||
db_api.listener_delete(workbook_name, id)
|
||||
|
||||
@wsme_pecan.wsexpose(Listeners, wtypes.text)
|
||||
def get_all(self, workbook_name):
|
||||
"""Return all listeners."""
|
||||
LOG.info("Fetch listeners [workbook_name=%s]" % workbook_name)
|
||||
|
||||
listeners = [Listener.from_dict(values)
|
||||
for values in db_api.listeners_get(workbook_name)]
|
||||
|
||||
return Listeners(listeners=listeners)
|
@ -1,45 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import pecan
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.api.controllers.v1 import execution
|
||||
from mistral.api.controllers.v1 import task
|
||||
from mistral.api.controllers.v1 import workbook
|
||||
|
||||
|
||||
class RootResource(resource.Resource):
|
||||
"""Root resource for API version 1.
|
||||
|
||||
It references all other resources belonging to the API.
|
||||
"""
|
||||
|
||||
uri = wtypes.text
|
||||
|
||||
|
||||
class Controller(object):
|
||||
"""API root controller for version 1."""
|
||||
|
||||
workbooks = workbook.WorkbooksController()
|
||||
executions = execution.RootExecutionsController()
|
||||
tasks = task.RootTasksController()
|
||||
|
||||
@wsme_pecan.wsexpose(RootResource)
|
||||
def index(self):
|
||||
return RootResource(uri='%s/%s' % (pecan.request.host_url, 'v1'))
|
@ -1,204 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
|
||||
import pecan
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.utils import rest_utils
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Task(resource.Resource):
|
||||
"""Task resource."""
|
||||
|
||||
id = wtypes.text
|
||||
"The ID of the Task."
|
||||
|
||||
workbook_name = wtypes.text
|
||||
execution_id = wtypes.text
|
||||
name = wtypes.text
|
||||
description = wtypes.text
|
||||
state = wtypes.text
|
||||
tags = [wtypes.text]
|
||||
output = wtypes.text
|
||||
parameters = wtypes.text
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, d):
|
||||
e = cls()
|
||||
|
||||
for key, val in d.items():
|
||||
if hasattr(e, key):
|
||||
# Nonetype check for dictionary must be explicit
|
||||
if val is not None and (
|
||||
key == 'parameters' or key == 'output'):
|
||||
val = json.dumps(val)
|
||||
setattr(e, key, val)
|
||||
|
||||
return e
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(id='1234',
|
||||
workbook_name='notifier',
|
||||
execution_id='234',
|
||||
name='build_greeting',
|
||||
description='tell when you are done',
|
||||
state='OK',
|
||||
tags=['foo', 'fee'],
|
||||
output='{"task": {"build_greeting": '
|
||||
'{"greeting": "Hello, John Doe!"}}}',
|
||||
parameters='{ "first_name": "John", "last_name": "Doe"}')
|
||||
|
||||
|
||||
class Tasks(resource.Resource):
|
||||
"""A collection of tasks."""
|
||||
|
||||
tasks = [Task]
|
||||
"List of tasks."
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(tasks=[Task.sample()])
|
||||
|
||||
|
||||
class TasksController(rest.RestController):
|
||||
def _get(self, id):
|
||||
db_model = db_api.task_get(id)
|
||||
|
||||
return Task.from_dict(db_model.to_dict())
|
||||
|
||||
def _put(self, id, task):
|
||||
if db_api.task_get(id):
|
||||
engine = pecan.request.context['engine']
|
||||
|
||||
if task.output:
|
||||
try:
|
||||
raw_result = json.loads(task.output)
|
||||
except Exception:
|
||||
raw_result = task.output
|
||||
else:
|
||||
raw_result = None
|
||||
|
||||
values = engine.convey_task_result(id, task.state, raw_result)
|
||||
|
||||
return Task.from_dict(values)
|
||||
|
||||
def _get_all(self, **kwargs):
|
||||
tasks = [Task.from_dict(db_model.to_dict())
|
||||
for db_model in db_api.tasks_get(**kwargs)]
|
||||
|
||||
return Tasks(tasks=tasks)
|
||||
|
||||
|
||||
class WorkbookTasksController(TasksController):
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Task, wtypes.text, wtypes.text, wtypes.text)
|
||||
def get(self, workbook_name, execution_id, id):
|
||||
"""Return the specified task."""
|
||||
LOG.info("Fetch task [workbook_name=%s, execution_id=%s, id=%s]" %
|
||||
(workbook_name, execution_id, id))
|
||||
|
||||
return self._get(id)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Task, wtypes.text, wtypes.text, wtypes.text,
|
||||
body=Task)
|
||||
def put(self, workbook_name, execution_id, id, task):
|
||||
"""Update the specified task."""
|
||||
LOG.info("Update task "
|
||||
"[workbook_name=%s, execution_id=%s, id=%s, task=%s]" %
|
||||
(workbook_name, execution_id, id, task))
|
||||
|
||||
return self._put(id, task)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Tasks, wtypes.text, wtypes.text)
|
||||
def get_all(self, workbook_name, execution_id):
|
||||
"""Return all tasks within the execution."""
|
||||
db_api.ensure_execution_exists(execution_id)
|
||||
|
||||
LOG.info("Fetch tasks [workbook_name=%s, execution_id=%s]" %
|
||||
(workbook_name, execution_id))
|
||||
|
||||
return self._get_all(workbook_name=workbook_name,
|
||||
execution_id=execution_id)
|
||||
|
||||
|
||||
class ExecutionTasksController(TasksController):
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Task, wtypes.text, wtypes.text)
|
||||
def get(self, execution_id, id):
|
||||
"""Return the specified task."""
|
||||
LOG.info("Fetch task [execution_id=%s, id=%s]" %
|
||||
(execution_id, id))
|
||||
|
||||
return self._get(id)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Task, wtypes.text, wtypes.text,
|
||||
body=Task)
|
||||
def put(self, execution_id, id, task):
|
||||
"""Update the specified task."""
|
||||
LOG.info("Update task "
|
||||
"[execution_id=%s, id=%s, task=%s]" %
|
||||
(execution_id, id, task))
|
||||
|
||||
return self._put(id, task)
|
||||
|
||||
@wsme_pecan.wsexpose(Tasks, wtypes.text)
|
||||
def get_all(self, execution_id):
|
||||
"""Return all tasks within the execution."""
|
||||
LOG.info("Fetch tasks [execution_id=%s]" % execution_id)
|
||||
|
||||
return self._get_all(execution_id=execution_id)
|
||||
|
||||
|
||||
class RootTasksController(TasksController):
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Task, wtypes.text)
|
||||
def get(self, id):
|
||||
"""Return the specified task."""
|
||||
LOG.info("Fetch task [id=%s]" % id)
|
||||
|
||||
return self._get(id)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Task, wtypes.text,
|
||||
body=Task)
|
||||
def put(self, id, task):
|
||||
"""Update the specified task."""
|
||||
LOG.info("Update task "
|
||||
"[id=%s, task=%s]" %
|
||||
(id, task))
|
||||
|
||||
return self._put(id, task)
|
||||
|
||||
@wsme_pecan.wsexpose(Tasks)
|
||||
def get_all(self):
|
||||
"""Return all tasks within the execution."""
|
||||
LOG.info("Fetch tasks")
|
||||
|
||||
return self._get_all()
|
@ -1,113 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.api.controllers.v1 import execution
|
||||
from mistral.api.controllers.v1 import listener
|
||||
from mistral.api.controllers.v1 import workbook_definition
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import workbooks
|
||||
from mistral.utils import rest_utils
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
SCOPE_TYPES = wtypes.Enum(str, 'private', 'public')
|
||||
|
||||
|
||||
class Workbook(resource.Resource):
|
||||
"""Workbook resource."""
|
||||
|
||||
name = wtypes.text
|
||||
description = wtypes.text
|
||||
tags = [wtypes.text]
|
||||
scope = SCOPE_TYPES
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(name='flow',
|
||||
description='my workflow',
|
||||
tags=['large', 'expensive'])
|
||||
|
||||
|
||||
class Workbooks(resource.Resource):
|
||||
"""A collection of Workbooks."""
|
||||
|
||||
workbooks = [Workbook]
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(workbooks=[Workbook.sample()])
|
||||
|
||||
|
||||
class WorkbooksController(rest.RestController):
|
||||
definition = workbook_definition.WorkbookDefinitionController()
|
||||
listeners = listener.ListenersController()
|
||||
executions = execution.WorkbookExecutionsController()
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Workbook, wtypes.text)
|
||||
def get(self, name):
|
||||
"""Return the named workbook."""
|
||||
LOG.info("Fetch workbook [name=%s]" % name)
|
||||
|
||||
db_model = db_api.workbook_get(name)
|
||||
|
||||
return Workbook.from_dict(db_model.to_dict())
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Workbook, wtypes.text, body=Workbook)
|
||||
def put(self, name, workbook):
|
||||
"""Update the named workbook."""
|
||||
LOG.info("Update workbook [name=%s, workbook=%s]" % (name, workbook))
|
||||
|
||||
db_model = db_api.workbook_update(name, workbook.to_dict())
|
||||
|
||||
return Workbook.from_dict(db_model.to_dict())
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Workbook, body=Workbook, status_code=201)
|
||||
def post(self, workbook):
|
||||
"""Create a new workbook."""
|
||||
LOG.info("Create workbook [workbook=%s]" % workbook)
|
||||
|
||||
db_model = workbooks.create_workbook_v1(workbook.to_dict())
|
||||
|
||||
return Workbook.from_dict(db_model.to_dict())
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
|
||||
def delete(self, name):
|
||||
"""Delete the named workbook."""
|
||||
LOG.info("Delete workbook [name=%s]" % name)
|
||||
db_api.workbook_delete(name)
|
||||
|
||||
@wsme_pecan.wsexpose(Workbooks)
|
||||
def get_all(self):
|
||||
"""return all workbooks.
|
||||
|
||||
Where project_id is the same as the requestor or
|
||||
project_id is different but the scope is public.
|
||||
"""
|
||||
LOG.info("Fetch workbooks.")
|
||||
|
||||
workbooks_list = [Workbook.from_dict(db_model.to_dict())
|
||||
for db_model in db_api.workbooks_get()]
|
||||
|
||||
return Workbooks(workbooks=workbooks_list)
|
@ -1,49 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import pecan
|
||||
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import workbooks
|
||||
from mistral.utils import rest_utils
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WorkbookDefinitionController(pecan.rest.RestController):
|
||||
@rest_utils.wrap_pecan_controller_exception
|
||||
@pecan.expose()
|
||||
def get(self, workbook_name):
|
||||
"""Return the workbook definition."""
|
||||
LOG.info("Fetch workbook definition [workbook_name=%s]" %
|
||||
workbook_name)
|
||||
|
||||
return db_api.workbook_get(workbook_name).definition
|
||||
|
||||
@rest_utils.wrap_pecan_controller_exception
|
||||
@pecan.expose(content_type="text/plain")
|
||||
def put(self, workbook_name):
|
||||
"""Update workbook definition."""
|
||||
text = pecan.request.text
|
||||
|
||||
LOG.info("Update workbook definition [workbook_name=%s, text=%s]" %
|
||||
(workbook_name, text))
|
||||
|
||||
wb = workbooks.update_workbook_v1(workbook_name, {'definition': text})
|
||||
|
||||
return wb.definition
|
@ -136,8 +136,7 @@ class ActionExecutionsController(rest.RestController):
|
||||
([states.SUCCESS, states.ERROR], action_execution.state)
|
||||
)
|
||||
|
||||
engine = rpc.get_engine_client()
|
||||
values = engine.on_action_complete(id, result)
|
||||
values = rpc.get_engine_client().on_action_complete(id, result)
|
||||
|
||||
return ActionExecution.from_dict(values)
|
||||
|
||||
|
@ -1,31 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from pecan import hooks
|
||||
|
||||
from mistral import engine
|
||||
from mistral.openstack.common import log as logging
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class EngineHook(hooks.PecanHook):
|
||||
|
||||
def __init__(self, transport=None):
|
||||
self.transport = engine.get_transport(transport)
|
||||
self.engine = engine.EngineClient(self.transport)
|
||||
|
||||
def before(self, state):
|
||||
state.request.context['engine'] = self.engine
|
@ -42,8 +42,6 @@ from mistral.api import app
|
||||
from mistral import config
|
||||
from mistral import context as ctx
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral import engine
|
||||
from mistral.engine import executor
|
||||
from mistral.engine1 import default_engine as def_eng
|
||||
from mistral.engine1 import default_executor as def_executor
|
||||
from mistral.engine1 import rpc
|
||||
@ -61,13 +59,9 @@ def launch_executor(transport):
|
||||
server=cfg.CONF.executor.host
|
||||
)
|
||||
|
||||
# Since engine and executor are tightly coupled, use the engine
|
||||
# configuration to decide which executor to get.
|
||||
executor_v1 = executor.get_executor(cfg.CONF.engine.engine, transport)
|
||||
|
||||
executor_v2 = def_executor.DefaultExecutor(rpc.get_engine_client())
|
||||
|
||||
endpoints = [executor_v1, rpc.ExecutorServer(executor_v2)]
|
||||
endpoints = [rpc.ExecutorServer(executor_v2)]
|
||||
|
||||
server = messaging.get_rpc_server(
|
||||
transport,
|
||||
@ -87,10 +81,9 @@ def launch_engine(transport):
|
||||
server=cfg.CONF.engine.host
|
||||
)
|
||||
|
||||
engine_v1 = engine.get_engine(cfg.CONF.engine.engine, transport)
|
||||
engine_v2 = def_eng.DefaultEngine(rpc.get_engine_client())
|
||||
|
||||
endpoints = [engine_v1, rpc.EngineServer(engine_v2)]
|
||||
endpoints = [rpc.EngineServer(engine_v2)]
|
||||
|
||||
# Setup scheduler in engine.
|
||||
db_api.setup_db()
|
||||
@ -115,7 +108,7 @@ def launch_api(transport):
|
||||
server = simple_server.make_server(
|
||||
host,
|
||||
port,
|
||||
app.setup_app(transport=transport)
|
||||
app.setup_app()
|
||||
)
|
||||
|
||||
LOG.info("Mistral API is serving on http://%s:%s (PID=%s)" %
|
||||
|
@ -1,207 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import contextlib
|
||||
|
||||
from oslo.db import api as db_api
|
||||
|
||||
from mistral import exceptions
|
||||
from mistral.openstack.common import log as logging
|
||||
|
||||
# Workbooks
|
||||
|
||||
_BACKEND_MAPPING = {
|
||||
'sqlalchemy': 'mistral.db.v1.sqlalchemy.api',
|
||||
}
|
||||
|
||||
IMPL = db_api.DBAPI('sqlalchemy', backend_mapping=_BACKEND_MAPPING)
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def setup_db():
|
||||
IMPL.setup_db()
|
||||
|
||||
|
||||
def drop_db():
|
||||
IMPL.drop_db()
|
||||
|
||||
|
||||
# Transaction control.
|
||||
|
||||
|
||||
def start_tx():
|
||||
IMPL.start_tx()
|
||||
|
||||
|
||||
def commit_tx():
|
||||
IMPL.commit_tx()
|
||||
|
||||
|
||||
def rollback_tx():
|
||||
IMPL.rollback_tx()
|
||||
|
||||
|
||||
def end_tx():
|
||||
IMPL.end_tx()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def transaction():
|
||||
with IMPL.transaction():
|
||||
yield
|
||||
|
||||
|
||||
# Workbook
|
||||
|
||||
|
||||
def workbook_get(name):
|
||||
return IMPL.workbook_get(name)
|
||||
|
||||
|
||||
def workbook_create(values):
|
||||
return IMPL.workbook_create(values)
|
||||
|
||||
|
||||
def workbook_update(name, values):
|
||||
return IMPL.workbook_update(name, values)
|
||||
|
||||
|
||||
def workbook_delete(name):
|
||||
IMPL.workbook_delete(name)
|
||||
|
||||
|
||||
def workbooks_get():
|
||||
return IMPL.workbooks_get_all()
|
||||
|
||||
|
||||
def workbook_definition_get(workbook_name):
|
||||
definition = IMPL.workbook_get(workbook_name)['definition']
|
||||
if not definition:
|
||||
raise exceptions.NotFoundException("Definition of workbook "
|
||||
"%s is empty." % workbook_name)
|
||||
return definition
|
||||
|
||||
|
||||
def workbook_definition_put(workbook_name, text):
|
||||
return IMPL.workbook_update(workbook_name, {'definition': text})
|
||||
|
||||
|
||||
def workbooks_delete(**kwargs):
|
||||
return IMPL.workbooks_delete(**kwargs)
|
||||
|
||||
|
||||
# Executions
|
||||
|
||||
|
||||
def execution_get(id):
|
||||
return IMPL.execution_get(id)
|
||||
|
||||
|
||||
def ensure_execution_exists(execution_id):
|
||||
return IMPL.ensure_execution_exists(execution_id)
|
||||
|
||||
|
||||
def execution_create(workbook_name, values):
|
||||
return IMPL.execution_create(workbook_name, values)
|
||||
|
||||
|
||||
def execution_update(id, values):
|
||||
return IMPL.execution_update(id, values)
|
||||
|
||||
|
||||
def execution_delete(id):
|
||||
return IMPL.execution_delete(id)
|
||||
|
||||
|
||||
def executions_get(**kwargs):
|
||||
return IMPL.executions_get(**kwargs)
|
||||
|
||||
|
||||
def executions_delete(**kwargs):
|
||||
return IMPL.executions_delete(**kwargs)
|
||||
|
||||
|
||||
# Tasks
|
||||
|
||||
def task_get(id):
|
||||
return IMPL.task_get(id)
|
||||
|
||||
|
||||
def task_create(execution_id, values):
|
||||
return IMPL.task_create(execution_id, values)
|
||||
|
||||
|
||||
def task_update(id, values):
|
||||
return IMPL.task_update(id, values)
|
||||
|
||||
|
||||
def task_delete(id):
|
||||
return IMPL.task_delete(id)
|
||||
|
||||
|
||||
def tasks_delete(**kwargs):
|
||||
return IMPL.tasks_delete(**kwargs)
|
||||
|
||||
|
||||
def tasks_get(**kwargs):
|
||||
return IMPL.tasks_get(**kwargs)
|
||||
|
||||
|
||||
# Listeners
|
||||
|
||||
|
||||
def listener_get(workbook_name, id):
|
||||
return {}
|
||||
|
||||
|
||||
def listener_create(workbook_name, values):
|
||||
values['id'] = 1
|
||||
|
||||
return values
|
||||
|
||||
|
||||
def listener_update(workbook_name, id, values):
|
||||
return values
|
||||
|
||||
|
||||
def listener_delete(workbook_name, id):
|
||||
pass
|
||||
|
||||
|
||||
def listeners_get(workbook_name):
|
||||
return [{}]
|
||||
|
||||
|
||||
# Triggers
|
||||
|
||||
def trigger_create(values):
|
||||
return IMPL.trigger_create(values)
|
||||
|
||||
|
||||
def triggers_get(**kwargs):
|
||||
return IMPL.triggers_get_all(**kwargs)
|
||||
|
||||
|
||||
def trigger_update(trigger_id, values):
|
||||
return IMPL.trigger_update(trigger_id, values)
|
||||
|
||||
|
||||
def get_next_triggers(time):
|
||||
return IMPL.get_next_triggers(time)
|
||||
|
||||
|
||||
def triggers_delete(**kwargs):
|
||||
return IMPL.triggers_delete(**kwargs)
|
@ -1,383 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import contextlib
|
||||
import sys
|
||||
|
||||
from oslo.db import exception as db_exc
|
||||
import sqlalchemy as sa
|
||||
|
||||
from mistral import context
|
||||
from mistral.db.sqlalchemy import base as b
|
||||
from mistral.db.v1.sqlalchemy import models
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import log as logging
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_backend():
|
||||
"""Consumed by openstack common code.
|
||||
|
||||
The backend is this module itself.
|
||||
:return Name of db backend.
|
||||
"""
|
||||
return sys.modules[__name__]
|
||||
|
||||
|
||||
def setup_db():
|
||||
try:
|
||||
models.Workbook.metadata.create_all(b.get_engine())
|
||||
except sa.exc.OperationalError as e:
|
||||
raise exc.DBException("Failed to setup database: %s" % e)
|
||||
|
||||
|
||||
def drop_db():
|
||||
global _facade
|
||||
|
||||
try:
|
||||
models.Workbook.metadata.drop_all(b.get_engine())
|
||||
_facade = None
|
||||
except Exception as e:
|
||||
raise exc.DBException("Failed to drop database: %s" % e)
|
||||
|
||||
|
||||
# Transaction management.
|
||||
|
||||
def start_tx():
|
||||
b.start_tx()
|
||||
|
||||
|
||||
def commit_tx():
|
||||
b.commit_tx()
|
||||
|
||||
|
||||
def rollback_tx():
|
||||
b.rollback_tx()
|
||||
|
||||
|
||||
def end_tx():
|
||||
b.end_tx()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def transaction():
|
||||
try:
|
||||
start_tx()
|
||||
yield
|
||||
commit_tx()
|
||||
finally:
|
||||
end_tx()
|
||||
|
||||
|
||||
def _delete_all(model, session=None, **kwargs):
|
||||
query = b.model_query(model)
|
||||
query.filter_by(**kwargs).delete()
|
||||
|
||||
|
||||
# Triggers.
|
||||
|
||||
@b.session_aware()
|
||||
def trigger_create(values, session=None):
|
||||
trigger = models.Trigger()
|
||||
trigger.update(values.copy())
|
||||
|
||||
try:
|
||||
trigger.save(session)
|
||||
except db_exc.DBDuplicateEntry as e:
|
||||
raise exc.DBDuplicateEntry("Duplicate entry for Trigger: %s"
|
||||
% e.columns)
|
||||
|
||||
return trigger
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def trigger_update(trigger_id, values, session=None):
|
||||
trigger = _trigger_get(trigger_id)
|
||||
if trigger is None:
|
||||
raise exc.NotFoundException("Trigger not found [trigger_id=%s]" %
|
||||
trigger_id)
|
||||
|
||||
trigger.update(values.copy())
|
||||
|
||||
return trigger
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def trigger_delete(trigger_id, session=None):
|
||||
trigger = _trigger_get(trigger_id)
|
||||
if not trigger:
|
||||
raise exc.NotFoundException("Trigger not found [trigger_id=%s]" %
|
||||
trigger_id)
|
||||
|
||||
session.delete(trigger)
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def get_next_triggers(time, session=None):
|
||||
query = b.model_query(models.Trigger)
|
||||
query = query.filter(models.Trigger.next_execution_time < time)
|
||||
query = query.order_by(models.Trigger.next_execution_time)
|
||||
return query.all()
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def _trigger_get(trigger_id, session=None):
|
||||
query = b.model_query(models.Trigger)
|
||||
return query.filter_by(id=trigger_id).first()
|
||||
|
||||
|
||||
def trigger_get(trigger_id):
|
||||
trigger = _trigger_get(trigger_id)
|
||||
if not trigger:
|
||||
raise exc.NotFoundException("Trigger not found [trigger_id=%s]" %
|
||||
trigger_id)
|
||||
return trigger
|
||||
|
||||
|
||||
def _triggers_get_all(**kwargs):
|
||||
query = b.model_query(models.Trigger)
|
||||
return query.filter_by(**kwargs).all()
|
||||
|
||||
|
||||
def triggers_get_all(**kwargs):
|
||||
return _triggers_get_all(**kwargs)
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def triggers_delete(**kwargs):
|
||||
return _delete_all(models.Trigger, **kwargs)
|
||||
|
||||
|
||||
# Workbooks.
|
||||
|
||||
@b.session_aware()
|
||||
def workbook_create(values, session=None):
|
||||
workbook = models.Workbook()
|
||||
workbook.update(values.copy())
|
||||
workbook['project_id'] = context.ctx().project_id
|
||||
|
||||
try:
|
||||
workbook.save(session=session)
|
||||
except db_exc.DBDuplicateEntry as e:
|
||||
raise exc.DBDuplicateEntry("Duplicate entry for Workbook: %s"
|
||||
% e.columns)
|
||||
|
||||
return workbook
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def workbook_update(workbook_name, values, session=None):
|
||||
workbook = _workbook_get(workbook_name)
|
||||
|
||||
if not workbook:
|
||||
raise exc.NotFoundException(
|
||||
"Workbook not found [workbook_name=%s]" % workbook_name)
|
||||
|
||||
workbook.update(values.copy())
|
||||
workbook['project_id'] = context.ctx().project_id
|
||||
|
||||
return workbook
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def workbook_delete(workbook_name, session=None):
|
||||
workbook = _workbook_get(workbook_name)
|
||||
if not workbook:
|
||||
raise exc.NotFoundException(
|
||||
"Workbook not found [workbook_name=%s]" % workbook_name)
|
||||
|
||||
session.delete(workbook)
|
||||
|
||||
|
||||
def workbook_get(workbook_name):
|
||||
workbook = _workbook_get(workbook_name)
|
||||
|
||||
if not workbook:
|
||||
raise exc.NotFoundException(
|
||||
"Workbook not found [workbook_name=%s]" % workbook_name)
|
||||
|
||||
return workbook
|
||||
|
||||
|
||||
def workbooks_get_all(**kwargs):
|
||||
return _workbooks_get_all(**kwargs)
|
||||
|
||||
|
||||
def _workbooks_get_all(**kwargs):
|
||||
query = b.model_query(models.Workbook)
|
||||
proj = query.filter_by(project_id=context.ctx().project_id,
|
||||
**kwargs)
|
||||
public = query.filter_by(scope='public', **kwargs)
|
||||
return proj.union(public).all()
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def _workbook_get(workbook_name, session=None):
|
||||
query = b.model_query(models.Workbook)
|
||||
if context.ctx().is_admin:
|
||||
return query.filter_by(name=workbook_name).first()
|
||||
else:
|
||||
return query.filter_by(name=workbook_name,
|
||||
project_id=context.ctx().project_id).first()
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def workbooks_delete(**kwargs):
|
||||
return _delete_all(models.Workbook, **kwargs)
|
||||
|
||||
|
||||
# Workflow executions.
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def execution_create(workbook_name, values, session=None):
|
||||
execution = models.WorkflowExecution()
|
||||
execution.update(values.copy())
|
||||
execution.update({'workbook_name': workbook_name})
|
||||
|
||||
try:
|
||||
execution.save(session=session)
|
||||
except db_exc.DBDuplicateEntry as e:
|
||||
raise exc.DBDuplicateEntry("Duplicate entry for Execution: %s"
|
||||
% e.columns)
|
||||
|
||||
return execution
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def execution_update(execution_id, values, session=None):
|
||||
execution = _execution_get(execution_id)
|
||||
if not execution:
|
||||
raise exc.NotFoundException(
|
||||
"Execution not found [execution_id=%s]" % execution_id)
|
||||
|
||||
execution.update(values.copy())
|
||||
|
||||
return execution
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def execution_delete(execution_id, session=None):
|
||||
execution = _execution_get(execution_id)
|
||||
if not execution:
|
||||
raise exc.NotFoundException(
|
||||
"Execution not found [execution_id=%s]" % execution_id)
|
||||
|
||||
session.delete(execution)
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def executions_delete(**kwargs):
|
||||
return _delete_all(models.WorkflowExecution, **kwargs)
|
||||
|
||||
|
||||
def execution_get(execution_id):
|
||||
execution = _execution_get(execution_id)
|
||||
|
||||
if not execution:
|
||||
raise exc.NotFoundException(
|
||||
"Execution not found [execution_id=%s]" % execution_id)
|
||||
|
||||
return execution
|
||||
|
||||
|
||||
def ensure_execution_exists(execution_id):
|
||||
execution_get(execution_id)
|
||||
|
||||
|
||||
def executions_get(**kwargs):
|
||||
return _executions_get(**kwargs)
|
||||
|
||||
|
||||
def _executions_get(**kwargs):
|
||||
query = b.model_query(models.WorkflowExecution)
|
||||
return query.filter_by(**kwargs).all()
|
||||
|
||||
|
||||
def _execution_get(execution_id):
|
||||
query = b.model_query(models.WorkflowExecution)
|
||||
|
||||
return query.filter_by(id=execution_id).first()
|
||||
|
||||
|
||||
# Workflow tasks.
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def task_create(execution_id, values, session=None):
|
||||
task = models.Task()
|
||||
task.update(values)
|
||||
task.update({'execution_id': execution_id})
|
||||
|
||||
try:
|
||||
task.save(session=session)
|
||||
except db_exc.DBDuplicateEntry as e:
|
||||
raise exc.DBDuplicateEntry("Duplicate entry for Task: %s"
|
||||
% e.columns)
|
||||
|
||||
return task
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def task_update(task_id, values, session=None):
|
||||
task = _task_get(task_id)
|
||||
if not task:
|
||||
raise exc.NotFoundException(
|
||||
"Task not found [task_id=%s]" % task_id)
|
||||
|
||||
task.update(values.copy())
|
||||
|
||||
return task
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def task_delete(task_id, session=None):
|
||||
task = _task_get(task_id)
|
||||
if not task:
|
||||
raise exc.NotFoundException(
|
||||
"Task not found [task_id=%s]" % task_id)
|
||||
|
||||
session.delete(task)
|
||||
|
||||
|
||||
@b.session_aware()
|
||||
def tasks_delete(**kwargs):
|
||||
return _delete_all(models.Task, **kwargs)
|
||||
|
||||
|
||||
def task_get(task_id):
|
||||
task = _task_get(task_id)
|
||||
if not task:
|
||||
raise exc.NotFoundException(
|
||||
"Task not found [task_id=%s]" % task_id)
|
||||
|
||||
return task
|
||||
|
||||
|
||||
def _task_get(task_id):
|
||||
query = b.model_query(models.Task)
|
||||
return query.filter_by(id=task_id).first()
|
||||
|
||||
|
||||
def tasks_get(**kwargs):
|
||||
return _tasks_get(**kwargs)
|
||||
|
||||
|
||||
def _tasks_get(**kwargs):
|
||||
query = b.model_query(models.Task)
|
||||
return query.filter_by(**kwargs).all()
|
@ -1,94 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from mistral.db.sqlalchemy import model_base as mb
|
||||
from mistral.db.sqlalchemy import types as st
|
||||
|
||||
|
||||
class Workbook(mb.MistralModelBase):
|
||||
"""Contains info about workbook (including definition in Mistral DSL)."""
|
||||
|
||||
__tablename__ = 'workbooks'
|
||||
|
||||
__table_args__ = (
|
||||
sa.UniqueConstraint('name'),
|
||||
)
|
||||
|
||||
id = mb.id_column()
|
||||
name = sa.Column(sa.String(80), primary_key=True)
|
||||
definition = sa.Column(sa.Text(), nullable=True)
|
||||
description = sa.Column(sa.String(200))
|
||||
tags = sa.Column(st.JsonListType())
|
||||
scope = sa.Column(sa.String(80))
|
||||
project_id = sa.Column(sa.String(80))
|
||||
trust_id = sa.Column(sa.String(80))
|
||||
|
||||
|
||||
class WorkflowExecution(mb.MistralModelBase):
|
||||
"""Contains info about particular workflow execution."""
|
||||
|
||||
__tablename__ = 'workflow_executions'
|
||||
|
||||
id = mb.id_column()
|
||||
workbook_name = sa.Column(sa.String(80))
|
||||
task = sa.Column(sa.String(80))
|
||||
state = sa.Column(sa.String(20))
|
||||
context = sa.Column(st.JsonDictType())
|
||||
|
||||
|
||||
class Task(mb.MistralModelBase):
|
||||
"""Contains info about particular task."""
|
||||
|
||||
__tablename__ = 'tasks'
|
||||
|
||||
id = mb.id_column()
|
||||
name = sa.Column(sa.String(80))
|
||||
requires = sa.Column(st.JsonListType())
|
||||
workbook_name = sa.Column(sa.String(80))
|
||||
execution_id = sa.Column(sa.String(36))
|
||||
description = sa.Column(sa.String(200))
|
||||
task_spec = sa.Column(st.JsonDictType())
|
||||
action_spec = sa.Column(st.JsonDictType())
|
||||
state = sa.Column(sa.String(20))
|
||||
tags = sa.Column(st.JsonListType())
|
||||
|
||||
# Data Flow properties.
|
||||
in_context = sa.Column(st.JsonDictType())
|
||||
parameters = sa.Column(st.JsonDictType())
|
||||
output = sa.Column(st.JsonDictType())
|
||||
|
||||
# Runtime context like iteration_no of a repeater.
|
||||
# Effectively internal engine properties which will be used to determine
|
||||
# execution of a task.
|
||||
task_runtime_context = sa.Column(st.JsonDictType())
|
||||
|
||||
|
||||
class Trigger(mb.MistralModelBase):
|
||||
"""Contains all info about trigger."""
|
||||
|
||||
__tablename__ = 'triggers'
|
||||
|
||||
__table_args__ = (
|
||||
sa.UniqueConstraint('name'),
|
||||
)
|
||||
|
||||
id = mb.id_column()
|
||||
name = sa.Column(sa.String(80), nullable=False)
|
||||
pattern = sa.Column(sa.String(20), nullable=False)
|
||||
next_execution_time = sa.Column(sa.DateTime, nullable=False)
|
||||
workbook_name = sa.Column(sa.String(80), nullable=False)
|
@ -1,601 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# TODO(rakhmerov): Deprecated in favor of package 'mistral.engine1'.
|
||||
|
||||
import abc
|
||||
import copy
|
||||
|
||||
import eventlet
|
||||
from oslo.config import cfg
|
||||
from oslo import messaging
|
||||
import six
|
||||
from stevedore import driver
|
||||
|
||||
# Submodules of mistral.engine will throw NoSuchOptError if configuration
|
||||
# options required at top level of this __init__.py are not imported before
|
||||
# the submodules are referenced.
|
||||
cfg.CONF.import_opt('workflow_trace_log_name', 'mistral.config')
|
||||
|
||||
from mistral import context as auth_context
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral.engine import data_flow
|
||||
from mistral.engine import retry
|
||||
from mistral.engine import states
|
||||
from mistral.engine import workflow
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import action_manager as a_m
|
||||
from mistral.workbook import parser as spec_parser
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
WF_TRACE = logging.getLogger(cfg.CONF.workflow_trace_log_name)
|
||||
|
||||
|
||||
def get_transport(transport=None):
|
||||
return transport if transport else messaging.get_transport(cfg.CONF)
|
||||
|
||||
|
||||
def get_engine(name, transport):
|
||||
mgr = driver.DriverManager(
|
||||
namespace='mistral.engine.drivers',
|
||||
name=name,
|
||||
invoke_on_load=True,
|
||||
invoke_kwds={'transport': transport})
|
||||
return mgr.driver
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class Engine(object):
|
||||
"""Abstract engine for workflow execution."""
|
||||
|
||||
transport = None
|
||||
|
||||
def __init__(self, transport=None):
|
||||
self.transport = get_transport(transport)
|
||||
|
||||
@abc.abstractmethod
|
||||
def _run_task(self, task_id, action_name, action_params):
|
||||
raise NotImplementedError()
|
||||
|
||||
def start_workflow_execution(self, cntx, **kwargs):
|
||||
"""Starts a workflow execution based on the specified workbook name
|
||||
and target task.
|
||||
|
||||
:param cntx: a request context dict
|
||||
:type cntx: MistralContext
|
||||
:param kwargs: a dict of method arguments
|
||||
:type kwargs: dict
|
||||
:return: Workflow execution.
|
||||
"""
|
||||
workbook_name = kwargs.get('workbook_name')
|
||||
task_name = kwargs.get('task_name')
|
||||
context = kwargs.get('context', None)
|
||||
|
||||
context = copy.copy(context) if context else {}
|
||||
|
||||
WF_TRACE.info("New execution started - [workbook_name = '%s', "
|
||||
"task_name = '%s']" % (workbook_name, task_name))
|
||||
|
||||
db_api.start_tx()
|
||||
|
||||
# Persist execution and tasks in DB.
|
||||
try:
|
||||
workbook = self._get_workbook(workbook_name)
|
||||
execution = self._create_execution(workbook_name, task_name,
|
||||
context)
|
||||
|
||||
# Create the whole tree of tasks required by target task, including
|
||||
# target task itself.
|
||||
tasks = self._create_tasks(
|
||||
workflow.find_workflow_tasks(workbook, task_name),
|
||||
workbook,
|
||||
workbook_name, execution.id
|
||||
)
|
||||
|
||||
# Create a list of tasks that can be executed immediately (have
|
||||
# their requirements satisfied, or, at that point, rather don't
|
||||
# have them at all) along with the list of tasks that require some
|
||||
# delay before they'll be executed.
|
||||
tasks_to_start, delayed_tasks = workflow.find_resolved_tasks(tasks)
|
||||
|
||||
# Populate context with special variables such as `openstack` and
|
||||
# `__execution`.
|
||||
self._add_variables_to_data_flow_context(context, execution)
|
||||
|
||||
# Update task with new context and params.
|
||||
executables = data_flow.prepare_tasks(tasks_to_start,
|
||||
context,
|
||||
workbook,
|
||||
tasks)
|
||||
|
||||
db_api.commit_tx()
|
||||
except Exception as e:
|
||||
msg = "Failed to start workflow execution: %s" % e
|
||||
LOG.exception(msg)
|
||||
raise exc.EngineException(msg)
|
||||
finally:
|
||||
db_api.end_tx()
|
||||
|
||||
for task in delayed_tasks:
|
||||
self._schedule_run(workbook, task, context)
|
||||
|
||||
for task_id, action_name, action_params in executables:
|
||||
self._run_task(task_id, action_name, action_params)
|
||||
|
||||
return execution.to_dict()
|
||||
|
||||
def stop_workflow_execution(self, cntx, **kwargs):
|
||||
"""Stops the workflow execution with the given id.
|
||||
|
||||
:param cntx: a request context dict
|
||||
:type cntx: dict
|
||||
:param kwargs: a dict of method arguments
|
||||
:type kwargs: dict
|
||||
:return: Workflow execution.
|
||||
"""
|
||||
execution_id = kwargs.get('execution_id')
|
||||
|
||||
return db_api.execution_update(
|
||||
execution_id, {"state": states.STOPPED}
|
||||
).to_dict()
|
||||
|
||||
def convey_task_result(self, cntx, **kwargs):
|
||||
"""Conveys task result to Mistral Engine.
|
||||
|
||||
This method should be used by clients of Mistral Engine to update
|
||||
state of a task once task action has been performed. One of the
|
||||
clients of this method is Mistral REST API server that receives
|
||||
task result from the outside action handlers.
|
||||
|
||||
Note: calling this method serves an event notifying Mistral that
|
||||
it possibly needs to move the workflow on, i.e. run other workflow
|
||||
tasks for which all dependencies are satisfied.
|
||||
|
||||
:param cntx: a request context dict
|
||||
:type cntx: dict
|
||||
:param kwargs: a dict of method arguments
|
||||
:type kwargs: dict
|
||||
:return: Task.
|
||||
"""
|
||||
task_id = kwargs.get('task_id')
|
||||
state = kwargs.get('state')
|
||||
result = kwargs.get('result')
|
||||
|
||||
db_api.start_tx()
|
||||
|
||||
try:
|
||||
# TODO(rakhmerov): validate state transition
|
||||
task = db_api.task_get(task_id)
|
||||
workbook = self._get_workbook(task.workbook_name)
|
||||
|
||||
if state == states.ERROR:
|
||||
WF_TRACE.info("Task '%s' [%s -> %s]" %
|
||||
(task.name, task.state, state))
|
||||
else:
|
||||
WF_TRACE.info("Task '%s' [%s -> %s, result = %s]" %
|
||||
(task.name, task.state, state, result))
|
||||
|
||||
action_name = spec_parser.get_task_spec(
|
||||
task.task_spec).get_full_action_name()
|
||||
|
||||
if not a_m.get_action_class(action_name):
|
||||
action = a_m.resolve_adhoc_action_name(workbook, action_name)
|
||||
|
||||
if not action:
|
||||
msg = 'Unknown action [workbook=%s, action=%s]' % (
|
||||
workbook, action_name)
|
||||
raise exc.ActionException(msg)
|
||||
|
||||
result = a_m.convert_adhoc_action_result(workbook,
|
||||
action_name,
|
||||
result)
|
||||
|
||||
task_output = data_flow.get_task_output(task, result)
|
||||
|
||||
# Update task state.
|
||||
task, context = self._update_task(workbook, task, state,
|
||||
task_output)
|
||||
|
||||
self._create_next_tasks(task, workbook)
|
||||
|
||||
# At that point, sqlalchemy tries to flush the changes in task
|
||||
# to the db and, in some cases, hits sqlite database lock
|
||||
# established by another thread of convey_task_results executed
|
||||
# at the same time (for example, as a result of two std.echo
|
||||
# tasks started one after another within the same self._run_task
|
||||
# call). By separating the transaction into two, we creating a
|
||||
# window of opportunity for task changes to be flushed. The
|
||||
# possible ramifications are unclear at the moment and should be
|
||||
# a subject of further review.
|
||||
|
||||
# TODO(rakhmerov): review the possibility to use a single
|
||||
# transaction after switching to the db with better support of
|
||||
# concurrency.
|
||||
db_api.commit_tx()
|
||||
except Exception as e:
|
||||
msg = "Failed to save task result: %s" % e
|
||||
LOG.exception(msg)
|
||||
raise exc.EngineException(msg)
|
||||
finally:
|
||||
db_api.end_tx()
|
||||
|
||||
db_api.start_tx()
|
||||
|
||||
try:
|
||||
execution = db_api.execution_get(task.execution_id)
|
||||
|
||||
# Determine what tasks need to be started.
|
||||
tasks = db_api.tasks_get(execution_id=execution.id)
|
||||
|
||||
new_exec_state = self._determine_execution_state(execution, tasks)
|
||||
|
||||
if execution.state != new_exec_state:
|
||||
WF_TRACE.info(
|
||||
"Execution '%s' [%s -> %s]" %
|
||||
(execution.id, execution.state, new_exec_state)
|
||||
)
|
||||
|
||||
execution = db_api.execution_update(execution.id, {
|
||||
"state": new_exec_state
|
||||
})
|
||||
|
||||
LOG.info("Changed execution state: %s" % execution)
|
||||
|
||||
# Create a list of tasks that can be executed immediately (have
|
||||
# their requirements satisfied) along with the list of tasks that
|
||||
# require some delay before they'll be executed.
|
||||
tasks_to_start, delayed_tasks = workflow.find_resolved_tasks(tasks)
|
||||
|
||||
# Populate context with special variables such as `openstack` and
|
||||
# `__execution`.
|
||||
self._add_variables_to_data_flow_context(context, execution)
|
||||
|
||||
# Update task with new context and params.
|
||||
executables = data_flow.prepare_tasks(tasks_to_start,
|
||||
context,
|
||||
workbook,
|
||||
tasks)
|
||||
|
||||
db_api.commit_tx()
|
||||
except Exception as e:
|
||||
msg = "Failed to queue next batch of tasks: %s" % e
|
||||
LOG.exception(msg)
|
||||
raise exc.EngineException(msg)
|
||||
finally:
|
||||
db_api.end_tx()
|
||||
|
||||
if states.is_stopped_or_finished(execution.state):
|
||||
return task
|
||||
|
||||
for task in delayed_tasks:
|
||||
self._schedule_run(workbook, task, context)
|
||||
|
||||
for task_id, action_name, action_params in executables:
|
||||
self._run_task(task_id, action_name, action_params)
|
||||
|
||||
return task.to_dict()
|
||||
|
||||
def get_workflow_execution_state(self, cntx, **kwargs):
|
||||
"""Gets the workflow execution state.
|
||||
|
||||
:param cntx: a request context dict
|
||||
:type cntx: dict
|
||||
:param kwargs: a dict of method arguments
|
||||
:type kwargs: dict
|
||||
:return: Current workflow state.
|
||||
"""
|
||||
workbook_name = kwargs.get('workbook_name')
|
||||
execution_id = kwargs.get('execution_id')
|
||||
|
||||
execution = db_api.execution_get(execution_id)
|
||||
|
||||
if not execution:
|
||||
raise exc.EngineException("Workflow execution not found "
|
||||
"[workbook_name=%s, execution_id=%s]"
|
||||
% (workbook_name, execution_id))
|
||||
|
||||
return execution.state
|
||||
|
||||
def get_task_state(self, cntx, **kwargs):
|
||||
"""Gets task state.
|
||||
|
||||
:param cntx: a request context dict
|
||||
:type cntx: dict
|
||||
:param kwargs: a dict of method arguments
|
||||
:type kwargs: dict
|
||||
:return: Current task state.
|
||||
"""
|
||||
task_id = kwargs.get('task_id')
|
||||
|
||||
task = db_api.task_get(task_id)
|
||||
|
||||
if not task:
|
||||
raise exc.EngineException("Task not found.")
|
||||
|
||||
return task.state
|
||||
|
||||
@classmethod
|
||||
def _create_execution(cls, workbook_name, task_name, context):
|
||||
return db_api.execution_create(workbook_name, {
|
||||
"workbook_name": workbook_name,
|
||||
"task": task_name,
|
||||
"state": states.RUNNING,
|
||||
"context": context
|
||||
})
|
||||
|
||||
@classmethod
|
||||
def _add_variables_to_data_flow_context(cls, df_ctx, execution):
|
||||
db_workbook = db_api.workbook_get(execution.workbook_name)
|
||||
|
||||
data_flow.add_openstack_data_to_context(df_ctx, db_workbook)
|
||||
data_flow.add_execution_to_context(df_ctx, execution)
|
||||
|
||||
@classmethod
|
||||
def _create_next_tasks(cls, task, workbook):
|
||||
tasks = workflow.find_tasks_after_completion(task, workbook)
|
||||
|
||||
task_execs = cls._create_tasks(
|
||||
tasks,
|
||||
workbook,
|
||||
task.workbook_name,
|
||||
task.execution_id
|
||||
)
|
||||
|
||||
return workflow.find_resolved_tasks(task_execs)
|
||||
|
||||
@classmethod
|
||||
def _create_tasks(cls, task_list, workbook, workbook_name, execution_id):
|
||||
tasks = {}
|
||||
|
||||
for task in task_list:
|
||||
state, task_runtime_context = retry.get_task_runtime(task)
|
||||
action_spec = workbook.get_action(task.get_full_action_name())
|
||||
|
||||
db_task = db_api.task_create(execution_id, {
|
||||
"name": task.name,
|
||||
"requires": [tasks[name].id for name
|
||||
in task.get_requires()],
|
||||
"task_spec": task.to_dict(),
|
||||
"action_spec": {} if not action_spec
|
||||
else action_spec.to_dict(),
|
||||
"state": state,
|
||||
"tags": task.get_property("tags", None),
|
||||
"task_runtime_context": task_runtime_context,
|
||||
"workbook_name": workbook_name
|
||||
})
|
||||
|
||||
tasks[db_task.name] = db_task
|
||||
|
||||
return tasks.values()
|
||||
|
||||
@classmethod
|
||||
def _get_workbook(cls, workbook_name):
|
||||
wb = db_api.workbook_get(workbook_name)
|
||||
|
||||
return spec_parser.get_workbook_spec_from_yaml(wb.definition)
|
||||
|
||||
@classmethod
|
||||
def _determine_execution_state(cls, execution, tasks):
|
||||
if workflow.is_error(tasks):
|
||||
return states.ERROR
|
||||
|
||||
if workflow.is_success(tasks) or workflow.is_finished(tasks):
|
||||
return states.SUCCESS
|
||||
|
||||
return execution.state
|
||||
|
||||
@classmethod
|
||||
def _update_task(cls, workbook, task, state, task_output):
|
||||
"""Update the task with the runtime information. The outbound_context
|
||||
for this task is also calculated.
|
||||
:return: task, outbound_context. task is the updated task and
|
||||
computed outbound context.
|
||||
"""
|
||||
task_spec = workbook.tasks.get(task.name)
|
||||
task_runtime_context = task.task_runtime_context
|
||||
|
||||
# Compute the outbound_context, state and exec_flow_context.
|
||||
outbound_context = data_flow.get_outbound_context(task, task_output)
|
||||
state, task_runtime_context = retry.get_task_runtime(
|
||||
task_spec, state, outbound_context, task_runtime_context)
|
||||
|
||||
# Update the task.
|
||||
update_values = {
|
||||
"state": state,
|
||||
"output": task_output,
|
||||
"task_runtime_context": task_runtime_context
|
||||
}
|
||||
|
||||
task = db_api.task_update(task["id"], update_values)
|
||||
|
||||
return task, outbound_context
|
||||
|
||||
def _schedule_run(self, workbook, task, outbound_context):
|
||||
"""Schedules task to run after the delay defined in the task
|
||||
specification. If no delay is specified this method is a no-op.
|
||||
"""
|
||||
|
||||
# TODO(rakhmerov): Reavaluate parameter 'context' once it's clear
|
||||
# how to work with trust chains correctly in keystone
|
||||
# (waiting for corresponding changes to be made).
|
||||
def run_delayed_task(context):
|
||||
"""Runs the delayed task. Performs all the steps required to setup
|
||||
a task to run which are not already done. This is mostly code
|
||||
copied over from convey_task_result.
|
||||
|
||||
:param context Mistral authentication context inherited from a
|
||||
caller thread.
|
||||
"""
|
||||
auth_context.set_ctx(context)
|
||||
|
||||
db_api.start_tx()
|
||||
|
||||
try:
|
||||
execution_id = task.execution_id
|
||||
execution = db_api.execution_get(execution_id)
|
||||
|
||||
tasks = db_api.tasks_get(execution_id=execution_id)
|
||||
|
||||
# Change state from DELAYED to RUNNING.
|
||||
|
||||
WF_TRACE.info("Task '%s' [%s -> %s]" %
|
||||
(task.name, task.state, states.RUNNING))
|
||||
executables = data_flow.prepare_tasks([task],
|
||||
outbound_context,
|
||||
workbook,
|
||||
tasks)
|
||||
db_api.commit_tx()
|
||||
finally:
|
||||
db_api.end_tx()
|
||||
|
||||
if states.is_stopped_or_finished(execution.state):
|
||||
return
|
||||
|
||||
for task_id, action_name, action_params in executables:
|
||||
self._run_task(task_id, action_name, action_params)
|
||||
|
||||
task_spec = workbook.tasks.get(task.name)
|
||||
retries, break_on, delay_sec = task_spec.get_retry_parameters()
|
||||
|
||||
if delay_sec > 0:
|
||||
# Run the task after the specified delay.
|
||||
eventlet.spawn_after(delay_sec, run_delayed_task,
|
||||
context=auth_context.ctx())
|
||||
else:
|
||||
LOG.warn("No delay specified for task(id=%s) name=%s. Not "
|
||||
"scheduling for execution." % (task.id, task.name))
|
||||
|
||||
|
||||
class EngineClient(object):
|
||||
"""RPC client for the Engine."""
|
||||
|
||||
def __init__(self, transport):
|
||||
"""Construct an RPC client for the Engine.
|
||||
|
||||
:param transport: a messaging transport handle
|
||||
:type transport: Transport
|
||||
"""
|
||||
serializer = auth_context.RpcContextSerializer(
|
||||
auth_context.JsonPayloadSerializer())
|
||||
target = messaging.Target(topic=cfg.CONF.engine.topic)
|
||||
self._client = messaging.RPCClient(transport, target,
|
||||
serializer=serializer)
|
||||
|
||||
def start_workflow_execution(self, workbook_name, task_name, context=None):
|
||||
"""Starts a workflow execution based on the specified workbook name
|
||||
and target task.
|
||||
|
||||
:param workbook_name: Workbook name
|
||||
:param task_name: Target task name
|
||||
:param context: Execution context which defines a workflow input
|
||||
:return: Workflow execution.
|
||||
"""
|
||||
kwargs = {
|
||||
'workbook_name': workbook_name,
|
||||
'task_name': task_name,
|
||||
'context': context
|
||||
}
|
||||
|
||||
return self._client.call(
|
||||
auth_context.ctx(),
|
||||
'start_workflow_execution',
|
||||
**kwargs
|
||||
)
|
||||
|
||||
def stop_workflow_execution(self, workbook_name, execution_id):
|
||||
"""Stops the workflow execution with the given id.
|
||||
|
||||
:param workbook_name: Workbook name.
|
||||
:param execution_id: Workflow execution id.
|
||||
:return: Workflow execution.
|
||||
"""
|
||||
kwargs = {
|
||||
'workbook_name': workbook_name,
|
||||
'execution_id': execution_id
|
||||
}
|
||||
|
||||
return self._client.call(
|
||||
auth_context.ctx(),
|
||||
'stop_workflow_execution',
|
||||
**kwargs
|
||||
)
|
||||
|
||||
def convey_task_result(self, task_id, state, result):
|
||||
"""Conveys task result to Mistral Engine.
|
||||
|
||||
This method should be used by clients of Mistral Engine to update
|
||||
state of a task once task action has been performed. One of the
|
||||
clients of this method is Mistral REST API server that receives
|
||||
task result from the outside action handlers.
|
||||
|
||||
Note: calling this method serves an event notifying Mistral that
|
||||
it possibly needs to move the workflow on, i.e. run other workflow
|
||||
tasks for which all dependencies are satisfied.
|
||||
|
||||
:param task_id: Task id.
|
||||
:param state: New task state.
|
||||
:param result: Task result data.
|
||||
:return: Task.
|
||||
"""
|
||||
kwargs = {
|
||||
'task_id': task_id,
|
||||
'state': state,
|
||||
'result': result
|
||||
}
|
||||
|
||||
return self._client.call(
|
||||
auth_context.ctx(),
|
||||
'convey_task_result',
|
||||
**kwargs
|
||||
)
|
||||
|
||||
def get_workflow_execution_state(self, workbook_name, execution_id):
|
||||
"""Gets the workflow execution state.
|
||||
|
||||
:param workbook_name: Workbook name.
|
||||
:param execution_id: Workflow execution id.
|
||||
:return: Current workflow state.
|
||||
"""
|
||||
kwargs = {
|
||||
'workbook_name': workbook_name,
|
||||
'execution_id': execution_id
|
||||
}
|
||||
|
||||
return self._client.call(
|
||||
auth_context.ctx(),
|
||||
'get_workflow_execution_state',
|
||||
**kwargs
|
||||
)
|
||||
|
||||
def get_task_state(self, workbook_name, execution_id, task_id):
|
||||
"""Gets task state.
|
||||
|
||||
:param workbook_name: Workbook name.
|
||||
:param execution_id: Workflow execution id.
|
||||
:param task_id: Task id.
|
||||
:return: Current task state.
|
||||
"""
|
||||
kwargs = {
|
||||
'workbook_name': workbook_name,
|
||||
'executioin_id': execution_id,
|
||||
'task_id': task_id
|
||||
}
|
||||
|
||||
return self._client.call(
|
||||
auth_context.ctx(),
|
||||
'get_task_state',
|
||||
**kwargs
|
||||
)
|
@ -1,187 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# TODO(rakhmerov): Deprecated in favor of package 'mistral.engine1'.
|
||||
|
||||
import inspect
|
||||
from oslo.config import cfg
|
||||
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral.engine import states
|
||||
from mistral import exceptions as exc
|
||||
from mistral import expressions as expr
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import action_manager as a_m
|
||||
from mistral.services import security
|
||||
from mistral.workbook import parser as spec_parser
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
CONF = cfg.CONF
|
||||
|
||||
_ACTION_CTX_PARAM = 'action_context'
|
||||
|
||||
|
||||
def _has_action_context_param(action_cls):
|
||||
arg_spec = inspect.getargspec(action_cls.__init__)
|
||||
|
||||
return _ACTION_CTX_PARAM in arg_spec.args
|
||||
|
||||
|
||||
def _get_action_context(db_task, openstack_context):
|
||||
result = {
|
||||
'workbook_name': db_task.workbook_name,
|
||||
'execution_id': db_task.execution_id,
|
||||
'task_id': db_task.id,
|
||||
'task_name': db_task.name,
|
||||
'task_tags': db_task.tags,
|
||||
}
|
||||
|
||||
if openstack_context:
|
||||
result.update({'openstack': openstack_context})
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def evaluate_task_parameters(task_ex, context):
|
||||
params = task_ex.task_spec.get('parameters', {})
|
||||
|
||||
return expr.evaluate_recursively(params, context)
|
||||
|
||||
|
||||
def build_required_context(task, tasks):
|
||||
context = {}
|
||||
|
||||
for req_task in tasks:
|
||||
dep_ids = task.requires or []
|
||||
|
||||
if req_task.id in dep_ids:
|
||||
_merge_dicts(context, get_outbound_context(req_task))
|
||||
|
||||
return context
|
||||
|
||||
|
||||
def prepare_tasks(tasks_to_start, context, workbook, tasks):
|
||||
results = []
|
||||
|
||||
for task in tasks_to_start:
|
||||
context = _merge_dicts(context, build_required_context(task, tasks))
|
||||
|
||||
action_params = evaluate_task_parameters(task, context)
|
||||
|
||||
db_api.task_update(task.id,
|
||||
{'state': states.RUNNING,
|
||||
'in_context': context,
|
||||
'parameters': action_params})
|
||||
|
||||
# Get action name. Unwrap ad-hoc and reevaluate params if
|
||||
# necessary.
|
||||
action_name = spec_parser.get_task_spec(
|
||||
task.task_spec).get_full_action_name()
|
||||
|
||||
openstack_ctx = context.get('openstack')
|
||||
|
||||
if not a_m.get_action_class(action_name):
|
||||
# If action is not found in registered actions try to find
|
||||
# ad-hoc action definition.
|
||||
if openstack_ctx is not None:
|
||||
action_params.update({'openstack': openstack_ctx})
|
||||
|
||||
action = a_m.resolve_adhoc_action_name(workbook, action_name)
|
||||
|
||||
if not action:
|
||||
msg = ('Unknown action [workbook=%s, action=%s]' %
|
||||
(workbook, action_name))
|
||||
raise exc.ActionException(msg)
|
||||
|
||||
action_params = a_m.convert_adhoc_action_params(workbook,
|
||||
action_name,
|
||||
action_params)
|
||||
action_name = action
|
||||
|
||||
if _has_action_context_param(a_m.get_action_class(action_name)):
|
||||
action_params[_ACTION_CTX_PARAM] = _get_action_context(
|
||||
task, openstack_ctx)
|
||||
|
||||
results.append((task.id, action_name, action_params))
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def get_task_output(task, result):
|
||||
publish_transformer = task.task_spec.get('publish')
|
||||
|
||||
output = expr.evaluate_recursively(publish_transformer, result) or {}
|
||||
|
||||
if result:
|
||||
output['task'] = {task.name: result}
|
||||
|
||||
return output
|
||||
|
||||
|
||||
def _merge_dicts(target, src):
|
||||
for key in src:
|
||||
# TODO(nmakhotkin) Take care of the same key in both dicts
|
||||
to_merge = (key in target
|
||||
and isinstance(target[key], dict)
|
||||
and isinstance(src[key], dict))
|
||||
if to_merge:
|
||||
_merge_dicts(target[key], src[key])
|
||||
else:
|
||||
target[key] = src[key]
|
||||
return target
|
||||
|
||||
|
||||
def get_outbound_context(task, output=None):
|
||||
in_context = task.in_context
|
||||
|
||||
out_context = in_context.copy() if in_context else {}
|
||||
|
||||
if not output:
|
||||
output = task.get('output')
|
||||
|
||||
if output:
|
||||
out_context = _merge_dicts(out_context, output)
|
||||
|
||||
return out_context
|
||||
|
||||
|
||||
def add_openstack_data_to_context(context, db_workbook):
|
||||
if context is None:
|
||||
context = {}
|
||||
|
||||
if CONF.pecan.auth_enable:
|
||||
workbook_ctx = security.create_context(
|
||||
db_workbook.trust_id, db_workbook.project_id
|
||||
)
|
||||
|
||||
if workbook_ctx:
|
||||
context.update({'openstack': workbook_ctx.to_dict()})
|
||||
|
||||
return context
|
||||
|
||||
|
||||
def add_execution_to_context(context, db_execution):
|
||||
if context is None:
|
||||
context = {}
|
||||
|
||||
context['__execution'] = {
|
||||
'id': db_execution.id,
|
||||
'workbook_name': db_execution['workbook_name'],
|
||||
'task': db_execution.task
|
||||
}
|
||||
|
||||
return context
|
@ -1,45 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# TODO(rakhmerov): Deprecated in favor of package 'mistral.engine1'.
|
||||
|
||||
from mistral import context as auth_context
|
||||
from mistral import engine
|
||||
from mistral.engine import executor
|
||||
from mistral.openstack.common import log as logging
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DefaultEngine(engine.Engine):
|
||||
def _run_task(self, task_id, action_name, params):
|
||||
# TODO(rakhmerov):
|
||||
# This call outside of DB transaction creates a window
|
||||
# when the engine may crash and DB will not be consistent with
|
||||
# the task message queue state. Need to figure out the best
|
||||
# solution to recover from this situation.
|
||||
# However, making this call in DB transaction is really bad
|
||||
# since it makes transaction much longer in time and under load
|
||||
# may overload DB with open transactions.
|
||||
# TODO(m4dcoder): Use a pool for transport and client
|
||||
|
||||
exctr = executor.ExecutorClient(self.transport)
|
||||
|
||||
LOG.info("Submitted task for execution: '%s'" % task_id)
|
||||
|
||||
exctr.handle_task(auth_context.ctx(),
|
||||
task_id=task_id,
|
||||
action_name=action_name,
|
||||
params=params)
|
@ -1,73 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# TODO(rakhmerov): Deprecated in favor of package 'mistral.engine1'.
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from mistral.engine import executor
|
||||
from mistral.engine import states
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import action_manager as a_m
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
WORKFLOW_TRACE = logging.getLogger(cfg.CONF.workflow_trace_log_name)
|
||||
|
||||
|
||||
class DefaultExecutor(executor.Executor):
|
||||
def _log_action_exception(self, message, task_id, action, params, ex):
|
||||
LOG.exception("%s [task_id=%s, action='%s', params='%s']\n %s" %
|
||||
(message, str(task_id), str(action),
|
||||
str(params), str(ex)))
|
||||
|
||||
def handle_task(self, cntx, task_id, action_name, params={}):
|
||||
"""Handle the execution of the workbook task.
|
||||
|
||||
:param task_id: task identifier
|
||||
:type task_id: str
|
||||
:param action_name: a name of the action to run
|
||||
:type action_name: str
|
||||
:param params: a dict of action parameters
|
||||
"""
|
||||
|
||||
action_cls = a_m.get_action_class(action_name)
|
||||
|
||||
# TODO(dzimine): on failure, convey failure details back
|
||||
try:
|
||||
action = action_cls(**params)
|
||||
except Exception as e:
|
||||
raise exc.ActionException("Failed to create action"
|
||||
"[action_name=%s, params=%s]: %s" %
|
||||
(action_name, params, e))
|
||||
|
||||
if action.is_sync():
|
||||
try:
|
||||
state, result = states.SUCCESS, action.run()
|
||||
except exc.ActionException as ex:
|
||||
self._log_action_exception("Action failed", task_id,
|
||||
action_name, params, ex)
|
||||
state, result = states.ERROR, None
|
||||
|
||||
self.engine.convey_task_result(task_id, state, result)
|
||||
else:
|
||||
try:
|
||||
action.run()
|
||||
except exc.ActionException as ex:
|
||||
self._log_action_exception("Action failed", task_id,
|
||||
action_name, params, ex)
|
||||
self.engine.convey_task_result(task_id, states.ERROR, None)
|
@ -1,77 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# TODO(rakhmerov): Deprecated in favor of package 'mistral.engine1'.
|
||||
|
||||
import abc
|
||||
|
||||
from oslo.config import cfg
|
||||
from oslo import messaging
|
||||
import six
|
||||
from stevedore import driver
|
||||
|
||||
from mistral import context as auth_context
|
||||
from mistral import engine
|
||||
from mistral.openstack.common import log as logging
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_executor(name, transport):
|
||||
mgr = driver.DriverManager(
|
||||
namespace='mistral.executor.drivers',
|
||||
name=name,
|
||||
invoke_on_load=True,
|
||||
invoke_kwds={'transport': transport})
|
||||
return mgr.driver
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class Executor(object):
|
||||
"""Abstract class for task execution."""
|
||||
|
||||
def __init__(self, transport=None):
|
||||
self.transport = engine.get_transport(transport)
|
||||
self.engine = engine.EngineClient(self.transport)
|
||||
|
||||
@abc.abstractmethod
|
||||
def handle_task(self, cntx, **kwargs):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class ExecutorClient(object):
|
||||
"""RPC client for the Executor."""
|
||||
|
||||
def __init__(self, transport):
|
||||
"""Construct an RPC client for the Executor.
|
||||
|
||||
:param transport: a messaging transport handle
|
||||
:type transport: Transport
|
||||
"""
|
||||
serializer = auth_context.RpcContextSerializer(
|
||||
auth_context.JsonPayloadSerializer())
|
||||
target = messaging.Target(topic=cfg.CONF.executor.topic)
|
||||
self._client = messaging.RPCClient(transport, target,
|
||||
serializer=serializer)
|
||||
|
||||
def handle_task(self, cntx, **kwargs):
|
||||
"""Send the task request to the Executor for execution.
|
||||
|
||||
:param cntx: a request context dict
|
||||
:type cntx: MistralContext
|
||||
:param kwargs: a dict of method arguments
|
||||
:type kwargs: dict
|
||||
"""
|
||||
return self._client.cast(cntx, 'handle_task', **kwargs)
|
@ -1,75 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2014 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# TODO(rakhmerov): Deprecated in favor of package 'mistral.engine1'.
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from mistral.engine import states
|
||||
from mistral import expressions
|
||||
from mistral.openstack.common import log as logging
|
||||
|
||||
|
||||
WORKFLOW_TRACE = logging.getLogger(cfg.CONF.workflow_trace_log_name)
|
||||
|
||||
|
||||
def get_task_runtime(task_spec, state=states.IDLE, outbound_context=None,
|
||||
task_runtime_context=None):
|
||||
"""Computes the state and exec_flow_context runtime properties for a task
|
||||
based on the supplied properties. This method takes the retry nature of a
|
||||
task into consideration.
|
||||
|
||||
:param task_spec: specification of the task
|
||||
:param state: suggested next state
|
||||
:param outbound_context: outbound_context to be used for computation
|
||||
:param task_runtime_context: current flow context
|
||||
:return: state, exec_flow_context tuple. Sample scenarios are,
|
||||
1. state = SUCCESS
|
||||
No need to move to next iteration.
|
||||
2. retry:count = 5, current:count = 2, state = ERROR,
|
||||
state = IDLE/DELAYED, current:count = 3
|
||||
3. retry:count = 5, current:count = 4, state = ERROR
|
||||
Iterations complete therefore state = #{state}, current:count = 4.
|
||||
"""
|
||||
|
||||
if not (state == states.ERROR and task_spec.is_retry_task()):
|
||||
return state, task_runtime_context
|
||||
|
||||
if task_runtime_context is None:
|
||||
task_runtime_context = {}
|
||||
if outbound_context is None:
|
||||
outbound_context = {}
|
||||
|
||||
wf_trace_msg = "Task '%s' [%s -> " % (task_spec.name, state)
|
||||
|
||||
retry_no = -1
|
||||
if "retry_no" in task_runtime_context:
|
||||
retry_no = task_runtime_context["retry_no"]
|
||||
retry_count, break_on, delay = task_spec.get_retry_parameters()
|
||||
|
||||
retries_remain = retry_no + 1 < retry_count
|
||||
break_early = (expressions.evaluate(break_on, outbound_context)
|
||||
if break_on and outbound_context else False)
|
||||
|
||||
if retries_remain and not break_early:
|
||||
state = states.DELAYED if delay > 0 else states.IDLE
|
||||
retry_no += 1
|
||||
|
||||
WORKFLOW_TRACE.info(wf_trace_msg + "%s, delay = %s sec]" % (state, delay))
|
||||
|
||||
task_runtime_context["retry_no"] = retry_no
|
||||
|
||||
return state, task_runtime_context
|
@ -1,40 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# TODO(rakhmerov): Deprecated in favor of package 'mistral.engine1'.
|
||||
|
||||
"""Valid task and workflow states."""
|
||||
|
||||
IDLE = 'IDLE'
|
||||
RUNNING = 'RUNNING'
|
||||
SUCCESS = 'SUCCESS'
|
||||
ERROR = 'ERROR'
|
||||
STOPPED = 'STOPPED'
|
||||
DELAYED = 'DELAYED'
|
||||
|
||||
_ALL = [IDLE, RUNNING, SUCCESS, ERROR, STOPPED, DELAYED]
|
||||
|
||||
|
||||
def is_valid(state):
|
||||
return state in _ALL
|
||||
|
||||
|
||||
def is_finished(state):
|
||||
return state in [SUCCESS, ERROR]
|
||||
|
||||
|
||||
def is_stopped_or_finished(state):
|
||||
return state == STOPPED or is_finished(state)
|
@ -1,150 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# TODO(rakhmerov): Deprecated in favor of package 'mistral.engine1'.
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms import traversal
|
||||
|
||||
from mistral.engine import states
|
||||
from mistral.openstack.common import log as logging
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def find_workflow_tasks(workbook, task_name):
|
||||
wb_tasks = workbook.tasks
|
||||
full_graph = nx.DiGraph()
|
||||
|
||||
for t in wb_tasks:
|
||||
full_graph.add_node(t)
|
||||
|
||||
_update_dependencies(wb_tasks, full_graph)
|
||||
|
||||
# Find the list of the tasks in the order they're supposed to be executed.
|
||||
task_spec = wb_tasks[task_name]
|
||||
|
||||
return [node for node
|
||||
in traversal.dfs_postorder_nodes(full_graph.reverse(), task_spec)]
|
||||
|
||||
|
||||
def find_resolved_tasks(tasks):
|
||||
# We need to analyse graph and see which tasks are ready to start
|
||||
resolved_tasks = []
|
||||
delayed_tasks = []
|
||||
allows = []
|
||||
for t in tasks:
|
||||
if t['state'] == states.SUCCESS:
|
||||
allows += [t['name']]
|
||||
allow_set = set(allows)
|
||||
for t in tasks:
|
||||
deps = t['task_spec'].get('requires', {}).keys()
|
||||
if len(set(deps) - allow_set) == 0:
|
||||
# all required tasks, if any, are SUCCESS
|
||||
if t['state'] == states.IDLE:
|
||||
resolved_tasks.append(t)
|
||||
elif t['state'] == states.DELAYED:
|
||||
delayed_tasks.append(t)
|
||||
return resolved_tasks, delayed_tasks
|
||||
|
||||
|
||||
def _get_checked_tasks(target_tasks):
|
||||
checked_tasks = []
|
||||
for t in target_tasks:
|
||||
# TODO(nmakhotkin): see and evaluate YAQL with data from context
|
||||
checked_tasks.append(t)
|
||||
return checked_tasks
|
||||
|
||||
|
||||
def _get_tasks_to_schedule(target_tasks, workbook):
|
||||
tasks_to_schedule = _get_checked_tasks(target_tasks)
|
||||
return [workbook.tasks.get(t_name) for t_name in tasks_to_schedule]
|
||||
|
||||
|
||||
def find_tasks_after_completion(task, workbook):
|
||||
"""Determine tasks which should be scheduled after completing
|
||||
given task. Expression 'on_finish' is not mutually exclusive to
|
||||
'on_success' and 'on_error'.
|
||||
|
||||
:param task: Task object
|
||||
:param workbook: Workbook Entity
|
||||
:return: list of task dictionaries.
|
||||
"""
|
||||
state = task['state']
|
||||
found_tasks = []
|
||||
LOG.debug("Recieved task %s: %s" % (task['name'], state))
|
||||
|
||||
if state == states.ERROR:
|
||||
tasks_on_error = workbook.tasks.get(task['name']).get_on_error()
|
||||
if tasks_on_error:
|
||||
found_tasks = _get_tasks_to_schedule(tasks_on_error, workbook)
|
||||
|
||||
elif state == states.SUCCESS:
|
||||
tasks_on_success = workbook.tasks.get(task['name']).get_on_success()
|
||||
if tasks_on_success:
|
||||
found_tasks = _get_tasks_to_schedule(tasks_on_success, workbook)
|
||||
|
||||
if states.is_finished(state):
|
||||
tasks_on_finish = workbook.tasks.get(task['name']).get_on_finish()
|
||||
if tasks_on_finish:
|
||||
found_tasks += _get_tasks_to_schedule(tasks_on_finish, workbook)
|
||||
|
||||
LOG.debug("Found tasks: %s" % found_tasks)
|
||||
|
||||
workflow_tasks = []
|
||||
for t in found_tasks:
|
||||
workflow_tasks += find_workflow_tasks(workbook, t.name)
|
||||
|
||||
LOG.debug("Workflow tasks to schedule: %s" % workflow_tasks)
|
||||
|
||||
return workflow_tasks
|
||||
|
||||
|
||||
def is_finished(tasks):
|
||||
return all(states.is_finished(task['state']) for task in tasks)
|
||||
|
||||
|
||||
def is_success(tasks):
|
||||
return all(task['state'] == states.SUCCESS for task in tasks)
|
||||
|
||||
|
||||
def is_error(tasks):
|
||||
return any(task['state'] == states.ERROR and
|
||||
not task['task_spec'].get('on-error', {}) and
|
||||
not task['task_spec'].get('on-finish', {}) for task in tasks)
|
||||
|
||||
|
||||
def _get_dependency_tasks(tasks_spec, task_spec):
|
||||
dep_task_names = tasks_spec[task_spec.name].get_requires()
|
||||
|
||||
if len(dep_task_names) == 0:
|
||||
return []
|
||||
|
||||
dep_t_specs = set()
|
||||
|
||||
for t_spec in tasks_spec:
|
||||
for t_name in dep_task_names:
|
||||
if t_name == t_spec.name:
|
||||
dep_t_specs.add(t_spec)
|
||||
|
||||
return dep_t_specs
|
||||
|
||||
|
||||
def _update_dependencies(tasks_spec, graph):
|
||||
for t_spec in tasks_spec:
|
||||
for dep_t_spec in _get_dependency_tasks(tasks_spec, t_spec):
|
||||
graph.add_edge(dep_t_spec, t_spec)
|
@ -30,6 +30,18 @@ _ENGINE_CLIENT = None
|
||||
_EXECUTOR_CLIENT = None
|
||||
|
||||
|
||||
def cleanup():
|
||||
"""Intended to be used by tests to recreate all RPC related objects."""
|
||||
|
||||
global _TRANSPORT
|
||||
global _ENGINE_CLIENT
|
||||
global _EXECUTOR_CLIENT
|
||||
|
||||
_TRANSPORT = None
|
||||
_ENGINE_CLIENT = None
|
||||
_EXECUTOR_CLIENT = None
|
||||
|
||||
|
||||
def get_transport():
|
||||
global _TRANSPORT
|
||||
|
||||
|
@ -15,56 +15,18 @@
|
||||
# limitations under the License.
|
||||
|
||||
from mistral import context as auth_ctx
|
||||
from mistral.db.v1 import api as db_api_v1
|
||||
from mistral.db.v2 import api as db_api_v2
|
||||
from mistral import engine
|
||||
from mistral.engine1 import rpc
|
||||
from mistral.openstack.common import log
|
||||
from mistral.openstack.common import periodic_task
|
||||
from mistral.openstack.common import threadgroup
|
||||
from mistral.services import security
|
||||
from mistral.services import triggers
|
||||
from mistral.workbook import parser as spec_parser
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
class MistralPeriodicTasks(periodic_task.PeriodicTasks):
|
||||
|
||||
def __init__(self, transport=None):
|
||||
super(MistralPeriodicTasks, self).__init__()
|
||||
|
||||
self.transport = engine.get_transport(transport)
|
||||
self.engine = engine.EngineClient(self.transport)
|
||||
|
||||
@periodic_task.periodic_task(spacing=1, run_immediately=True)
|
||||
def process_cron_triggers_v1(self, ctx):
|
||||
for t in triggers.get_next_triggers_v1():
|
||||
LOG.debug("Processing cron trigger %s" % t)
|
||||
# Setup admin context before schedule triggers.
|
||||
wb = db_api_v1.workbook_get(t['workbook_name'])
|
||||
auth_ctx.set_ctx(
|
||||
security.create_context(wb.trust_id, wb.project_id)
|
||||
)
|
||||
|
||||
try:
|
||||
task = spec_parser.get_workbook_spec_from_yaml(
|
||||
wb['definition']).get_trigger_task_name(t['name'])
|
||||
|
||||
self.engine.start_workflow_execution(wb['name'], task)
|
||||
finally:
|
||||
next_time = triggers.get_next_execution_time(
|
||||
t['pattern'],
|
||||
t['next_execution_time']
|
||||
)
|
||||
|
||||
db_api_v1.trigger_update(
|
||||
t['id'],
|
||||
{'next_execution_time': next_time}
|
||||
)
|
||||
|
||||
auth_ctx.set_ctx(None)
|
||||
|
||||
@periodic_task.periodic_task(spacing=1, run_immediately=True)
|
||||
def process_cron_triggers_v2(self, ctx):
|
||||
for t in triggers.get_next_cron_triggers():
|
||||
@ -100,9 +62,9 @@ class MistralPeriodicTasks(periodic_task.PeriodicTasks):
|
||||
auth_ctx.set_ctx(None)
|
||||
|
||||
|
||||
def setup(transport):
|
||||
def setup():
|
||||
tg = threadgroup.ThreadGroup()
|
||||
pt = MistralPeriodicTasks(transport=transport)
|
||||
pt = MistralPeriodicTasks()
|
||||
|
||||
ctx = auth_ctx.MistralContext(
|
||||
user_id=None,
|
||||
|
@ -15,64 +15,15 @@
|
||||
from croniter import croniter
|
||||
import datetime
|
||||
|
||||
from mistral.db.v1 import api as db_api_v1
|
||||
from mistral.db.v2 import api as db_api_v2
|
||||
from mistral import exceptions as exc
|
||||
from mistral.services import security
|
||||
from mistral.workbook import parser as spec_parser
|
||||
|
||||
|
||||
def get_next_execution_time(pattern, start_time):
|
||||
return croniter(pattern, start_time).get_next(datetime.datetime)
|
||||
|
||||
|
||||
# Triggers v1.
|
||||
|
||||
def get_next_triggers_v1():
|
||||
return db_api_v1.get_next_triggers(datetime.datetime.now() +
|
||||
datetime.timedelta(0, 2))
|
||||
|
||||
|
||||
def create_trigger_v1(name, pattern, workbook_name, start_time=None):
|
||||
if not start_time:
|
||||
start_time = datetime.datetime.now()
|
||||
|
||||
return db_api_v1.trigger_create({
|
||||
"name": name,
|
||||
"pattern": pattern,
|
||||
"next_execution_time": get_next_execution_time(pattern, start_time),
|
||||
"workbook_name": workbook_name
|
||||
})
|
||||
|
||||
|
||||
def create_associated_triggers(db_workbook):
|
||||
if not db_workbook.definition:
|
||||
return
|
||||
|
||||
wb_spec = spec_parser.get_workbook_spec_from_yaml(
|
||||
db_workbook.definition
|
||||
)
|
||||
|
||||
triggers = wb_spec.get_triggers()
|
||||
|
||||
# Prepare all triggers data in advance to make db transaction shorter.
|
||||
db_triggers = []
|
||||
|
||||
for e in triggers:
|
||||
pattern = e['parameters']['cron-pattern']
|
||||
next_time = get_next_execution_time(pattern, datetime.datetime.now())
|
||||
db_triggers.append({
|
||||
"name": e['name'],
|
||||
"pattern": pattern,
|
||||
"next_execution_time": next_time,
|
||||
"workbook_name": db_workbook.name
|
||||
})
|
||||
|
||||
with db_api_v1.transaction():
|
||||
for e in db_triggers:
|
||||
db_api_v1.trigger_create(e)
|
||||
|
||||
|
||||
# Triggers v2.
|
||||
|
||||
def get_next_cron_triggers():
|
||||
|
@ -14,26 +14,11 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from mistral.db.v1 import api as db_api_v1
|
||||
from mistral.db.v2 import api as db_api_v2
|
||||
from mistral.services import security
|
||||
from mistral.services import triggers
|
||||
from mistral.workbook import parser as spec_parser
|
||||
|
||||
|
||||
def create_workbook_v1(values, scope='private'):
|
||||
return db_api_v1.workbook_create(values)
|
||||
|
||||
|
||||
def update_workbook_v1(workbook_name, values):
|
||||
wb_db = db_api_v1.workbook_update(workbook_name, values)
|
||||
|
||||
if 'definition' in values:
|
||||
triggers.create_associated_triggers(wb_db)
|
||||
|
||||
return wb_db
|
||||
|
||||
|
||||
def create_workbook_v2(definition, scope='private'):
|
||||
wb_values = _get_workbook_values(
|
||||
spec_parser.get_workbook_spec_from_yaml(definition),
|
||||
|
@ -19,22 +19,15 @@ import sys
|
||||
import time
|
||||
|
||||
from oslo.config import cfg
|
||||
from oslo import messaging
|
||||
from oslo.messaging import transport
|
||||
from oslotest import base
|
||||
from stevedore import driver
|
||||
import testtools.matchers as ttm
|
||||
|
||||
from mistral import context as auth_context
|
||||
from mistral.db.sqlalchemy import base as db_sa_base
|
||||
from mistral.db.sqlalchemy import sqlite_lock
|
||||
from mistral.db.v1 import api as db_api_v1
|
||||
from mistral.db.v2 import api as db_api_v2
|
||||
from mistral import engine
|
||||
from mistral.engine import executor
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import action_manager
|
||||
from mistral.services import scheduler
|
||||
from mistral import version
|
||||
|
||||
|
||||
@ -48,33 +41,6 @@ def get_resource(resource_name):
|
||||
RESOURCES_PATH + resource_name)).read()
|
||||
|
||||
|
||||
# TODO(rakhmerov): Remove together with the current engine implementation.
|
||||
def get_fake_transport():
|
||||
# Get transport here to let oslo.messaging setup default config
|
||||
# before changing the rpc_backend to the fake driver; otherwise,
|
||||
# oslo.messaging will throw exception.
|
||||
messaging.get_transport(cfg.CONF)
|
||||
|
||||
cfg.CONF.set_default('rpc_backend', 'fake')
|
||||
|
||||
url = transport.TransportURL.parse(cfg.CONF, None, None)
|
||||
|
||||
kwargs = dict(
|
||||
default_exchange=cfg.CONF.control_exchange,
|
||||
allowed_remote_exmods=[]
|
||||
)
|
||||
|
||||
mgr = driver.DriverManager(
|
||||
'oslo.messaging.drivers',
|
||||
url.transport,
|
||||
invoke_on_load=True,
|
||||
invoke_args=[cfg.CONF, url],
|
||||
invoke_kwds=kwargs
|
||||
)
|
||||
|
||||
return transport.Transport(mgr.driver)
|
||||
|
||||
|
||||
class BaseTest(base.BaseTestCase):
|
||||
def assertListEqual(self, l1, l2):
|
||||
if tuple(sys.version_info)[0:2] < (2, 7):
|
||||
@ -198,18 +164,11 @@ class DbTestCase(BaseTest):
|
||||
cfg.CONF.set_default('max_overflow', -1, group='database')
|
||||
cfg.CONF.set_default('max_pool_size', 1000, group='database')
|
||||
|
||||
db_api_v1.setup_db()
|
||||
db_api_v2.setup_db()
|
||||
|
||||
action_manager.sync_db()
|
||||
|
||||
def _clean_db(self):
|
||||
with db_api_v1.transaction():
|
||||
db_api_v1.workbooks_delete()
|
||||
db_api_v1.executions_delete()
|
||||
db_api_v1.triggers_delete()
|
||||
db_api_v1.tasks_delete()
|
||||
|
||||
with db_api_v2.transaction():
|
||||
db_api_v2.delete_workbooks()
|
||||
db_api_v2.delete_executions()
|
||||
@ -238,81 +197,3 @@ class DbTestCase(BaseTest):
|
||||
|
||||
def is_db_session_open(self):
|
||||
return db_sa_base._get_thread_local_session() is not None
|
||||
|
||||
|
||||
# TODO(rakhmerov): Remove together with the current engine implementation.
|
||||
class EngineTestCase(DbTestCase):
|
||||
transport = get_fake_transport()
|
||||
backend = engine.get_engine(cfg.CONF.engine.engine, transport)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(EngineTestCase, self).__init__(*args, **kwargs)
|
||||
|
||||
self.engine = engine.EngineClient(self.transport)
|
||||
|
||||
def setUp(self):
|
||||
super(EngineTestCase, self).setUp()
|
||||
|
||||
self.addCleanup(scheduler.stop_all_schedulers)
|
||||
|
||||
@classmethod
|
||||
def mock_task_result(cls, task_id, state, result):
|
||||
"""Mock the engine convey_task_results to send request directly
|
||||
to the engine instead of going through the oslo.messaging transport.
|
||||
"""
|
||||
kwargs = {
|
||||
'task_id': task_id,
|
||||
'state': state,
|
||||
'result': result
|
||||
}
|
||||
|
||||
return cls.backend.convey_task_result({}, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def mock_start_workflow(cls, workbook_name, task_name, context=None):
|
||||
"""Mock the engine start_workflow_execution to send request directly
|
||||
to the engine instead of going through the oslo.messaging transport.
|
||||
"""
|
||||
kwargs = {
|
||||
'workbook_name': workbook_name,
|
||||
'task_name': task_name,
|
||||
'context': context
|
||||
}
|
||||
|
||||
return cls.backend.start_workflow_execution({}, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def mock_get_workflow_state(cls, workbook_name, execution_id):
|
||||
"""Mock the engine get_workflow_execution_state to send request
|
||||
directly to the engine instead of going through the oslo.messaging
|
||||
transport.
|
||||
"""
|
||||
kwargs = {
|
||||
'workbook_name': workbook_name,
|
||||
'execution_id': execution_id
|
||||
}
|
||||
|
||||
return cls.backend.get_workflow_execution_state({}, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def mock_run_task(cls, task_id, action_name, params):
|
||||
"""Mock the engine _run_tasks to send requests directly to the task
|
||||
executor instead of going through the oslo.messaging transport.
|
||||
"""
|
||||
exctr = executor.get_executor(cfg.CONF.engine.engine, cls.transport)
|
||||
|
||||
exctr.handle_task(
|
||||
auth_context.ctx(),
|
||||
task_id=task_id,
|
||||
action_name=action_name,
|
||||
params=params
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def mock_handle_task(cls, cntx, **kwargs):
|
||||
"""Mock the executor handle_task to send requests directory to the task
|
||||
executor instead of going through the oslo.messaging transport.
|
||||
"""
|
||||
exctr = executor.get_executor(cfg.CONF.engine.engine, cls.transport)
|
||||
|
||||
return exctr.handle_task(cntx, **kwargs)
|
||||
|
@ -1,183 +0,0 @@
|
||||
# Copyright 2013 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import uuid
|
||||
|
||||
from tempest import test
|
||||
from tempest_lib import exceptions
|
||||
|
||||
from mistral.tests.functional import base
|
||||
|
||||
|
||||
class WorkbookTestsV1(base.TestCase):
|
||||
|
||||
_service = 'workflow'
|
||||
|
||||
@test.attr(type='smoke')
|
||||
def test_get_list_obj(self):
|
||||
resp, _ = self.client.get_list_obj('')
|
||||
self.assertEqual(200, resp.status)
|
||||
|
||||
@test.attr(type='smoke')
|
||||
def test_get_list_workbooks(self):
|
||||
resp, body = self.client.get_list_obj('workbooks')
|
||||
|
||||
self.assertEqual(200, resp.status)
|
||||
self.assertEqual([], body['workbooks'])
|
||||
|
||||
@test.attr(type='smoke')
|
||||
def test_create_and_delete_workbook(self):
|
||||
resp, body = self.client.create_workbook('test')
|
||||
|
||||
self.assertEqual(201, resp.status)
|
||||
self.assertEqual('test', body['name'])
|
||||
|
||||
resp, body = self.client.get_list_obj('workbooks')
|
||||
|
||||
self.assertEqual(200, resp.status)
|
||||
self.assertEqual('test', body['workbooks'][0]['name'])
|
||||
|
||||
self.client.delete_obj('workbooks', 'test')
|
||||
self.client.workbooks.remove('test')
|
||||
|
||||
_, body = self.client.get_list_obj('workbooks')
|
||||
|
||||
self.assertEqual([], body['workbooks'])
|
||||
|
||||
@test.attr(type='smoke')
|
||||
def test_get_workbook(self):
|
||||
self.client.create_workbook('test')
|
||||
resp, body = self.client.get_list_obj('workbooks/test')
|
||||
|
||||
self.assertEqual(200, resp.status)
|
||||
self.assertEqual('test', body['name'])
|
||||
|
||||
@test.attr(type='smoke')
|
||||
def test_update_workbook(self):
|
||||
self.client.create_workbook('test')
|
||||
resp, body = self.client.update_workbook('test')
|
||||
|
||||
self.assertEqual(200, resp.status)
|
||||
self.assertEqual('test', body['name'])
|
||||
|
||||
@test.attr(type='smoke')
|
||||
def test_get_workbook_definition(self):
|
||||
self.client.create_workbook('test')
|
||||
self.client.upload_workbook_definition('test')
|
||||
resp, body = self.client.get_workbook_definition('test')
|
||||
|
||||
self.assertEqual(200, resp.status)
|
||||
self.assertIsNotNone(body)
|
||||
|
||||
@test.attr(type='smoke')
|
||||
def test_upload_workbook_definition(self):
|
||||
self.client.create_workbook('test1')
|
||||
resp, body = self.client.upload_workbook_definition(
|
||||
'test1')
|
||||
|
||||
self.assertEqual(200, resp.status)
|
||||
self.assertIsNotNone(body)
|
||||
|
||||
@test.attr(type='negative')
|
||||
def test_get_nonexistent_workbook_definition(self):
|
||||
self.assertRaises(exceptions.NotFound,
|
||||
self.client.get_workbook_definition,
|
||||
'nonexist')
|
||||
|
||||
@test.attr(type='negative')
|
||||
def test_get_nonexistent_workbook(self):
|
||||
self.assertRaises(exceptions.NotFound, self.client.get_object,
|
||||
'workbooks', 'nonexist')
|
||||
|
||||
@test.attr(type='negative')
|
||||
def test_double_create_obj(self):
|
||||
self.client.create_workbook('test')
|
||||
|
||||
self.assertRaises(exceptions.Conflict, self.client.create_workbook,
|
||||
'test')
|
||||
|
||||
self.client.delete_obj('workbooks', 'test')
|
||||
self.client.workbooks.remove('test')
|
||||
_, body = self.client.get_list_obj('workbooks')
|
||||
|
||||
self.assertEqual([], body['workbooks'])
|
||||
|
||||
|
||||
class ExecutionTestsV1(base.TestCase):
|
||||
|
||||
_service = 'workflow'
|
||||
|
||||
def setUp(self):
|
||||
super(ExecutionTestsV1, self).setUp()
|
||||
|
||||
self.client.create_workbook('test')
|
||||
self.client.upload_workbook_definition('test')
|
||||
self.entity_type = 'workbook_name'
|
||||
self.entity_name = 'test'
|
||||
|
||||
def tearDown(self):
|
||||
super(ExecutionTestsV1, self).tearDown()
|
||||
|
||||
for ex in self.client.executions:
|
||||
self.client.delete_obj('executions', ex)
|
||||
self.client.executions = []
|
||||
|
||||
@test.attr(type='positive')
|
||||
def test_create_execution(self):
|
||||
resp, body = self.client.create_execution(self.entity_name)
|
||||
|
||||
self.assertEqual(201, resp.status)
|
||||
self.assertEqual(self.entity_name, body[self.entity_type])
|
||||
|
||||
@test.attr(type='positive')
|
||||
def test_get_execution(self):
|
||||
_, execution = self.client.create_execution(self.entity_name)
|
||||
|
||||
resp, body = self.client.get_object('executions', execution['id'])
|
||||
|
||||
del execution['state']
|
||||
del body['state']
|
||||
|
||||
self.assertEqual(200, resp.status)
|
||||
self.assertEqual(execution['id'], body['id'])
|
||||
|
||||
@test.attr(type='positive')
|
||||
def test_update_execution(self):
|
||||
_, execution = self.client.create_execution(self.entity_name)
|
||||
|
||||
resp, body = self.client.update_execution(
|
||||
execution['id'], '{}')
|
||||
|
||||
body = json.loads(body)
|
||||
del execution['state']
|
||||
del body['state']
|
||||
|
||||
self.assertEqual(200, resp.status)
|
||||
self.assertEqual(execution['id'], body['id'])
|
||||
|
||||
@test.attr(type='negative')
|
||||
def test_get_nonexistent_execution(self):
|
||||
self.assertRaises(exceptions.NotFound, self.client.get_object,
|
||||
'executions', str(uuid.uuid4()))
|
||||
|
||||
@test.attr(type='negative')
|
||||
def test_update_nonexistent_execution(self):
|
||||
id = str(uuid.uuid4())
|
||||
put_body = {
|
||||
"state": "STOPPED"
|
||||
}
|
||||
|
||||
self.assertRaises(exceptions.NotFound, self.client.update_execution,
|
||||
id, put_body)
|
@ -1,202 +0,0 @@
|
||||
# Copyright 2013 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
|
||||
from mistral.tests.functional import base
|
||||
|
||||
|
||||
CONTEXT = {
|
||||
'person': {
|
||||
'first_name': 'John',
|
||||
'last_name': 'Doe',
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class MistralWorkflowExecutionTests(base.TestCaseAdvanced):
|
||||
|
||||
_service = 'workflow'
|
||||
|
||||
def test_reverse_flow(self):
|
||||
text = base.get_resource(
|
||||
'data_flow/task_with_diamond_dependencies.yaml')
|
||||
self.client.prepare_workbook('test', text)
|
||||
|
||||
_, ex = self.client.create_execution_wait_success(
|
||||
'test', CONTEXT, 'send_greeting')
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'build_full_name')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(task_output['task']['build_full_name']['string'],
|
||||
"John Doe")
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'build_address')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(task_output['task']['build_address']['string'],
|
||||
"To John Doe")
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'build_greeting')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(task_output['task']['build_greeting']['string'],
|
||||
"Dear John Doe")
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'send_greeting')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(task_output['task']['send_greeting']['string'],
|
||||
"To John Doe. Dear John Doe,..")
|
||||
|
||||
def test_task_with_two_dependencies(self):
|
||||
text = base.get_resource(
|
||||
'data_flow/task_with_two_dependencies.yaml')
|
||||
self.client.prepare_workbook('test', text)
|
||||
|
||||
_, ex = self.client.create_execution_wait_success(
|
||||
'test', CONTEXT, 'send_greeting')
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'build_full_name')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(task_output['task']['build_full_name']['full_name'],
|
||||
"John Doe")
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'build_greeting')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(task_output['task']['build_greeting']['greeting'],
|
||||
"Cheers!")
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'send_greeting')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertTrue(task_output['task']['send_greeting']['greeting_sent'])
|
||||
|
||||
def test_direct_flow_tasks_on_success(self):
|
||||
text = base.get_resource(
|
||||
'data_flow/three_subsequent_tasks.yaml')
|
||||
self.client.prepare_workbook('test', text)
|
||||
|
||||
_, ex = self.client.create_execution_wait_success(
|
||||
'test', CONTEXT, 'build_full_name')
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'build_full_name')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(task_output['task']['build_full_name']['full_name'],
|
||||
"John Doe")
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'build_greeting')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(task_output['task']['build_greeting']['greeting'],
|
||||
"Hello, John Doe!")
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'send_greeting')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertTrue(task_output['task']['send_greeting']['greeting_sent'])
|
||||
|
||||
def test_two_dependent_tasks(self):
|
||||
text = base.get_resource(
|
||||
'data_flow/two_dependent_tasks.yaml')
|
||||
self.client.prepare_workbook('test', text)
|
||||
|
||||
_, ex = self.client.create_execution_wait_success(
|
||||
'test', CONTEXT, 'build_greeting')
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'build_full_name')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(task_output['task']['build_full_name']['full_name'],
|
||||
"John Doe")
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'build_greeting')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(task_output['task']['build_greeting']['greeting'],
|
||||
"Hello, John Doe!")
|
||||
|
||||
def test_two_subsequent_tasks(self):
|
||||
text = base.get_resource(
|
||||
'data_flow/two_subsequent_tasks.yaml')
|
||||
self.client.prepare_workbook('test', text)
|
||||
|
||||
_, ex = self.client.create_execution_wait_success(
|
||||
'test', CONTEXT, 'build_full_name')
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'build_full_name')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(task_output['task']['build_full_name']['full_name'],
|
||||
"John Doe")
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'build_greeting')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(
|
||||
task_output['task']['build_greeting']['greeting']['greet_message'],
|
||||
"Hello, John Doe!")
|
||||
|
||||
def test_mixed_workflow(self):
|
||||
text = base.get_resource(
|
||||
'test_mixed_flow.yaml')
|
||||
self.client.prepare_workbook('test', text)
|
||||
|
||||
_, ex = self.client.create_execution_wait_success(
|
||||
'test', {}, 'task2')
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'task1')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(task_output['task']['task1']['string'],
|
||||
"workflow is")
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'task2')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(
|
||||
task_output['task']['task2']['string'],
|
||||
"workflow is complete")
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'task3')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(
|
||||
task_output['task']['task3']['string'],
|
||||
"workflow is complete !")
|
||||
|
||||
def test_direct_workflow_all_keywords(self):
|
||||
text = base.get_resource(
|
||||
'test_direct_flow_all_keywords.yaml')
|
||||
self.client.prepare_workbook('test', text)
|
||||
|
||||
_, ex = self.client.create_execution_wait_success(
|
||||
'test', {}, 'task1')
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'task2')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(task_output['task']['task2']['string'],
|
||||
"workflow is")
|
||||
|
||||
task = self.client.get_task_by_name('test', ex['id'],
|
||||
'task4')
|
||||
task_output = json.loads(task['output'])
|
||||
self.assertEqual(
|
||||
task_output['task']['task4']['string'],
|
||||
"workflow is complete!")
|
@ -105,113 +105,6 @@ class MistralClientBase(rest_client.RestClient):
|
||||
return True
|
||||
|
||||
|
||||
class MistralClientV1(MistralClientBase):
|
||||
|
||||
def create_workbook(self, name):
|
||||
post_body = '{"name": "%s"}' % name
|
||||
resp, body = self.post('workbooks', post_body)
|
||||
|
||||
self.workbooks.append(name)
|
||||
|
||||
return resp, json.loads(body)
|
||||
|
||||
def update_workbook(self, name):
|
||||
post_body = '{"name": "%s"}' % name
|
||||
resp, body = self.put('workbooks/{name}'.format(name=name),
|
||||
post_body)
|
||||
return resp, json.loads(body)
|
||||
|
||||
def get_workbook_definition(self, name):
|
||||
headers = {'X-Auth-Token': self.auth_provider.get_token()}
|
||||
return self.get('workbooks/{name}/definition'.format(name=name),
|
||||
headers)
|
||||
|
||||
def upload_workbook_definition(self, name):
|
||||
headers = {'Content-Type': 'text/plain',
|
||||
'X-Auth-Token': self.auth_provider.get_token()}
|
||||
text = get_resource('wb_v1.yaml')
|
||||
|
||||
return self.put('workbooks/{name}/definition'.format(name=name),
|
||||
text, headers)
|
||||
|
||||
def create_execution(self, workbook_name, post_body=None):
|
||||
if post_body is None:
|
||||
body = {
|
||||
"workbook_name": workbook_name,
|
||||
"task": 'hello',
|
||||
"context": ''
|
||||
}
|
||||
else:
|
||||
body = post_body
|
||||
|
||||
rest, body = self.post('workbooks/{name}/executions'.format(
|
||||
name=workbook_name), json.dumps(body))
|
||||
|
||||
self.executions.append(json.loads(body)['id'])
|
||||
|
||||
return rest, json.loads(body)
|
||||
|
||||
def update_execution(self, execution_id, put_body):
|
||||
return self.put('executions/{execution}'.format(
|
||||
execution=execution_id), json.dumps(put_body))
|
||||
|
||||
def get_tasks_list(self, workbook_name, execution_id):
|
||||
resp, body = self.get(
|
||||
'/workbooks/{name}/executions/{execution}/tasks'.format(
|
||||
name=workbook_name,
|
||||
execution=execution_id))
|
||||
|
||||
return resp, json.loads(body)['tasks']
|
||||
|
||||
def get_task(self, workbook_name, execution_id, task_id):
|
||||
resp, body = self.get(
|
||||
'/workbooks/{name}/executions/{execution}/tasks/{task}'.format(
|
||||
name=workbook_name,
|
||||
execution=execution_id,
|
||||
task=task_id))
|
||||
|
||||
return resp, json.loads(body)
|
||||
|
||||
def update_task(self, task_id, put_body):
|
||||
resp, body = self.put('tasks/{task}'.format(
|
||||
task=task_id), json.dumps(put_body))
|
||||
|
||||
return resp, json.loads(body)
|
||||
|
||||
def prepare_workbook(self, name, text):
|
||||
headers = {'Content-Type': 'text/plain',
|
||||
'X-Auth-Token': self.auth_provider.get_token()}
|
||||
|
||||
return self.put('workbooks/{name}/definition'.format(name=name),
|
||||
text, headers)
|
||||
|
||||
def create_execution_wait_success(self, workbook_name,
|
||||
context, task, timeout=180):
|
||||
|
||||
body = {
|
||||
"workbook_name": workbook_name,
|
||||
"task": task,
|
||||
"context": json.dumps(context)
|
||||
}
|
||||
|
||||
resp, ex_body = self.create_execution(workbook_name, body)
|
||||
|
||||
self.wait_execution_success(ex_body, timeout)
|
||||
|
||||
resp, ex_body = self.get_object('executions', ex_body['id'])
|
||||
|
||||
return resp, ex_body
|
||||
|
||||
def get_task_by_name(self, workbook_name, execution_id, name):
|
||||
_, tasks = self.get_tasks_list(workbook_name, execution_id)
|
||||
for task in tasks:
|
||||
if task['name'] == name:
|
||||
_, task_body = self.get_task(
|
||||
workbook_name, execution_id, task['id'])
|
||||
|
||||
return task_body
|
||||
|
||||
|
||||
class MistralClientV2(MistralClientBase):
|
||||
|
||||
def post_request(self, url, file_name):
|
||||
@ -335,9 +228,6 @@ class TestCase(test.BaseTestCase):
|
||||
else:
|
||||
cls.mgr = clients.Manager()
|
||||
|
||||
if cls._service == 'workflow':
|
||||
cls.client = MistralClientV1(
|
||||
cls.mgr.auth_provider, cls._service)
|
||||
if cls._service == 'workflowv2':
|
||||
cls.client = MistralClientV2(
|
||||
cls.mgr.auth_provider, cls._service)
|
||||
|
@ -1,106 +0,0 @@
|
||||
# Copyright 2014 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
|
||||
from tempest.config import cfg
|
||||
from tempest import test
|
||||
|
||||
from mistral.tests.functional import base
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
class OpenStackActionsTest(base.TestCaseAdvanced):
|
||||
|
||||
_service = 'workflow'
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
super(OpenStackActionsTest, cls).setUpClass()
|
||||
|
||||
cls.identity_client = cls.mgr.identity_v3_client
|
||||
cls.images_client = cls.mgr.images_client
|
||||
|
||||
@test.attr(type='openstack')
|
||||
def test_nova_actions(self):
|
||||
nova_wb = base.get_resource(
|
||||
'openstack/nova_actions.yaml')
|
||||
self.client.prepare_workbook(self.workbook_name, nova_wb)
|
||||
|
||||
context = {
|
||||
'server_name': 'mistral-test',
|
||||
'image_ref': self.image_ref,
|
||||
'flavor_ref': self.flavor_ref
|
||||
}
|
||||
|
||||
_, execution = self.client.create_execution_wait_success(
|
||||
self.workbook_name, context, 'server_create')
|
||||
_, task_list = self.client.get_tasks_list(self.workbook_name,
|
||||
execution['id'])
|
||||
final_task = base.find_items(task_list,
|
||||
name='wait_instance', state='SUCCESS')
|
||||
|
||||
self.assertIsNotNone(final_task)
|
||||
self.assertEqual('SUCCESS', execution['state'])
|
||||
|
||||
server_id = json.loads(final_task['output'])['instance_id']
|
||||
server = self.server_client.get_server(server_id)
|
||||
|
||||
self.assertEqual('ACTIVE', server['status'])
|
||||
|
||||
self.server_client.delete_server(server_id)
|
||||
|
||||
@test.attr(type='openstack')
|
||||
def test_keystone_actions(self):
|
||||
keystone_wb = base.get_resource(
|
||||
'openstack/keystone_actions.yaml')
|
||||
self.client.prepare_workbook(self.workbook_name,
|
||||
keystone_wb)
|
||||
_, execution = self.client.create_execution_wait_success(
|
||||
self.workbook_name, context={}, task='get_some_endpoint')
|
||||
_, tasks = self.client.get_tasks_list(self.workbook_name,
|
||||
execution['id'])
|
||||
final_task = base.find_items(tasks, name="get_some_endpoint",
|
||||
state='SUCCESS')
|
||||
|
||||
self.assertIsNotNone(final_task)
|
||||
self.assertEqual('SUCCESS', execution['state'])
|
||||
|
||||
output = json.loads(final_task['output'])
|
||||
url = output['endpoint_url']
|
||||
self.assertIn("http://", url)
|
||||
|
||||
@test.attr(type='openstack')
|
||||
def test_glance_actions(self):
|
||||
glance_wb = base.get_resource(
|
||||
'openstack/glance_actions.yaml')
|
||||
self.client.prepare_workbook(self.workbook_name,
|
||||
glance_wb)
|
||||
|
||||
_, execution = self.client.create_execution_wait_success(
|
||||
self.workbook_name, context={}, task='image_list')
|
||||
_, task_list = self.client.get_tasks_list(self.workbook_name,
|
||||
execution['id'])
|
||||
final_task = base.find_items(task_list,
|
||||
name='image_get', state='SUCCESS')
|
||||
|
||||
self.assertIsNotNone(final_task)
|
||||
self.assertEqual('SUCCESS', execution['state'])
|
||||
|
||||
output = json.loads(final_task['output'])
|
||||
image = self.images_client.get_image(output['image_id'])
|
||||
|
||||
self.assertEqual(output['image_name'], image['name'])
|
@ -20,7 +20,6 @@ import pecan
|
||||
import pecan.testing
|
||||
from webtest import app as webtest_app
|
||||
|
||||
from mistral.db.v1.sqlalchemy import models
|
||||
from mistral.tests import base
|
||||
|
||||
# Disable authentication for functional tests.
|
||||
@ -29,56 +28,6 @@ cfg.CONF.set_default('auth_enable', False, group='pecan')
|
||||
__all__ = ['FunctionalTest']
|
||||
|
||||
|
||||
# Group of methods to mock DB API calls.
|
||||
|
||||
def create_db_workbook(values):
|
||||
wb = models.Workbook()
|
||||
wb.update(values)
|
||||
return wb
|
||||
|
||||
|
||||
def create_mock_workbook(values):
|
||||
return mock.MagicMock(return_value=create_db_workbook(values))
|
||||
|
||||
|
||||
def create_mock_workbooks(arr_of_values):
|
||||
return mock.MagicMock(
|
||||
return_value=[create_db_workbook(values) for values in arr_of_values]
|
||||
)
|
||||
|
||||
|
||||
def create_db_execution(values):
|
||||
ex = models.WorkflowExecution()
|
||||
ex.update(values)
|
||||
return ex
|
||||
|
||||
|
||||
def create_mock_execution(values):
|
||||
return mock.MagicMock(return_value=create_db_execution(values))
|
||||
|
||||
|
||||
def create_mock_executions(arr_of_values):
|
||||
return mock.MagicMock(
|
||||
return_value=[create_db_execution(values) for values in arr_of_values]
|
||||
)
|
||||
|
||||
|
||||
def create_db_task(values):
|
||||
t = models.Task()
|
||||
t.update(values)
|
||||
return t
|
||||
|
||||
|
||||
def create_mock_task(values):
|
||||
return mock.MagicMock(return_value=create_db_task(values))
|
||||
|
||||
|
||||
def create_mock_tasks(arr_of_values):
|
||||
return mock.MagicMock(
|
||||
return_value=[create_db_task(values) for values in arr_of_values]
|
||||
)
|
||||
|
||||
|
||||
class FunctionalTest(base.DbTestCase):
|
||||
"""Used for functional tests where you need to test your
|
||||
literal application and its integration with the framework.
|
||||
|
@ -15,14 +15,10 @@
|
||||
import datetime
|
||||
import uuid
|
||||
|
||||
from keystonemiddleware import auth_token
|
||||
import mock
|
||||
from oslo.config import cfg
|
||||
import pecan
|
||||
import pecan.testing
|
||||
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral.db.v1.sqlalchemy import models
|
||||
from mistral.openstack.common import timeutils
|
||||
from mistral.tests.unit.api import base
|
||||
|
||||
@ -61,12 +57,6 @@ PKI_TOKEN_VERIFIED = {
|
||||
}
|
||||
|
||||
|
||||
def get_mock_workbook(values):
|
||||
wb = models.Workbook()
|
||||
wb.update(values)
|
||||
return wb
|
||||
|
||||
|
||||
class TestKeystoneMiddleware(base.FunctionalTest):
|
||||
"""Test that the keystone middleware AuthProtocol is executed
|
||||
when enabled.
|
||||
@ -83,28 +73,3 @@ class TestKeystoneMiddleware(base.FunctionalTest):
|
||||
'auth_enable': cfg.CONF.pecan.auth_enable
|
||||
}
|
||||
})
|
||||
|
||||
@mock.patch.object(
|
||||
auth_token.AuthProtocol, '_get_user_token_from_header',
|
||||
mock.MagicMock(return_value=''))
|
||||
@mock.patch.object(
|
||||
auth_token.AuthProtocol, '_validate_token',
|
||||
mock.MagicMock(return_value=PKI_TOKEN_VERIFIED))
|
||||
@mock.patch.object(
|
||||
db_api, "workbook_get",
|
||||
mock.MagicMock(return_value=get_mock_workbook(WORKBOOKS[0])))
|
||||
def test_auth_succeed(self):
|
||||
resp = self.app.get('/v1/workbooks/my_workbook')
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertDictEqual(WORKBOOKS[0], resp.json)
|
||||
|
||||
@mock.patch.object(
|
||||
auth_token.AuthProtocol, '_get_user_token_from_header',
|
||||
mock.MagicMock(return_value=''))
|
||||
@mock.patch.object(
|
||||
db_api, "workbook_get",
|
||||
mock.MagicMock(return_value=get_mock_workbook(WORKBOOKS[0])))
|
||||
def test_auth_fail(self):
|
||||
# 401 unauthorized response is expected because the method
|
||||
# _validate_user_token is not mocked in this test.
|
||||
self.assertUnauthorized('/v1/workbooks/my_workbook')
|
||||
|
@ -1,204 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import mock
|
||||
from webtest import app as webtest_app
|
||||
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral import engine
|
||||
from mistral import exceptions as ex
|
||||
from mistral.tests.unit.api import base
|
||||
|
||||
# TODO(everyone): later we need additional tests verifying all the errors etc.
|
||||
|
||||
|
||||
EXECS = [
|
||||
{
|
||||
'id': '123',
|
||||
'workbook_name': 'my_workbook',
|
||||
'task': 'my_task',
|
||||
'state': 'RUNNING',
|
||||
'context': {
|
||||
"person": {
|
||||
"first_name": "John",
|
||||
"last_name": "Doe"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
WORKBOOKS = [
|
||||
{
|
||||
'name': "my_workbook",
|
||||
'description': "My cool Mistral workbook",
|
||||
'tags': ['deployment', 'demo']
|
||||
}
|
||||
]
|
||||
|
||||
UPDATED_EXEC = EXECS[0].copy()
|
||||
UPDATED_EXEC['state'] = 'STOPPED'
|
||||
|
||||
|
||||
def canonize(json_dict):
|
||||
if json_dict.get('context'):
|
||||
json_dict['context'] = json.loads(json_dict['context'])
|
||||
|
||||
return json_dict
|
||||
|
||||
|
||||
class TestExecutionsController(base.FunctionalTest):
|
||||
@mock.patch.object(db_api, 'execution_get',
|
||||
base.create_mock_execution(EXECS[0]))
|
||||
def test_workbook_get(self):
|
||||
resp = self.app.get('/v1/workbooks/my_workbook/executions/123')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertDictEqual(EXECS[0], canonize(resp.json))
|
||||
|
||||
@mock.patch.object(db_api, 'execution_get',
|
||||
base.create_mock_execution(EXECS[0]))
|
||||
def test_root_get(self):
|
||||
resp = self.app.get('/v1/executions/123')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertDictEqual(EXECS[0], canonize(resp.json))
|
||||
|
||||
@mock.patch.object(db_api, 'execution_get',
|
||||
mock.MagicMock(side_effect=ex.NotFoundException()))
|
||||
def test_get_empty(self):
|
||||
resp = self.app.get('/v1/workbooks/my_workbook/executions/123',
|
||||
expect_errors=True)
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
@mock.patch.object(db_api, 'execution_update',
|
||||
base.create_mock_execution(UPDATED_EXEC))
|
||||
def test_workbook_put(self):
|
||||
resp = self.app.put_json('/v1/workbooks/my_workbook/executions/123',
|
||||
dict(state='STOPPED'))
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertDictEqual(UPDATED_EXEC, canonize(resp.json))
|
||||
|
||||
@mock.patch.object(db_api, 'execution_update',
|
||||
base.create_mock_execution(UPDATED_EXEC))
|
||||
def test_root_put(self):
|
||||
resp = self.app.put_json('/v1/executions/123',
|
||||
dict(state='STOPPED'))
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertDictEqual(UPDATED_EXEC, canonize(resp.json))
|
||||
|
||||
@mock.patch.object(db_api, 'execution_update',
|
||||
mock.MagicMock(side_effect=ex.NotFoundException()))
|
||||
def test_put_not_found(self):
|
||||
resp = self.app.put_json('/v1/workbooks/my_workbook/executions/123',
|
||||
dict(state='STOPPED'), expect_errors=True)
|
||||
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
@mock.patch.object(engine.EngineClient, 'start_workflow_execution',
|
||||
mock.MagicMock(return_value=EXECS[0]))
|
||||
@mock.patch.object(db_api, 'workbook_definition_get',
|
||||
mock.Mock(return_value="Workflow:"))
|
||||
def test_post(self):
|
||||
my_workbook = WORKBOOKS[0]
|
||||
|
||||
self.app.post_json('/v1/workbooks', my_workbook)
|
||||
|
||||
new_exec = EXECS[0].copy()
|
||||
new_exec['context'] = json.dumps(new_exec['context'])
|
||||
|
||||
resp = self.app.post_json('/v1/workbooks/my_workbook/executions',
|
||||
new_exec)
|
||||
self.assertEqual(resp.status_int, 201)
|
||||
self.assertDictEqual(EXECS[0], canonize(resp.json))
|
||||
|
||||
@mock.patch.object(engine.EngineClient, 'start_workflow_execution',
|
||||
mock.MagicMock(return_value=EXECS[0]))
|
||||
def test_post_definition_empty(self):
|
||||
my_workbook = WORKBOOKS[0]
|
||||
self.app.post_json('/v1/workbooks',
|
||||
my_workbook)
|
||||
|
||||
new_exec = EXECS[0].copy()
|
||||
new_exec['context'] = json.dumps(new_exec['context'])
|
||||
|
||||
resp = self.app.post_json('/v1/workbooks/my_workbook/executions',
|
||||
new_exec, expect_errors=True)
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
@mock.patch.object(engine.EngineClient, 'start_workflow_execution',
|
||||
mock.MagicMock(side_effect=ex.MistralException))
|
||||
def test_post_throws_exception(self):
|
||||
context = self.assertRaises(webtest_app.AppError, self.app.post_json,
|
||||
'/v1/workbooks/my_workbook/executions',
|
||||
EXECS[0])
|
||||
self.assertIn('Bad response: 400', context.message)
|
||||
|
||||
@mock.patch.object(db_api, 'execution_delete',
|
||||
mock.MagicMock(return_value=None))
|
||||
def test_workbook_delete(self):
|
||||
resp = self.app.delete('/v1/workbooks/my_workbook/executions/123')
|
||||
|
||||
self.assertEqual(resp.status_int, 204)
|
||||
|
||||
@mock.patch.object(db_api, 'execution_delete',
|
||||
mock.MagicMock(return_value=None))
|
||||
def test_root_delete(self):
|
||||
resp = self.app.delete('/v1/executions/123')
|
||||
|
||||
self.assertEqual(resp.status_int, 204)
|
||||
|
||||
@mock.patch.object(db_api, 'execution_delete',
|
||||
mock.MagicMock(side_effect=ex.NotFoundException))
|
||||
def test_delete_not_found(self):
|
||||
resp = self.app.delete('/v1/workbooks/my_workbook/executions/123',
|
||||
expect_errors=True)
|
||||
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
@mock.patch.object(db_api, 'executions_get',
|
||||
base.create_mock_executions(EXECS))
|
||||
@mock.patch.object(db_api, 'workbook_get',
|
||||
base.create_mock_workbook({'name': 'my_workbook'}))
|
||||
def test_workbook_get_all(self):
|
||||
resp = self.app.get('/v1/workbooks/my_workbook/executions')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
|
||||
self.assertEqual(len(resp.json), 1)
|
||||
self.assertDictEqual(EXECS[0], canonize(resp.json['executions'][0]))
|
||||
|
||||
@mock.patch.object(db_api, 'executions_get',
|
||||
base.create_mock_executions(EXECS))
|
||||
@mock.patch.object(db_api, 'workbook_get',
|
||||
base.create_mock_workbook({'name': 'my_workbook'}))
|
||||
def test_root_get_all(self):
|
||||
resp = self.app.get('/v1/executions')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
|
||||
self.assertEqual(len(resp.json), 1)
|
||||
self.assertDictEqual(EXECS[0], canonize(resp.json['executions'][0]))
|
||||
|
||||
@mock.patch.object(db_api, 'executions_get',
|
||||
mock.MagicMock(return_value=EXECS))
|
||||
def test_get_all_no_workbook(self):
|
||||
resp = self.app.get('/v1/workbooks/my_workbook/executions',
|
||||
expect_errors=True)
|
||||
|
||||
self.assertEqual(resp.status_int, 404)
|
@ -1,106 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral import exceptions
|
||||
from mistral.tests.unit.api import base
|
||||
|
||||
LISTENERS = [
|
||||
{
|
||||
'id': "1",
|
||||
'workbook_name': "my_workbook",
|
||||
'description': "My cool Mistral workbook",
|
||||
'webhook': "http://my.website.org"
|
||||
}
|
||||
]
|
||||
|
||||
UPDATED_LSNR = LISTENERS[0].copy()
|
||||
UPDATED_LSNR['description'] = 'new description'
|
||||
|
||||
|
||||
class TestListenersController(base.FunctionalTest):
|
||||
@mock.patch.object(db_api, "listener_get",
|
||||
mock.MagicMock(return_value=LISTENERS[0]))
|
||||
def test_get(self):
|
||||
resp = self.app.get('/v1/workbooks/my_workbook/listeners/1')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertDictEqual(LISTENERS[0], resp.json)
|
||||
|
||||
@mock.patch.object(db_api, "listener_get",
|
||||
mock.MagicMock(
|
||||
side_effect=exceptions.NotFoundException()))
|
||||
def test_get_not_found(self):
|
||||
resp = self.app.get('/v1/workbooks/my_workbook/listeners/1',
|
||||
expect_errors=True)
|
||||
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
@mock.patch.object(db_api, "listener_update",
|
||||
mock.MagicMock(return_value=UPDATED_LSNR))
|
||||
def test_put(self):
|
||||
resp = self.app.put_json('/v1/workbooks/my_workbook/listeners/1',
|
||||
dict(description='new description'))
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertDictEqual(UPDATED_LSNR, resp.json)
|
||||
|
||||
@mock.patch.object(db_api, "listener_update",
|
||||
mock.MagicMock(
|
||||
side_effect=exceptions.NotFoundException()))
|
||||
def test_put_not_found(self):
|
||||
resp = self.app.put_json('/v1/workbooks/my_workbook/listeners/1',
|
||||
dict(description='new description'),
|
||||
expect_errors=True)
|
||||
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
@mock.patch.object(db_api, "listener_create",
|
||||
mock.MagicMock(return_value=LISTENERS[0]))
|
||||
def test_post(self):
|
||||
resp = self.app.post_json('/v1/workbooks/my_workbook/listeners',
|
||||
LISTENERS[0])
|
||||
|
||||
self.assertEqual(resp.status_int, 201)
|
||||
self.assertDictEqual(LISTENERS[0], resp.json)
|
||||
|
||||
@mock.patch.object(db_api, "listener_delete",
|
||||
mock.MagicMock(return_value=None))
|
||||
def test_delete(self):
|
||||
resp = self.app.delete('/v1/workbooks/my_workbook/listeners/1')
|
||||
|
||||
self.assertEqual(resp.status_int, 204)
|
||||
|
||||
@mock.patch.object(db_api, "listener_delete",
|
||||
mock.MagicMock(
|
||||
side_effect=exceptions.NotFoundException()))
|
||||
def test_delete_not_found(self):
|
||||
resp = self.app.delete('/v1/workbooks/my_workbook/listeners/1',
|
||||
expect_errors=True)
|
||||
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
@mock.patch.object(db_api, "listeners_get",
|
||||
mock.MagicMock(return_value=LISTENERS))
|
||||
def test_get_all(self):
|
||||
resp = self.app.get('/v1/workbooks/my_workbook/listeners')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
|
||||
self.assertEqual(len(resp.json), 1)
|
||||
self.assertDictEqual(LISTENERS[0], resp.json['listeners'][0])
|
@ -1,155 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import mock
|
||||
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral import engine
|
||||
from mistral.tests.unit.api import base
|
||||
|
||||
# TODO(everyone): later we need additional tests verifying all the errors etc.
|
||||
|
||||
TASKS = [
|
||||
{
|
||||
'id': "1",
|
||||
'workbook_name': "my_workbook",
|
||||
'execution_id': '123',
|
||||
'name': 'my_task',
|
||||
'description': 'My cool task',
|
||||
'state': 'RUNNING',
|
||||
'tags': ['deployment', 'demo'],
|
||||
'output': {
|
||||
'a': 'b'
|
||||
},
|
||||
'parameters': {
|
||||
'c': 'd'
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
UPDATED_TASK = TASKS[0].copy()
|
||||
UPDATED_TASK['state'] = 'STOPPED'
|
||||
|
||||
|
||||
def canonize(json_dict):
|
||||
if json_dict.get('output'):
|
||||
json_dict['output'] = json.loads(json_dict['output'])
|
||||
|
||||
if json_dict.get('parameters'):
|
||||
json_dict['parameters'] = json.loads(json_dict['parameters'])
|
||||
|
||||
return json_dict
|
||||
|
||||
|
||||
class TestTasksController(base.FunctionalTest):
|
||||
@mock.patch.object(db_api, "task_get", base.create_mock_task(TASKS[0]))
|
||||
def test_workbook_get(self):
|
||||
resp = self.app.get('/v1/workbooks/my_workbook/executions/123/tasks/1')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertDictEqual(TASKS[0], canonize(resp.json))
|
||||
|
||||
@mock.patch.object(db_api, "task_get", base.create_mock_task(TASKS[0]))
|
||||
def test_execution_get(self):
|
||||
resp = self.app.get('/v1/executions/123/tasks/1')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertDictEqual(TASKS[0], canonize(resp.json))
|
||||
|
||||
@mock.patch.object(db_api, "task_get", base.create_mock_task(TASKS[0]))
|
||||
def test_root_get(self):
|
||||
resp = self.app.get('/v1/tasks/1')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertDictEqual(TASKS[0], canonize(resp.json))
|
||||
|
||||
@mock.patch.object(engine.EngineClient, "convey_task_result",
|
||||
mock.MagicMock(return_value=UPDATED_TASK))
|
||||
@mock.patch.object(db_api, "task_get", base.create_mock_task(TASKS[0]))
|
||||
def test_workbook_put(self):
|
||||
resp = self.app.put_json(
|
||||
'/v1/workbooks/my_workbook/executions/123/tasks/1',
|
||||
dict(state='STOPPED', output='{"a":"b"}'))
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertDictEqual(UPDATED_TASK, canonize(resp.json))
|
||||
|
||||
@mock.patch.object(engine.EngineClient, "convey_task_result")
|
||||
@mock.patch.object(db_api, "task_get", base.create_mock_task(TASKS[0]))
|
||||
def test_execution_put(self, convey_task_result):
|
||||
convey_task_result.return_value = UPDATED_TASK
|
||||
resp = self.app.put_json(
|
||||
'/v1/executions/123/tasks/1',
|
||||
dict(state='STOPPED', output='{"a":"b"}'))
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
convey_task_result.called_once_with(resp.json)
|
||||
self.assertDictEqual(UPDATED_TASK, canonize(resp.json))
|
||||
|
||||
@mock.patch.object(engine.EngineClient, "convey_task_result")
|
||||
@mock.patch.object(db_api, "task_get", base.create_mock_task(TASKS[0]))
|
||||
def test_root_put(self, convey_task_result):
|
||||
convey_task_result.return_value = UPDATED_TASK
|
||||
resp = self.app.put_json(
|
||||
'/v1/tasks/1',
|
||||
dict(state='STOPPED', output='{"a":"b"}'))
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
convey_task_result.called_once_with(resp.json)
|
||||
self.assertDictEqual(UPDATED_TASK, canonize(resp.json))
|
||||
|
||||
@mock.patch.object(engine.EngineClient, "convey_task_result",
|
||||
mock.MagicMock(return_value=UPDATED_TASK))
|
||||
def test_put_no_task(self):
|
||||
resp = self.app.put_json(
|
||||
'/v1/workbooks/my_workbook/executions/123/tasks/1',
|
||||
dict(state='STOPPED'), expect_errors=True)
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
@mock.patch.object(db_api, "tasks_get", base.create_mock_tasks(TASKS))
|
||||
@mock.patch.object(db_api, "ensure_execution_exists",
|
||||
mock.MagicMock(return_value={'id': "abc123"}))
|
||||
def test_workbook_get_all(self):
|
||||
resp = self.app.get('/v1/workbooks/my_workbook/executions/123/tasks')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
|
||||
self.assertEqual(len(resp.json), 1)
|
||||
self.assertDictEqual(TASKS[0], canonize(resp.json['tasks'][0]))
|
||||
|
||||
@mock.patch.object(db_api, "tasks_get", base.create_mock_tasks(TASKS))
|
||||
@mock.patch.object(db_api, "ensure_execution_exists",
|
||||
mock.MagicMock(return_value={'id': "abc123"}))
|
||||
def test_execution_get_all(self):
|
||||
resp = self.app.get('/v1/executions/123/tasks')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
|
||||
self.assertEqual(len(resp.json), 1)
|
||||
self.assertDictEqual(TASKS[0], canonize(resp.json['tasks'][0]))
|
||||
|
||||
@mock.patch.object(db_api, "tasks_get", base.create_mock_tasks(TASKS))
|
||||
@mock.patch.object(db_api, "ensure_execution_exists",
|
||||
mock.MagicMock(return_value={'id': "abc123"}))
|
||||
def test_root_get_all(self):
|
||||
resp = self.app.get('/v1/tasks')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
|
||||
self.assertEqual(len(resp.json), 1)
|
||||
self.assertDictEqual(TASKS[0], canonize(resp.json['tasks'][0]))
|
||||
|
||||
@mock.patch.object(db_api, "tasks_get", base.create_mock_tasks(TASKS))
|
||||
def test_get_all_nonexistent_execution(self):
|
||||
self.assertNotFound('/v1/workbooks/my_workbook/executions/123/tasks')
|
@ -1,97 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral import exceptions
|
||||
from mistral.tests.unit.api import base
|
||||
|
||||
DEFINITION = "my definition"
|
||||
|
||||
NEW_DEFINITION = """
|
||||
Namespaces:
|
||||
Service:
|
||||
actions:
|
||||
action:
|
||||
class: std.echo
|
||||
base-parameters:
|
||||
output: Haha
|
||||
|
||||
Workflow:
|
||||
tasks:
|
||||
task1:
|
||||
parameters:
|
||||
action: Service:action
|
||||
|
||||
Triggers:
|
||||
create-vms:
|
||||
type: periodic
|
||||
tasks: create-vms
|
||||
parameters:
|
||||
cron-pattern: "* * * * *"
|
||||
"""
|
||||
|
||||
|
||||
class TestWorkbookDefinitionController(base.FunctionalTest):
|
||||
@mock.patch.object(db_api, "workbook_get",
|
||||
base.create_mock_workbook({'definition': DEFINITION}))
|
||||
def test_get(self):
|
||||
resp = self.app.get('/v1/workbooks/my_workbook/definition',
|
||||
headers={"Content-Type": "text/plain"})
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertEqual(DEFINITION, resp.text)
|
||||
|
||||
@mock.patch.object(db_api, "workbook_definition_get",
|
||||
mock.MagicMock(
|
||||
side_effect=exceptions.NotFoundException()))
|
||||
def test_get_not_found(self):
|
||||
resp = self.app.get('/v1/workbooks/my_workbook/definition',
|
||||
headers={"Content-Type": "text/plain"},
|
||||
expect_errors=True)
|
||||
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
@mock.patch.object(db_api, "workbook_update",
|
||||
base.create_mock_workbook({
|
||||
'name': 'my_workbook',
|
||||
'definition': NEW_DEFINITION}))
|
||||
def test_put(self):
|
||||
resp = self.app.put('/v1/workbooks/my_workbook/definition',
|
||||
NEW_DEFINITION,
|
||||
headers={"Content-Type": "text/plain"})
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertEqual(NEW_DEFINITION, resp.body)
|
||||
|
||||
# Check that associated triggers have been created in DB.
|
||||
triggers = db_api.triggers_get(workbook_name='my_workbook')
|
||||
|
||||
self.assertEqual(triggers[0]['name'], 'create-vms')
|
||||
self.assertEqual(triggers[0]['pattern'], '* * * * *')
|
||||
self.assertEqual(triggers[0]['workbook_name'], 'my_workbook')
|
||||
|
||||
@mock.patch.object(db_api, "workbook_update",
|
||||
mock.MagicMock(
|
||||
side_effect=exceptions.NotFoundException()))
|
||||
def test_put_not_found(self):
|
||||
resp = self.app.put('/v1/workbooks/my_workbook/definition',
|
||||
NEW_DEFINITION,
|
||||
headers={"Content-Type": "text/plain"},
|
||||
expect_errors=True)
|
||||
|
||||
self.assertEqual(resp.status_int, 404)
|
@ -1,111 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral import exceptions
|
||||
from mistral.tests.unit.api import base
|
||||
|
||||
WORKBOOKS = [
|
||||
{
|
||||
u'name': u'my_workbook',
|
||||
u'description': u'My cool Mistral workbook',
|
||||
u'tags': [u'deployment', u'demo'],
|
||||
u'scope': None
|
||||
}
|
||||
]
|
||||
|
||||
UPDATED_WORKBOOK = WORKBOOKS[0].copy()
|
||||
UPDATED_WORKBOOK['description'] = 'new description'
|
||||
|
||||
|
||||
class TestWorkbooksController(base.FunctionalTest):
|
||||
@mock.patch.object(db_api, "workbook_get",
|
||||
base.create_mock_workbook(WORKBOOKS[0]))
|
||||
def test_get(self):
|
||||
resp = self.app.get('/v1/workbooks/my_workbook')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertDictEqual(WORKBOOKS[0], resp.json)
|
||||
|
||||
@mock.patch.object(db_api, "workbook_get",
|
||||
mock.MagicMock(
|
||||
side_effect=exceptions.NotFoundException()))
|
||||
def test_get_not_found(self):
|
||||
resp = self.app.get('/v1/workbooks/dev_null', expect_errors=True)
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
@mock.patch.object(db_api, "workbook_update",
|
||||
base.create_mock_workbook(UPDATED_WORKBOOK))
|
||||
def test_put(self):
|
||||
resp = self.app.put_json('/v1/workbooks/my_workbook',
|
||||
dict(description='new description'))
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
self.assertDictEqual(UPDATED_WORKBOOK, resp.json)
|
||||
|
||||
@mock.patch.object(db_api, "workbook_update",
|
||||
mock.MagicMock(
|
||||
side_effect=exceptions.NotFoundException()))
|
||||
def test_put_not_found(self):
|
||||
resp = self.app.put_json('/v1/workbooks/my_workbook',
|
||||
dict(description='new description'),
|
||||
expect_errors=True)
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
@mock.patch.object(db_api, "workbook_create",
|
||||
base.create_mock_workbook(WORKBOOKS[0]))
|
||||
@mock.patch("mistral.services.security.create_trust",
|
||||
mock.MagicMock(return_value=WORKBOOKS[0]))
|
||||
def test_post(self):
|
||||
resp = self.app.post_json('/v1/workbooks', WORKBOOKS[0])
|
||||
|
||||
self.assertEqual(resp.status_int, 201)
|
||||
self.assertDictEqual(WORKBOOKS[0], resp.json)
|
||||
|
||||
@mock.patch.object(db_api, "workbook_create",
|
||||
mock.MagicMock(side_effect=exceptions.DBDuplicateEntry))
|
||||
def test_post_dup(self):
|
||||
resp = self.app.post_json('/v1/workbooks', WORKBOOKS[0],
|
||||
expect_errors=True)
|
||||
|
||||
self.assertEqual(resp.status_int, 409)
|
||||
|
||||
@mock.patch.object(db_api, "workbook_delete",
|
||||
mock.MagicMock(return_value=None))
|
||||
def test_delete(self):
|
||||
resp = self.app.delete('/v1/workbooks/my_workbook')
|
||||
|
||||
self.assertEqual(resp.status_int, 204)
|
||||
|
||||
@mock.patch.object(db_api, "workbook_delete",
|
||||
mock.MagicMock(
|
||||
side_effect=exceptions.NotFoundException()))
|
||||
def test_delete_not_found(self):
|
||||
resp = self.app.delete('/v1/workbooks/my_workbook', expect_errors=True)
|
||||
|
||||
self.assertEqual(resp.status_int, 404)
|
||||
|
||||
@mock.patch.object(db_api, "workbooks_get",
|
||||
base.create_mock_workbooks(WORKBOOKS))
|
||||
def test_get_all(self):
|
||||
resp = self.app.get('/v1/workbooks')
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
|
||||
self.assertEqual(len(resp.json), 1)
|
||||
self.assertDictEqual(WORKBOOKS[0], resp.json['workbooks'][0])
|
@ -1,33 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from mistral.openstack.common import jsonutils
|
||||
from mistral.tests.unit.api import base
|
||||
|
||||
|
||||
class TestRootController(base.FunctionalTest):
|
||||
|
||||
def test_index(self):
|
||||
resp = self.app.get('/', headers={'Accept': 'application/json'})
|
||||
|
||||
self.assertEqual(resp.status_int, 200)
|
||||
|
||||
data = jsonutils.loads(resp.body.decode())
|
||||
|
||||
self.assertEqual(data[0]['id'], 'v1.0')
|
||||
self.assertEqual(data[0]['status'], 'SUPPORTED')
|
||||
self.assertEqual(data[0]['link'], {'href': 'http://localhost/v1',
|
||||
'target': 'v1'})
|
@ -26,7 +26,9 @@ class TestRootController(base.FunctionalTest):
|
||||
|
||||
data = jsonutils.loads(resp.body.decode())
|
||||
|
||||
self.assertEqual(data[1]['id'], 'v2.0')
|
||||
self.assertEqual(data[1]['status'], 'CURRENT')
|
||||
self.assertEqual(data[1]['link'], {'href': 'http://localhost/v2',
|
||||
'target': 'v2'})
|
||||
self.assertEqual(data[0]['id'], 'v2.0')
|
||||
self.assertEqual(data[0]['status'], 'CURRENT')
|
||||
self.assertEqual(
|
||||
data[0]['link'],
|
||||
{'href': 'http://localhost/v2', 'target': 'v2'}
|
||||
)
|
||||
|
@ -1,475 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from mistral import context as auth_context
|
||||
from mistral.db.v1.sqlalchemy import api as db_api
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import timeutils
|
||||
from mistral.tests import base as test_base
|
||||
|
||||
|
||||
# Two sample trigger rows used as fixtures by TriggerTest and TXTest below.
# NOTE: 'next_execution_time' is evaluated once, at module import time.
TRIGGERS = [
    {
        'name': 'test_trigger1',
        'workbook_name': 'my_workbook1',
        'pattern': '* *',
        'next_execution_time': timeutils.utcnow(),
        'updated_at': None
    },
    {
        'name': 'test_trigger2',
        'workbook_name': 'my_workbook2',
        'pattern': '* * *',
        'next_execution_time': timeutils.utcnow(),
        'updated_at': None
    }
]
|
||||
|
||||
|
||||
class TriggerTest(test_base.DbTestCase):
    """CRUD tests for trigger persistence through the v1 DB API."""

    def test_trigger_create_and_get(self):
        """A created trigger can be fetched back by id unchanged."""
        trigger = db_api.trigger_create(TRIGGERS[0])

        self.assertEqual(trigger, db_api.trigger_get(trigger['id']))

    def test_trigger_update(self):
        """An updated field is persisted and visible on re-fetch."""
        trigger = db_api.trigger_create(TRIGGERS[0])

        changed = db_api.trigger_update(trigger['id'], {'pattern': '0 * *'})

        self.assertEqual('0 * *', changed.pattern)
        self.assertEqual(changed, db_api.trigger_get(trigger['id']))

    def test_trigger_delete(self):
        """A deleted trigger can no longer be fetched."""
        trigger = db_api.trigger_create(TRIGGERS[0])

        db_api.trigger_delete(trigger['id'])

        self.assertRaises(
            exc.NotFoundException, db_api.trigger_get, trigger['id'])

    def test_trigger_list(self):
        """All created triggers come back, in creation order."""
        expected = [db_api.trigger_create(values) for values in TRIGGERS]

        fetched = db_api.triggers_get_all()

        self.assertEqual(2, len(fetched))
        self.assertEqual(expected[0], fetched[0])
        self.assertEqual(expected[1], fetched[1])
|
||||
|
||||
|
||||
# Two sample workbook rows: one 'public' and one 'private', so the scope
# visibility tests in WorkbookTest can exercise both cases.
# NOTE(review): 'project_id' here is deliberately different from the test
# context's project id — test_workbook_public asserts it gets overridden.
WORKBOOKS = [
    {
        'name': 'my_workbook1',
        'description': 'my description',
        'definition': 'empty',
        'tags': ['mc'],
        'scope': 'public',
        'updated_at': None,
        'project_id': '1233',
        'trust_id': '1234'
    },
    {
        'name': 'my_workbook2',
        'description': 'my description',
        'definition': 'empty',
        'tags': ['mc'],
        'scope': 'private',
        'updated_at': None,
        'project_id': '1233',
        'trust_id': '12345'
    },
]
|
||||
|
||||
|
||||
class WorkbookTest(test_base.DbTestCase):
    """CRUD and scope-visibility (public/private) tests for workbooks."""

    @staticmethod
    def _switch_to_another_project():
        # Impersonate a different user/project so that scope rules
        # can be checked from a foreign tenant's point of view.
        auth_context.set_ctx(
            auth_context.MistralContext(user_id='9-0-44-5',
                                        project_id='99-88-33',
                                        user_name='test-user',
                                        project_name='test-another',
                                        is_admin=False)
        )

    def test_workbook_create_and_get(self):
        """A created workbook can be fetched back by name unchanged."""
        workbook = db_api.workbook_create(WORKBOOKS[0])

        self.assertEqual(workbook, db_api.workbook_get(workbook['name']))

    def test_workbook_update(self):
        """An updated field is persisted and visible on re-fetch."""
        workbook = db_api.workbook_create(WORKBOOKS[0])

        changed = db_api.workbook_update(workbook['name'],
                                         {'description': 'my new desc'})

        self.assertEqual('my new desc', changed['description'])
        self.assertEqual(changed, db_api.workbook_get(workbook['name']))

    def test_workbook_list(self):
        """All created workbooks come back, in creation order."""
        expected = [db_api.workbook_create(values) for values in WORKBOOKS]

        fetched = db_api.workbooks_get_all()

        self.assertEqual(2, len(fetched))
        self.assertEqual(expected[0], fetched[0])
        self.assertEqual(expected[1], fetched[1])

    def test_workbook_delete(self):
        """A deleted workbook can no longer be fetched."""
        workbook = db_api.workbook_create(WORKBOOKS[0])

        self.assertEqual(workbook, db_api.workbook_get(workbook['name']))

        db_api.workbook_delete(workbook['name'])

        self.assertRaises(exc.NotFoundException,
                          db_api.workbook_get, workbook['name'])

    def test_workbook_private(self):
        """A private workbook is invisible from other projects."""
        workbook = db_api.workbook_create(WORKBOOKS[1])

        fetched = db_api.workbooks_get_all()

        self.assertEqual(1, len(fetched))
        self.assertEqual(workbook, fetched[0])

        self._switch_to_another_project()

        self.assertEqual(0, len(db_api.workbooks_get_all()))

    def test_workbook_public(self):
        """A public workbook stays visible from any project."""
        workbook = db_api.workbook_create(WORKBOOKS[0])

        fetched = db_api.workbooks_get_all()

        self.assertEqual(1, len(fetched))
        self.assertEqual(workbook, fetched[0])

        # The stored project_id must come from the request context,
        # not from the value supplied in the workbook payload.
        self.assertEqual(workbook['project_id'],
                         auth_context.ctx().project_id)
        self.assertNotEqual(WORKBOOKS[0]['project_id'],
                            auth_context.ctx().project_id)

        self._switch_to_another_project()

        fetched = db_api.workbooks_get_all()

        self.assertEqual(1, len(fetched))
        self.assertEqual(workbook, fetched[0])
        self.assertEqual('public', workbook['scope'])
|
||||
|
||||
|
||||
# Two sample execution rows for the same workbook, used by ExecutionTest.
EXECUTIONS = [
    {
        'workbook_name': 'my_workbook',
        'task': 'my_task1',
        'state': 'IDLE',
        'updated_at': None,
        'context': None
    },
    {
        'workbook_name': 'my_workbook',
        'task': 'my_task2',
        'state': 'RUNNING',
        'updated_at': None,
        'context': {'image_id': '123123'}
    }
]
|
||||
|
||||
|
||||
class ExecutionTest(test_base.DbTestCase):
    """CRUD tests for workflow executions through the v1 DB API."""

    @staticmethod
    def _create(values):
        # execution_create() takes the owning workbook name separately.
        return db_api.execution_create(values['workbook_name'], values)

    def test_execution_create_and_get(self):
        """A created execution can be fetched back by id unchanged."""
        execution = self._create(EXECUTIONS[0])

        self.assertEqual(execution, db_api.execution_get(execution['id']))

    def test_execution_update(self):
        """An updated field is persisted and visible on re-fetch."""
        execution = self._create(EXECUTIONS[0])

        changed = db_api.execution_update(execution['id'], {'task': 'task10'})

        self.assertEqual('task10', changed['task'])
        self.assertEqual(changed, db_api.execution_get(execution['id']))

    def test_execution_list(self):
        """Filtering by workbook name returns all of its executions."""
        expected = [self._create(values) for values in EXECUTIONS]

        fetched = db_api.executions_get(
            workbook_name=EXECUTIONS[0]['workbook_name'])

        self.assertEqual(2, len(fetched))
        self.assertEqual(expected[0], fetched[0])
        self.assertEqual(expected[1], fetched[1])

    def test_execution_delete(self):
        """A deleted execution can no longer be fetched."""
        execution = self._create(EXECUTIONS[0])

        self.assertEqual(execution, db_api.execution_get(execution['id']))

        db_api.execution_delete(execution['id'])

        self.assertRaises(exc.NotFoundException,
                          db_api.execution_get,
                          execution['id'])
|
||||
|
||||
|
||||
# Two sample task rows bound to execution '1', used by TaskTest.
TASKS = [
    {
        'workbook_name': 'my_workbook',
        'execution_id': '1',
        'name': 'my_task1',
        'description': 'my description',
        'requires': ['my_task2', 'my_task3'],
        'task_spec': None,
        'action_spec': None,
        'action': {'name': 'Nova:create-vm'},
        'state': 'IDLE',
        'tags': ['deployment'],
        'updated_at': None,
        'in_context': None,
        'parameters': None,
        'output': None,
        'task_runtime_context': None
    },
    {
        'workbook_name': 'my_workbook',
        'execution_id': '1',
        'name': 'my_task2',
        'description': 'my description',
        'requires': ['my_task4', 'my_task5'],
        'task_spec': None,
        'action_spec': None,
        'action': {'name': 'Cinder:create-volume'},
        'state': 'IDLE',
        'tags': ['deployment'],
        'updated_at': None,
        'in_context': {'image_id': '123123'},
        'parameters': {'image_id': '123123'},
        'output': {'vm_id': '343123'},
        'task_runtime_context': None
    },
]
|
||||
|
||||
|
||||
class TaskTest(test_base.DbTestCase):
    """CRUD tests for tasks through the v1 DB API."""

    @staticmethod
    def _create(values):
        # task_create() takes the owning execution id separately.
        return db_api.task_create(values['execution_id'], values)

    def test_task_create_and_get(self):
        """A created task can be fetched back by id unchanged."""
        task = self._create(TASKS[0])

        self.assertEqual(task, db_api.task_get(task['id']))

    def test_task_update(self):
        """An updated field is persisted and visible on re-fetch."""
        task = self._create(TASKS[0])

        changed = db_api.task_update(task['id'],
                                     {'description': 'my new desc'})

        self.assertEqual('my new desc', changed['description'])
        self.assertEqual(changed, db_api.task_get(task['id']))

    def test_task_list(self):
        """Filtering by workbook name returns all of its tasks."""
        expected = [self._create(values) for values in TASKS]

        fetched = db_api.tasks_get(
            workbook_name=TASKS[0]['workbook_name'])

        self.assertEqual(2, len(fetched))
        self.assertEqual(expected[0], fetched[0])
        self.assertEqual(expected[1], fetched[1])

    def test_task_delete(self):
        """A deleted task can no longer be fetched."""
        task = self._create(TASKS[0])

        self.assertEqual(task, db_api.task_get(task['id']))

        db_api.task_delete(task['id'])

        self.assertRaises(exc.NotFoundException, db_api.task_get,
                          task['id'])
|
||||
|
||||
|
||||
class TXTest(test_base.DbTestCase):
    """Tests for explicit (start/commit/rollback/end) and context-manager
    transactions of the v1 DB API, including multi-object atomicity.

    NOTE(review): some tests access the id as created['id'] and others as
    created.id — both appear to work on the model objects; consider
    unifying the style.
    """

    def test_rollback(self):
        """Rolled-back creations must not be visible after the tx ends."""
        db_api.start_tx()

        try:
            created = db_api.trigger_create(TRIGGERS[0])

            # Inside the transaction the new row is visible.
            fetched = db_api.trigger_get(created['id'])
            self.assertEqual(created, fetched)

            self.assertTrue(self.is_db_session_open())

            db_api.rollback_tx()
        finally:
            # end_tx() must always run so the session is released.
            db_api.end_tx()

        self.assertFalse(self.is_db_session_open())

        # After rollback the trigger must be gone.
        self.assertRaises(exc.NotFoundException,
                          db_api.trigger_get, created['id'])

        # Reads outside a transaction must not leave a session open.
        self.assertFalse(self.is_db_session_open())

    def test_commit(self):
        """Committed creations survive the end of the transaction."""
        db_api.start_tx()

        try:
            created = db_api.trigger_create(TRIGGERS[0])

            fetched = db_api.trigger_get(created.id)
            self.assertEqual(created, fetched)

            self.assertTrue(self.is_db_session_open())

            db_api.commit_tx()
        finally:
            db_api.end_tx()

        self.assertFalse(self.is_db_session_open())

        # The committed trigger is readable after the session is closed.
        fetched = db_api.trigger_get(created.id)
        self.assertEqual(created, fetched)

        self.assertFalse(self.is_db_session_open())

    def test_commit_transaction(self):
        """The transaction() context manager commits on normal exit."""
        with db_api.transaction():
            created = db_api.trigger_create(TRIGGERS[0])

            fetched = db_api.trigger_get(created.id)
            self.assertEqual(created, fetched)

            self.assertTrue(self.is_db_session_open())

        # Leaving the 'with' block closes the session and commits.
        self.assertFalse(self.is_db_session_open())

        fetched = db_api.trigger_get(created.id)

        self.assertEqual(created, fetched)
        self.assertFalse(self.is_db_session_open())

    def test_rollback_multiple_objects(self):
        """Rollback must discard every object created in the transaction."""
        db_api.start_tx()

        try:
            created = db_api.trigger_create(TRIGGERS[0])

            fetched = db_api.trigger_get(created['id'])
            self.assertEqual(created, fetched)

            created_workbook = db_api.workbook_create(WORKBOOKS[0])

            fetched_workbook = db_api.workbook_get(created_workbook['name'])
            self.assertEqual(created_workbook, fetched_workbook)

            self.assertTrue(self.is_db_session_open())

            db_api.rollback_tx()
        finally:
            db_api.end_tx()

        self.assertFalse(self.is_db_session_open())

        # Both the trigger and the workbook must have been discarded.
        self.assertRaises(exc.NotFoundException,
                          db_api.trigger_get, created['id'])

        self.assertRaises(exc.NotFoundException, db_api.workbook_get,
                          created_workbook['name'])

        self.assertFalse(self.is_db_session_open())

    def test_rollback_transaction(self):
        """The context manager rolls everything back when an exception
        (here: a duplicate-key violation) escapes the block."""
        try:
            with db_api.transaction():
                created = db_api.workbook_create(WORKBOOKS[0])
                fetched = db_api.workbook_get(
                    created['name']
                )

                self.assertEqual(created, fetched)
                self.assertTrue(self.is_db_session_open())

                # Creating the same workbook twice triggers the rollback.
                db_api.workbook_create(WORKBOOKS[0])
        except exc.DBDuplicateEntry:
            pass

        self.assertFalse(self.is_db_session_open())
        # The first creation must have been rolled back as well.
        self.assertRaises(
            exc.NotFoundException,
            db_api.workbook_get,
            created['name']
        )

    def test_commit_multiple_objects(self):
        """Commit must persist every object created in the transaction."""
        db_api.start_tx()

        try:
            created = db_api.trigger_create(TRIGGERS[0])

            fetched = db_api.trigger_get(created['id'])
            self.assertEqual(created, fetched)

            created_workbook = db_api.workbook_create(WORKBOOKS[0])

            fetched_workbook = db_api.workbook_get(created_workbook['name'])
            self.assertEqual(created_workbook, fetched_workbook)

            self.assertTrue(self.is_db_session_open())

            db_api.commit_tx()
        finally:
            db_api.end_tx()

        self.assertFalse(self.is_db_session_open())

        # Both objects survive the closed session.
        fetched = db_api.trigger_get(created['id'])
        self.assertEqual(created, fetched)

        fetched_workbook = db_api.workbook_get(created_workbook['name'])
        self.assertEqual(created_workbook, fetched_workbook)

        self.assertFalse(self.is_db_session_open())
|
@ -1,382 +0,0 @@
|
||||
# Copyright (c) 2013 Mirantis Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import mock
|
||||
from oslo.config import cfg
|
||||
|
||||
from mistral.actions import std_actions
|
||||
from mistral import context as auth_context
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral.db.v1.sqlalchemy import models
|
||||
from mistral import engine
|
||||
from mistral.engine.drivers.default import engine as concrete_engine
|
||||
from mistral.engine import executor
|
||||
from mistral.engine import states
|
||||
from mistral import expressions
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.tests import base
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)

# Workbook name shared by every test in this module.
WB_NAME = "my_workbook"
# Start context passed to every workflow execution.
CONTEXT = None  # TODO(rakhmerov): Use a meaningful value.

# Use the set_default method to set value otherwise in certain test cases
# the change in value is not permanent.
cfg.CONF.set_default('auth_enable', False, group='pecan')
|
||||
|
||||
# TODO(rakhmerov): add more tests for errors, execution stop etc.
|
||||
|
||||
|
||||
def get_mock_workbook(file, name='my_wb'):
    """Build an in-memory Workbook model whose definition is read
    from the given test resource file.
    """
    workbook = models.Workbook()

    workbook.name = name
    workbook.definition = base.get_resource(file)

    return workbook
|
||||
|
||||
|
||||
# The class-level patches route engine/executor RPC through the in-process
# EngineTestCase mocks and stub out real HTTP actions, so workflows run
# synchronously against the test DB.
@mock.patch.object(auth_context, 'ctx', mock.MagicMock())
@mock.patch.object(
    engine.EngineClient, 'start_workflow_execution',
    mock.MagicMock(side_effect=base.EngineTestCase.mock_start_workflow))
@mock.patch.object(
    engine.EngineClient, 'convey_task_result',
    mock.MagicMock(side_effect=base.EngineTestCase.mock_task_result))
@mock.patch.object(
    std_actions.HTTPAction, 'run',
    mock.MagicMock(return_value={'state': states.SUCCESS}))
class TestEngine(base.EngineTestCase):
    """End-to-end tests of the default engine's control flow
    (single task, 'requires' dependencies, direct flow, namespaces).
    """

    @mock.patch.object(executor.ExecutorClient, "handle_task",
                       mock.MagicMock())
    @mock.patch.object(
        db_api, 'workbook_get',
        mock.MagicMock(return_value=get_mock_workbook(
            'control_flow/one_sync_task.yaml')))
    def test_with_one_task(self):
        """A one-task workflow dispatches the task to the executor and
        finishes once the task result is conveyed."""
        execution = self.engine.start_workflow_execution(WB_NAME, "build_name",
                                                         CONTEXT)

        task = db_api.tasks_get(workbook_name=WB_NAME,
                                execution_id=execution['id'])[0]

        # The engine must have handed exactly this task to the executor.
        executor.ExecutorClient.handle_task.assert_called_once_with(
            auth_context.ctx(),
            params={'output': 'Stormin Stanley'},
            task_id=task['id'],
            action_name='std.echo')

        self.engine.convey_task_result(task['id'],
                                       states.SUCCESS,
                                       {'output': 'Stormin Stanley'})

        task = db_api.tasks_get(workbook_name=WB_NAME,
                                execution_id=execution['id'])[0]
        execution = db_api.execution_get(execution['id'])

        self.assertEqual(execution['state'], states.SUCCESS)
        self.assertEqual(task['state'], states.SUCCESS)
        self.assertEqual(
            task['output'],
            {'task': {'build_name': {'string': 'Stormin Stanley'}}})

    @mock.patch.object(
        engine.EngineClient, 'get_workflow_execution_state',
        mock.MagicMock(
            side_effect=base.EngineTestCase.mock_get_workflow_state))
    @mock.patch.object(executor.ExecutorClient, "handle_task",
                       mock.MagicMock())
    @mock.patch.object(
        db_api, 'workbook_get',
        mock.MagicMock(return_value=get_mock_workbook(
            'control_flow/require_flow.yaml')))
    def test_require_flow(self):
        """Tasks chained via 'requires' start only after their
        prerequisites succeed; the workflow succeeds when all do."""
        execution = self.engine.start_workflow_execution(WB_NAME, "greet",
                                                         CONTEXT)

        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])

        self.engine.convey_task_result(tasks[0]['id'],
                                       states.SUCCESS, None)

        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])

        # First task done -> second runs, third still waits.
        self.assertIsNotNone(tasks)
        self.assertEqual(3, len(tasks))
        self.assertEqual(tasks[0]['state'], states.SUCCESS)
        self.assertEqual(tasks[1]['state'], states.RUNNING)
        self.assertEqual(tasks[2]['state'], states.IDLE)
        self.assertEqual(states.RUNNING,
                         self.engine.get_workflow_execution_state(
                             WB_NAME, execution['id']))

        self.engine.convey_task_result(tasks[1]['id'],
                                       states.SUCCESS, None)

        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])
        execution = db_api.execution_get(execution['id'])

        self.assertEqual(3, len(tasks))
        self.assertEqual(tasks[2]['state'], states.RUNNING)

        self.engine.convey_task_result(tasks[2]['id'],
                                       states.SUCCESS, None)
        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])

        execution = db_api.execution_get(execution['id'])

        # All tasks succeeded -> whole execution succeeded.
        self.assertEqual(execution['state'], states.SUCCESS)
        self.assertEqual(tasks[0]['state'], states.SUCCESS)
        self.assertEqual(tasks[1]['state'], states.SUCCESS)
        self.assertEqual(tasks[2]['state'], states.SUCCESS)
        self.assertEqual(states.SUCCESS,
                         self.engine.get_workflow_execution_state(
                             WB_NAME, execution['id']))

    @mock.patch.object(
        engine.EngineClient, 'get_workflow_execution_state',
        mock.MagicMock(
            side_effect=base.EngineTestCase.mock_get_workflow_state))
    @mock.patch.object(executor.ExecutorClient, "handle_task",
                       mock.MagicMock())
    @mock.patch.object(
        db_api, 'workbook_get',
        mock.MagicMock(return_value=get_mock_workbook(
            'control_flow/require_flow.yaml')))
    def test_require_error_flow(self):
        """A failed prerequisite puts the workflow in ERROR and keeps
        dependent tasks from completing the flow."""
        execution = self.engine.start_workflow_execution(WB_NAME, "greet",
                                                         CONTEXT)

        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])

        # Fail the first task.
        self.engine.convey_task_result(tasks[0]['id'],
                                       states.ERROR, None)

        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])

        self.assertIsNotNone(tasks)
        self.assertEqual(3, len(tasks))
        self.assertEqual(tasks[0]['state'], states.ERROR)
        self.assertEqual(tasks[1]['state'], states.RUNNING)
        self.assertEqual(tasks[2]['state'], states.IDLE)
        self.assertEqual(states.ERROR,
                         self.engine.get_workflow_execution_state(
                             WB_NAME, execution['id']))

        self.engine.convey_task_result(tasks[1]['id'],
                                       states.SUCCESS, None)

        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])
        execution = db_api.execution_get(execution['id'])

        self.assertEqual(3, len(tasks))

        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])

        execution = db_api.execution_get(execution['id'])

        # Third task never ran: its prerequisite chain contains the error.
        self.assertEqual(execution['state'], states.ERROR)
        self.assertEqual(tasks[0]['state'], states.ERROR)
        self.assertEqual(tasks[1]['state'], states.SUCCESS)
        self.assertEqual(tasks[2]['state'], states.IDLE)
        self.assertEqual(states.ERROR,
                         self.engine.get_workflow_execution_state(
                             WB_NAME, execution['id']))

    @mock.patch.object(
        concrete_engine.DefaultEngine, '_run_task',
        mock.MagicMock(side_effect=base.EngineTestCase.mock_run_task))
    @mock.patch.object(
        expressions, "evaluate", mock.MagicMock(side_effect=lambda x, y: x))
    @mock.patch.object(
        db_api, 'workbook_get',
        mock.MagicMock(return_value=get_mock_workbook(
            'control_flow/one_sync_task.yaml')))
    def test_with_one_sync_task(self):
        """A synchronous task completes the workflow without any
        explicit convey_task_result call."""
        execution = self.engine.start_workflow_execution(WB_NAME, "build_name",
                                                         CONTEXT)

        task = db_api.tasks_get(workbook_name=WB_NAME,
                                execution_id=execution['id'])[0]
        execution = db_api.execution_get(execution['id'])

        self.assertEqual(execution['state'], states.SUCCESS)
        self.assertEqual(task['state'], states.SUCCESS)

    @mock.patch.object(
        concrete_engine.DefaultEngine, '_run_task',
        mock.MagicMock(side_effect=base.EngineTestCase.mock_run_task))
    @mock.patch.object(
        expressions, "evaluate", mock.MagicMock(side_effect=lambda x, y: x))
    @mock.patch.object(
        db_api, 'workbook_get',
        mock.MagicMock(return_value=get_mock_workbook(
            'control_flow/direct_flow.yaml')))
    def test_direct_flow_on_success_finish(self):
        """on-success transitions fan out to downstream tasks; the flow
        finishes once every spawned task succeeds."""
        # Start workflow.
        execution = self.engine.start_workflow_execution(WB_NAME,
                                                         "start-task",
                                                         CONTEXT)
        # Only the first task is RUNNING
        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])
        self.assertEqual(len(tasks), 1)
        task = self._assert_single_item(tasks,
                                        name='start-task',
                                        state=states.RUNNING)

        # Make 'start-task' successful.
        self.engine.convey_task_result(task['id'],
                                       states.SUCCESS, None)

        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])
        self.assertEqual(len(tasks), 3)
        self._assert_single_item(tasks,
                                 name='start-task',
                                 state=states.SUCCESS)
        task1 = self._assert_single_item(tasks,
                                         name='task-one',
                                         state=states.RUNNING)
        self._assert_single_item(tasks,
                                 name='task-two',
                                 state=states.RUNNING)

        # Make 'task-one' tasks successful.
        self.engine.convey_task_result(task1['id'],
                                       states.SUCCESS, None)

        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])

        # 'task-one' success spawned a second 'task-two' instance.
        tasks_2 = self._assert_multiple_items(tasks, 2,
                                              name='task-two',
                                              state=states.RUNNING)

        # Make both 'task-two' task successful.
        self.engine.convey_task_result(tasks_2[0]['id'],
                                       states.SUCCESS, None)
        self.engine.convey_task_result(tasks_2[1]['id'],
                                       states.SUCCESS, None)

        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])
        execution = db_api.execution_get(execution['id'])

        self._assert_multiple_items(tasks, 4, state=states.SUCCESS)
        self.assertEqual(execution['state'], states.SUCCESS)

    @mock.patch.object(
        concrete_engine.DefaultEngine, '_run_task',
        mock.MagicMock(side_effect=base.EngineTestCase.mock_run_task))
    @mock.patch.object(
        expressions, "evaluate", mock.MagicMock(side_effect=lambda x, y: x))
    @mock.patch.object(
        db_api, 'workbook_get',
        mock.MagicMock(return_value=get_mock_workbook(
            'control_flow/direct_flow.yaml')))
    def test_direct_flow_on_error_finish(self):
        """on-error transitions run their handler tasks; the execution
        still finishes SUCCESS once the handlers succeed."""
        # Start workflow.
        execution = self.engine.start_workflow_execution(WB_NAME,
                                                         "start-task",
                                                         CONTEXT)
        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])

        self.assertEqual(execution['state'], states.RUNNING)
        start_task = self._assert_single_item(tasks,
                                              name='start-task',
                                              state=states.RUNNING)

        # Make 'start-task' task fail.
        self.engine.convey_task_result(start_task['id'],
                                       states.ERROR, CONTEXT)
        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])

        # The error branch spawned three handler tasks.
        self.assertEqual(len(tasks), 4)
        task3 = self._assert_single_item(tasks,
                                         name='task-three',
                                         state=states.RUNNING)
        task2 = self._assert_single_item(tasks,
                                         name='task-two',
                                         state=states.RUNNING)
        task4 = self._assert_single_item(tasks,
                                         name='task-four',
                                         state=states.RUNNING)

        # Make all running tasks successful.
        self.engine.convey_task_result(task2['id'],
                                       states.SUCCESS, None)
        self.engine.convey_task_result(task3['id'],
                                       states.SUCCESS, None)
        self.engine.convey_task_result(task4['id'],
                                       states.SUCCESS, None)

        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])
        execution = db_api.execution_get(execution['id'])

        self._assert_multiple_items(tasks, 3, state=states.SUCCESS)
        self._assert_single_item(tasks, state=states.ERROR)
        self.assertEqual(execution['state'], states.SUCCESS)

    @mock.patch.object(
        db_api, 'workbook_get',
        mock.MagicMock(return_value=get_mock_workbook(
            'control_flow/no_namespaces.yaml')))
    @mock.patch.object(
        concrete_engine.DefaultEngine, '_run_task',
        mock.MagicMock(side_effect=base.EngineTestCase.mock_run_task))
    def test_engine_with_no_namespaces(self):
        """A workbook without action namespaces still runs to SUCCESS."""
        execution = self.engine.start_workflow_execution(WB_NAME, "task1", {})

        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])
        execution = db_api.execution_get(execution['id'])

        self.assertIsNotNone(tasks)
        self.assertEqual(1, len(tasks))
        self.assertEqual(tasks[0]['state'], states.SUCCESS)
        self.assertEqual(execution['state'], states.SUCCESS)

    @mock.patch.object(
        db_api, 'workbook_get',
        mock.MagicMock(return_value=get_mock_workbook(
            'control_flow/one_std_task.yaml')))
    @mock.patch.object(
        concrete_engine.DefaultEngine, '_run_task',
        mock.MagicMock(side_effect=base.EngineTestCase.mock_run_task))
    def test_engine_task_std_action_with_namespaces(self):
        """A namespaced std action task runs to SUCCESS."""
        execution = self.engine.start_workflow_execution(WB_NAME,
                                                         "std_http_task", {})

        tasks = db_api.tasks_get(workbook_name=WB_NAME,
                                 execution_id=execution['id'])
        execution = db_api.execution_get(execution['id'])

        self.assertEqual(1, len(tasks))
        self.assertEqual(states.SUCCESS, tasks[0]['state'])
        self.assertEqual(states.SUCCESS, execution['state'])
|
@ -1,191 +0,0 @@
|
||||
# Copyright (c) 2013 Mirantis Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import uuid
|
||||
|
||||
import eventlet
|
||||
import mock
|
||||
from oslo.config import cfg
|
||||
|
||||
eventlet.monkey_patch()
|
||||
|
||||
from mistral.actions import std_actions
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral import engine
|
||||
from mistral.engine import executor
|
||||
from mistral.engine import states
|
||||
from mistral import exceptions
|
||||
from mistral.openstack.common import importutils
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.tests import base
|
||||
|
||||
|
||||
# We need to make sure that all configuration properties are registered.
importutils.import_module("mistral.config")
LOG = logging.getLogger(__name__)

# Use the set_default method to set value otherwise in certain test cases
# the change in value is not permanent.
cfg.CONF.set_default('auth_enable', False, group='pecan')
|
||||
|
||||
|
||||
WORKBOOK_NAME = 'my_workbook'
TASK_NAME = 'create-vms'

# A complete workbook row as expected by the v1 DB API.
SAMPLE_WORKBOOK = {
    'id': str(uuid.uuid4()),
    'name': WORKBOOK_NAME,
    'description': 'my description',
    'definition': base.get_resource("test_rest.yaml"),
    'tags': [],
    'scope': 'public',
    'updated_at': None,
    'project_id': '123',
    'trust_id': '1234'
}

# An execution already in RUNNING state, owned by the sample workbook.
SAMPLE_EXECUTION = {
    'id': str(uuid.uuid4()),
    'workbook_name': WORKBOOK_NAME,
    'task': TASK_NAME,
    'state': states.RUNNING,
    'updated_at': None,
    'context': None
}

# A task bound to the sample execution; 'execution_id' is replaced with
# the real id in TestExecutor.setUp().
SAMPLE_TASK = {
    'name': TASK_NAME,
    'workbook_name': WORKBOOK_NAME,
    'execution_id': 'Will be filled up by SetUp',
    'action_spec': {
        'name': 'my-action',
        'class': 'std.http',
        'base-parameters': {
            'url': 'http://localhost:8989/v1/workbooks',
            'method': 'GET'
        },
        'namespace': 'MyRest'
    },
    'in_context': {},
    'parameters': {},
    'task_spec': {
        'action': 'MyRest.my-action',
        'name': TASK_NAME},
    'requires': [],
    'state': states.IDLE}

# Minimal auth context passed over the fake RPC transport.
SAMPLE_CONTEXT = {
    'user': 'admin',
    'tenant': 'mistral'
}
|
||||
|
||||
|
||||
@mock.patch.object(
|
||||
executor.ExecutorClient, 'handle_task',
|
||||
mock.MagicMock(side_effect=base.EngineTestCase.mock_handle_task))
|
||||
class TestExecutor(base.DbTestCase):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(TestExecutor, self).__init__(*args, **kwargs)
|
||||
|
||||
self.transport = base.get_fake_transport()
|
||||
|
||||
def setUp(self):
|
||||
super(TestExecutor, self).setUp()
|
||||
|
||||
# Create a new workbook.
|
||||
self.workbook = db_api.workbook_create(SAMPLE_WORKBOOK)
|
||||
|
||||
# Create a new execution.
|
||||
self.execution = db_api.execution_create(
|
||||
SAMPLE_EXECUTION['workbook_name'], SAMPLE_EXECUTION)
|
||||
|
||||
self.addCleanup(db_api.execution_delete, SAMPLE_EXECUTION['id'])
|
||||
|
||||
# Create a new task.
|
||||
SAMPLE_TASK['execution_id'] = self.execution['id']
|
||||
self.task = db_api.task_create(
|
||||
SAMPLE_TASK['execution_id'], SAMPLE_TASK)
|
||||
|
||||
def test_setup(self):
|
||||
"""Validate test setup."""
|
||||
self.assertIsNotNone(self.workbook)
|
||||
self.assertIsNotNone(self.execution)
|
||||
self.assertIsNotNone(self.task)
|
||||
self.assertIsNotNone(self.task.id)
|
||||
|
||||
@mock.patch.object(std_actions.EchoAction, 'run')
|
||||
@mock.patch.object(engine.EngineClient, 'convey_task_result',
|
||||
mock.MagicMock())
|
||||
def test_handle_task(self, action):
|
||||
task_id = '12345'
|
||||
action_name = 'std.echo'
|
||||
params = {
|
||||
'output': 'some'
|
||||
}
|
||||
|
||||
# Send the task request to the Executor.
|
||||
ex_client = executor.ExecutorClient(self.transport)
|
||||
ex_client.handle_task(SAMPLE_CONTEXT,
|
||||
task_id=task_id,
|
||||
action_name=action_name,
|
||||
params=params)
|
||||
|
||||
engine.EngineClient.convey_task_result.assert_called_once_with(
|
||||
task_id,
|
||||
states.SUCCESS,
|
||||
action.return_value)
|
||||
|
||||
@mock.patch.object(engine.EngineClient, 'convey_task_result',
|
||||
mock.MagicMock())
|
||||
def test_handle_task_not_registered(self):
|
||||
task_id = '12345'
|
||||
action_name = 'not.registered'
|
||||
params = {
|
||||
'output': 'some'
|
||||
}
|
||||
|
||||
# Send the task request to the Executor.
|
||||
ex_client = executor.ExecutorClient(self.transport)
|
||||
self.assertRaises(exceptions.ActionException, ex_client.handle_task,
|
||||
SAMPLE_CONTEXT,
|
||||
task_id=task_id,
|
||||
action_name=action_name,
|
||||
params=params)
|
||||
|
||||
self.assertFalse(engine.EngineClient.convey_task_result.called)
|
||||
|
||||
@mock.patch.object(std_actions.EchoAction, 'run',
|
||||
mock.MagicMock(side_effect=exceptions.ActionException))
|
||||
@mock.patch.object(engine.EngineClient, 'convey_task_result',
|
||||
mock.MagicMock())
|
||||
def test_handle_task_action_exception(self):
|
||||
task_id = '12345'
|
||||
action_name = 'std.echo'
|
||||
params = {
|
||||
'output': 'some'
|
||||
}
|
||||
|
||||
# Send the task request to the Executor.
|
||||
ex_client = executor.ExecutorClient(self.transport)
|
||||
with mock.patch('mistral.engine.drivers.default.executor.'
|
||||
'DefaultExecutor._log_action_exception') as log:
|
||||
ex_client.handle_task(SAMPLE_CONTEXT,
|
||||
task_id=task_id,
|
||||
action_name=action_name,
|
||||
params=params)
|
||||
self.assertTrue(log.called, "Exception must be logged")
|
||||
|
||||
engine.EngineClient.convey_task_result.assert_called_once_with(
|
||||
task_id, states.ERROR, None)
|
@ -1,505 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
import mock
|
||||
from oslo.config import cfg
|
||||
|
||||
from mistral.actions import std_actions
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral import engine
|
||||
from mistral.engine.drivers.default import engine as concrete_engine
|
||||
from mistral.engine import states
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.tests import base
|
||||
from mistral.utils.openstack import keystone
|
||||
|
||||
|
||||
# TODO(rakhmerov): add more tests
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
TOKEN = "123ab"
|
||||
USER_ID = "321ba"
|
||||
|
||||
CONTEXT = {
|
||||
'person': {
|
||||
'first_name': 'John',
|
||||
'last_name': 'Doe',
|
||||
'address': {
|
||||
'street': '124352 Broadway Street',
|
||||
'city': 'Gloomington',
|
||||
'country': 'USA'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Use the set_default method to set value otherwise in certain test cases
|
||||
# the change in value is not permanent.
|
||||
cfg.CONF.set_default('auth_enable', False, group='pecan')
|
||||
|
||||
|
||||
def create_workbook(definition_path):
|
||||
return db_api.workbook_create({
|
||||
'name': 'my_workbook',
|
||||
'definition': base.get_resource(definition_path)
|
||||
})
|
||||
|
||||
|
||||
def context_contains_required(task):
|
||||
requires = task.get('task_spec').get('requires')
|
||||
subcontexts = task.get('in_context').get('task', {})
|
||||
|
||||
return set(requires.keys()).issubset(set(subcontexts.keys()))
|
||||
|
||||
|
||||
@mock.patch.object(
|
||||
engine.EngineClient, 'start_workflow_execution',
|
||||
mock.MagicMock(side_effect=base.EngineTestCase.mock_start_workflow))
|
||||
@mock.patch.object(
|
||||
engine.EngineClient, 'convey_task_result',
|
||||
mock.MagicMock(side_effect=base.EngineTestCase.mock_task_result))
|
||||
@mock.patch.object(
|
||||
concrete_engine.DefaultEngine, '_run_task',
|
||||
mock.MagicMock(side_effect=base.EngineTestCase.mock_run_task))
|
||||
class DataFlowTest(base.EngineTestCase):
|
||||
def _check_in_context_execution(self, task):
|
||||
self.assertIn('__execution', task.in_context)
|
||||
|
||||
exec_dict = task.in_context['__execution']
|
||||
|
||||
self.assertEqual('my_workbook', exec_dict['workbook_name'])
|
||||
self.assertEqual(task['execution_id'], exec_dict['id'])
|
||||
self.assertIn('task', exec_dict)
|
||||
|
||||
def test_two_dependent_tasks(self):
|
||||
CTX = copy.copy(CONTEXT)
|
||||
|
||||
wb = create_workbook('data_flow/two_dependent_tasks.yaml')
|
||||
|
||||
execution = self.engine.start_workflow_execution(wb['name'],
|
||||
'build_greeting',
|
||||
CTX)
|
||||
|
||||
# We have to reread execution to get its latest version.
|
||||
execution = db_api.execution_get(execution['id'])
|
||||
|
||||
self.assertEqual(states.SUCCESS, execution['state'])
|
||||
self.assertDictEqual(CTX, execution['context'])
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=wb['name'],
|
||||
execution_id=execution['id'])
|
||||
|
||||
self.assertEqual(2, len(tasks))
|
||||
|
||||
build_full_name_task = self._assert_single_item(
|
||||
tasks, name='build_full_name')
|
||||
build_greeting_task = self._assert_single_item(
|
||||
tasks, name='build_greeting')
|
||||
|
||||
# Check the first task.
|
||||
self.assertEqual(states.SUCCESS, build_full_name_task['state'])
|
||||
self._check_in_context_execution(build_full_name_task)
|
||||
del build_full_name_task.in_context['__execution']
|
||||
self.assertDictEqual(CTX, build_full_name_task.in_context)
|
||||
self.assertDictEqual({'first_name': 'John', 'last_name': 'Doe'},
|
||||
build_full_name_task['parameters'])
|
||||
self.assertDictEqual(
|
||||
{
|
||||
'f_name': 'John Doe',
|
||||
'task': {
|
||||
'build_full_name': {
|
||||
'full_name': 'John Doe'
|
||||
}
|
||||
}
|
||||
},
|
||||
build_full_name_task['output'])
|
||||
|
||||
# Check the second task.
|
||||
in_context = CTX
|
||||
in_context['f_name'] = 'John Doe'
|
||||
|
||||
self.assertEqual(states.SUCCESS, build_greeting_task['state'])
|
||||
self.assertEqual('John Doe',
|
||||
build_greeting_task.in_context['f_name'])
|
||||
self.assertDictEqual({'full_name': 'John Doe'},
|
||||
build_greeting_task['parameters'])
|
||||
self.assertDictEqual(
|
||||
{
|
||||
'task': {
|
||||
'build_greeting': {
|
||||
'greeting': 'Hello, John Doe!',
|
||||
}
|
||||
}
|
||||
},
|
||||
build_greeting_task['output'])
|
||||
|
||||
del build_greeting_task['in_context']['task']
|
||||
|
||||
self._check_in_context_execution(build_greeting_task)
|
||||
del build_greeting_task['in_context']['__execution']
|
||||
self.assertDictEqual(CTX, build_greeting_task['in_context'])
|
||||
|
||||
def test_task_with_two_dependencies(self):
|
||||
CTX = copy.copy(CONTEXT)
|
||||
|
||||
wb = create_workbook('data_flow/task_with_two_dependencies.yaml')
|
||||
|
||||
execution = self.engine.start_workflow_execution(wb['name'],
|
||||
'send_greeting',
|
||||
CTX)
|
||||
|
||||
# We have to reread execution to get its latest version.
|
||||
execution = db_api.execution_get(execution['id'])
|
||||
|
||||
self.assertEqual(states.SUCCESS, execution['state'])
|
||||
self.assertDictEqual(CTX, execution['context'])
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=wb['name'],
|
||||
execution_id=execution['id'])
|
||||
|
||||
self.assertEqual(3, len(tasks))
|
||||
|
||||
build_full_name_task = self._assert_single_item(
|
||||
tasks, name='build_full_name')
|
||||
build_greeting_task = self._assert_single_item(
|
||||
tasks, name='build_greeting')
|
||||
send_greeting_task = self._assert_single_item(
|
||||
tasks, name='send_greeting')
|
||||
|
||||
# Check the first task.
|
||||
self.assertEqual(states.SUCCESS, build_full_name_task['state'])
|
||||
self._check_in_context_execution(build_full_name_task)
|
||||
del build_full_name_task['in_context']['__execution']
|
||||
self.assertDictEqual(CTX, build_full_name_task['in_context'])
|
||||
self.assertDictEqual({'first_name': 'John', 'last_name': 'Doe'},
|
||||
build_full_name_task['parameters'])
|
||||
self.assertDictEqual(
|
||||
{
|
||||
'f_name': 'John Doe',
|
||||
'task': {
|
||||
'build_full_name': {
|
||||
'full_name': 'John Doe',
|
||||
}
|
||||
}
|
||||
},
|
||||
build_full_name_task['output'])
|
||||
|
||||
# Check the second task.
|
||||
in_context = CTX
|
||||
in_context['f_name'] = 'John Doe'
|
||||
|
||||
self.assertEqual(states.SUCCESS, build_greeting_task['state'])
|
||||
self.assertEqual('John Doe',
|
||||
build_greeting_task['in_context']['f_name'])
|
||||
self.assertDictEqual({}, build_greeting_task['parameters'])
|
||||
self.assertDictEqual(
|
||||
{
|
||||
'greet_msg': 'Cheers!',
|
||||
'task': {
|
||||
'build_greeting': {
|
||||
'greeting': 'Cheers!'
|
||||
}
|
||||
}
|
||||
},
|
||||
build_greeting_task['output'])
|
||||
|
||||
del build_greeting_task['in_context']['task']
|
||||
|
||||
self._check_in_context_execution(build_greeting_task)
|
||||
del build_greeting_task['in_context']['__execution']
|
||||
self.assertDictEqual(CTX, build_greeting_task['in_context'])
|
||||
|
||||
# Check the third task.
|
||||
in_context = CTX
|
||||
in_context['f_name'] = 'John Doe'
|
||||
in_context['greet_msg'] = 'Cheers!'
|
||||
in_context['task'] = {
|
||||
'build_greeting': {
|
||||
'greeting': 'Cheers!'
|
||||
},
|
||||
'build_full_name': {
|
||||
'full_name': 'John Doe',
|
||||
}
|
||||
}
|
||||
|
||||
self.assertEqual(states.SUCCESS, send_greeting_task['state'])
|
||||
self._check_in_context_execution(send_greeting_task)
|
||||
|
||||
self.assertEqual(2, len(send_greeting_task['in_context']['task']))
|
||||
|
||||
del send_greeting_task['in_context']['__execution']
|
||||
self.assertDictEqual(in_context, send_greeting_task['in_context'])
|
||||
self.assertDictEqual({'f_name': 'John Doe', 'greet_msg': 'Cheers!'},
|
||||
send_greeting_task['parameters'])
|
||||
self.assertDictEqual(
|
||||
{
|
||||
'task': {
|
||||
'send_greeting': {
|
||||
'greeting_sent': True
|
||||
}
|
||||
}
|
||||
},
|
||||
send_greeting_task['output'])
|
||||
|
||||
def test_task_with_diamond_dependencies(self):
|
||||
CTX = copy.copy(CONTEXT)
|
||||
|
||||
wb = create_workbook('data_flow/task_with_diamond_dependencies.yaml')
|
||||
|
||||
execution = self.engine.start_workflow_execution(wb['name'],
|
||||
'send_greeting',
|
||||
CTX)
|
||||
|
||||
# We have to reread execution to get its latest version.
|
||||
execution = db_api.execution_get(execution['id'])
|
||||
|
||||
self.assertEqual(states.SUCCESS, execution['state'])
|
||||
self.assertDictEqual(CTX, execution['context'])
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=wb['name'],
|
||||
execution_id=execution['id'])
|
||||
|
||||
self.assertEqual(4, len(tasks))
|
||||
|
||||
results = {
|
||||
'build_full_name': ('full_name', 'John Doe'),
|
||||
'build_address': ('address', 'To John Doe'),
|
||||
'build_greeting': ('greeting', 'Dear John Doe'),
|
||||
'send_greeting': ('task',
|
||||
{'send_greeting':
|
||||
{'string': 'To John Doe. Dear John Doe,..'}})
|
||||
}
|
||||
|
||||
for task in tasks:
|
||||
self.assertTrue(context_contains_required(task),
|
||||
"Task context is incomplete: %s" % task['name'])
|
||||
key, value = results[task['name']]
|
||||
self.assertEqual(value, task['output'][key])
|
||||
|
||||
def test_two_subsequent_tasks(self):
|
||||
CTX = copy.copy(CONTEXT)
|
||||
|
||||
wb = create_workbook('data_flow/two_subsequent_tasks.yaml')
|
||||
|
||||
execution = self.engine.start_workflow_execution(wb['name'],
|
||||
'build_full_name',
|
||||
CTX)
|
||||
|
||||
# We have to reread execution to get its latest version.
|
||||
execution = db_api.execution_get(execution['id'])
|
||||
|
||||
self.assertEqual(states.SUCCESS, execution['state'])
|
||||
self.assertDictEqual(CTX, execution['context'])
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=wb['name'],
|
||||
execution_id=execution['id'])
|
||||
|
||||
self.assertEqual(2, len(tasks))
|
||||
|
||||
build_full_name_task = self._assert_single_item(
|
||||
tasks, name='build_full_name')
|
||||
build_greeting_task = self._assert_single_item(
|
||||
tasks, name='build_greeting')
|
||||
|
||||
# Check the first task.
|
||||
self.assertEqual(states.SUCCESS, build_full_name_task['state'])
|
||||
self._check_in_context_execution(build_full_name_task)
|
||||
del build_full_name_task['in_context']['__execution']
|
||||
self.assertDictEqual(CTX, build_full_name_task['in_context'])
|
||||
self.assertDictEqual({'first_name': 'John', 'last_name': 'Doe'},
|
||||
build_full_name_task['parameters'])
|
||||
self.assertDictEqual(
|
||||
{
|
||||
'f_name': 'John Doe',
|
||||
'task': {
|
||||
'build_full_name': {
|
||||
'full_name': 'John Doe'
|
||||
}
|
||||
}
|
||||
},
|
||||
build_full_name_task['output'])
|
||||
|
||||
# Check the second task.
|
||||
in_context = CTX
|
||||
in_context['f_name'] = 'John Doe'
|
||||
|
||||
self.assertEqual(states.SUCCESS, build_greeting_task['state'])
|
||||
self.assertEqual('John Doe',
|
||||
build_greeting_task['in_context']['f_name'])
|
||||
self.assertDictEqual({'full_name': 'John Doe'},
|
||||
build_greeting_task['parameters'])
|
||||
self.assertDictEqual(
|
||||
{
|
||||
'greet_msg': {
|
||||
'greet_message': 'Hello, John Doe!'
|
||||
},
|
||||
'task': {
|
||||
'build_greeting': {
|
||||
'greeting': {
|
||||
'greet_message': 'Hello, John Doe!'
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
build_greeting_task['output'])
|
||||
|
||||
del build_greeting_task['in_context']['task']
|
||||
|
||||
self._check_in_context_execution(build_greeting_task)
|
||||
del build_greeting_task['in_context']['__execution']
|
||||
self.assertDictEqual(CTX, build_greeting_task['in_context'])
|
||||
|
||||
def test_three_subsequent_tasks(self):
|
||||
CTX = copy.copy(CONTEXT)
|
||||
|
||||
wb = create_workbook('data_flow/three_subsequent_tasks.yaml')
|
||||
|
||||
execution = self.engine.start_workflow_execution(wb['name'],
|
||||
'build_full_name',
|
||||
CTX)
|
||||
|
||||
# We have to reread execution to get its latest version.
|
||||
execution = db_api.execution_get(execution['id'])
|
||||
|
||||
self.assertEqual(states.SUCCESS, execution['state'])
|
||||
self.assertDictEqual(CTX, execution['context'])
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=wb['name'],
|
||||
execution_id=execution['id'])
|
||||
|
||||
self.assertEqual(3, len(tasks))
|
||||
|
||||
build_full_name_task = self._assert_single_item(
|
||||
tasks, name='build_full_name')
|
||||
build_greeting_task = self._assert_single_item(
|
||||
tasks, name='build_greeting')
|
||||
send_greeting_task = self._assert_single_item(
|
||||
tasks, name='send_greeting')
|
||||
|
||||
# Check the first task.
|
||||
self.assertEqual(states.SUCCESS, build_full_name_task['state'])
|
||||
self._check_in_context_execution(build_full_name_task)
|
||||
del build_full_name_task['in_context']['__execution']
|
||||
self.assertDictEqual(CTX, build_full_name_task['in_context'])
|
||||
self.assertDictEqual({'first_name': 'John', 'last_name': 'Doe'},
|
||||
build_full_name_task['parameters'])
|
||||
self.assertDictEqual(
|
||||
{
|
||||
'f_name': 'John Doe',
|
||||
'task': {
|
||||
'build_full_name': {
|
||||
'full_name': 'John Doe'
|
||||
}
|
||||
}
|
||||
},
|
||||
build_full_name_task['output'])
|
||||
|
||||
# Check the second task.
|
||||
in_context = CTX
|
||||
in_context['f_name'] = 'John Doe'
|
||||
|
||||
self.assertEqual(states.SUCCESS, build_greeting_task['state'])
|
||||
self.assertEqual('John Doe',
|
||||
build_greeting_task['in_context']['f_name'])
|
||||
self.assertDictEqual({'full_name': 'John Doe'},
|
||||
build_greeting_task['parameters'])
|
||||
self.assertDictEqual(
|
||||
{
|
||||
'greet_msg': 'Hello, John Doe!',
|
||||
'task': {
|
||||
'build_greeting': {
|
||||
'greeting': 'Hello, John Doe!',
|
||||
}
|
||||
}
|
||||
},
|
||||
build_greeting_task['output'])
|
||||
|
||||
del build_greeting_task['in_context']['task']
|
||||
|
||||
self._check_in_context_execution(build_greeting_task)
|
||||
del build_greeting_task['in_context']['__execution']
|
||||
self.assertDictEqual(CTX, build_greeting_task['in_context'])
|
||||
|
||||
# Check the third task.
|
||||
in_context = CTX
|
||||
in_context['f_name'] = 'John Doe'
|
||||
in_context['greet_msg'] = 'Hello, John Doe!'
|
||||
|
||||
self.assertEqual(states.SUCCESS, send_greeting_task['state'])
|
||||
self.assertEqual('John Doe',
|
||||
send_greeting_task.in_context['f_name'])
|
||||
self.assertEqual('Hello, John Doe!',
|
||||
send_greeting_task.in_context['greet_msg'])
|
||||
self.assertDictEqual({'greeting': 'Hello, John Doe!'},
|
||||
send_greeting_task['parameters'])
|
||||
self.assertDictEqual(
|
||||
{
|
||||
'sent': True,
|
||||
'task': {
|
||||
'send_greeting': {
|
||||
'greeting_sent': True,
|
||||
}
|
||||
}
|
||||
},
|
||||
send_greeting_task['output'])
|
||||
|
||||
self.assertEqual(2, len(send_greeting_task.in_context['task']))
|
||||
|
||||
del send_greeting_task['in_context']['task']
|
||||
|
||||
self._check_in_context_execution(send_greeting_task)
|
||||
del send_greeting_task.in_context['__execution']
|
||||
self.assertDictEqual(CTX, send_greeting_task.in_context)
|
||||
|
||||
@mock.patch.object(
|
||||
std_actions.HTTPAction, 'run',
|
||||
mock.MagicMock(return_value={'state': states.RUNNING}))
|
||||
@mock.patch.object(
|
||||
keystone, 'client_for_trusts',
|
||||
mock.Mock(return_value=mock.MagicMock(user_id=USER_ID,
|
||||
auth_token=TOKEN)))
|
||||
def test_add_token_to_context(self):
|
||||
task_name = 'create-vms'
|
||||
|
||||
cfg.CONF.pecan.auth_enable = True
|
||||
|
||||
try:
|
||||
wb = create_workbook('test_rest.yaml')
|
||||
wb = db_api.workbook_update(wb.name, {'trust_id': '123'})
|
||||
|
||||
execution = self.engine.start_workflow_execution(wb.name,
|
||||
task_name, {})
|
||||
tasks = db_api.tasks_get(workbook_name=wb.name,
|
||||
execution_id=execution['id'])
|
||||
|
||||
task = self._assert_single_item(tasks, name=task_name)
|
||||
|
||||
openstack_context = task.in_context['openstack']
|
||||
|
||||
self.assertIn("auth_token", openstack_context)
|
||||
self.assertEqual(TOKEN, openstack_context['auth_token'])
|
||||
self.assertEqual(USER_ID, openstack_context["user_id"])
|
||||
|
||||
self.engine.convey_task_result(task.id, states.SUCCESS, {})
|
||||
|
||||
execution = db_api.execution_get(execution['id'])
|
||||
|
||||
self.assertEqual(states.SUCCESS, execution.state)
|
||||
finally:
|
||||
cfg.CONF.pecan.auth_enable = False
|
@ -1,142 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral.db.v1.sqlalchemy import models
|
||||
from mistral.engine import data_flow
|
||||
from mistral.engine import states
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.tests import base
|
||||
from mistral.workbook import parser as spec_parser
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
WB_NAME = 'my_workbook'
|
||||
EXEC_ID = '1'
|
||||
|
||||
CONTEXT = {
|
||||
'param1': 'val1',
|
||||
'param2': 'val2',
|
||||
'param3': {
|
||||
'param31': 'val31',
|
||||
'param32': 'val32'
|
||||
}
|
||||
}
|
||||
|
||||
TASK = {
|
||||
'workbook_name': WB_NAME,
|
||||
'execution_id': EXEC_ID,
|
||||
'name': 'my_task',
|
||||
'task_spec': {
|
||||
'action': 'std.echo',
|
||||
'parameters': {
|
||||
'p1': 'My string',
|
||||
'p2': '<% $.param3.param32 %>',
|
||||
'p3': ''
|
||||
},
|
||||
'publish': {
|
||||
'new_key11': '<% $.new_key1 %>'
|
||||
}
|
||||
},
|
||||
'in_context': CONTEXT
|
||||
}
|
||||
|
||||
TASK2 = copy.deepcopy(TASK)
|
||||
TASK2['task_spec']['action'] = 'some.thing'
|
||||
|
||||
WORKBOOK = {
|
||||
'Namespaces': {
|
||||
'some': {
|
||||
'actions': {
|
||||
'thing': {
|
||||
'class': 'std.echo',
|
||||
'base-parameters': {
|
||||
'output': '<% $.p1 %> <% $.p2 %>'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'Workflow': {
|
||||
'tasks': {
|
||||
'first_task': TASK['task_spec'],
|
||||
'second_task': TASK2['task_spec']
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class DataFlowModuleTest(base.DbTestCase):
|
||||
def test_evaluate_task_parameters(self):
|
||||
db_task = models.Task()
|
||||
db_task.update(TASK)
|
||||
|
||||
parameters = data_flow.evaluate_task_parameters(db_task, CONTEXT)
|
||||
|
||||
self.assertEqual(3, len(parameters))
|
||||
self.assertEqual('My string', parameters['p1'])
|
||||
self.assertEqual('val32', parameters['p2'])
|
||||
|
||||
def test_prepare_tasks(self):
|
||||
wb = spec_parser.get_workbook_spec(WORKBOOK)
|
||||
|
||||
tasks = [
|
||||
db_api.task_create(EXEC_ID, TASK.copy()),
|
||||
db_api.task_create(EXEC_ID, TASK2.copy())
|
||||
]
|
||||
|
||||
executables = data_flow.prepare_tasks(tasks, CONTEXT, wb, tasks)
|
||||
|
||||
self.assertEqual(2, len(executables))
|
||||
|
||||
self.assertEqual(tasks[0]['id'], executables[0][0])
|
||||
self.assertEqual('std.echo', executables[0][1])
|
||||
self.assertDictEqual({'p2': 'val32', 'p3': '', 'p1': 'My string'},
|
||||
executables[0][2])
|
||||
|
||||
self.assertEqual(tasks[1]['id'], executables[1][0])
|
||||
self.assertEqual('std.echo', executables[1][1])
|
||||
self.assertDictEqual({'output': 'My string val32'},
|
||||
executables[1][2])
|
||||
|
||||
for task in tasks:
|
||||
db_task = db_api.task_get(task['id'])
|
||||
|
||||
self.assertDictEqual(CONTEXT, db_task['in_context'])
|
||||
self.assertDictEqual({'p1': 'My string',
|
||||
'p2': 'val32',
|
||||
'p3': ''},
|
||||
db_task['parameters'])
|
||||
self.assertEqual(states.RUNNING, db_task['state'])
|
||||
|
||||
def test_get_outbound_context(self):
|
||||
db_task = models.Task()
|
||||
db_task.update(TASK)
|
||||
|
||||
output = data_flow.get_task_output(db_task, {'new_key1': 'new_val1'})
|
||||
|
||||
self.assertDictEqual(
|
||||
{
|
||||
'new_key11': 'new_val1',
|
||||
'task': {
|
||||
'my_task': {
|
||||
'new_key1': 'new_val1'
|
||||
}
|
||||
}
|
||||
},
|
||||
output)
|
@ -1,196 +0,0 @@
|
||||
# Copyright 2014 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import mock
|
||||
from oslo.config import cfg
|
||||
|
||||
from mistral.actions.openstack import actions
|
||||
from mistral import context as auth_context
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral import engine
|
||||
from mistral.engine.drivers.default import engine as concrete_engine
|
||||
from mistral.engine.drivers.default import executor
|
||||
from mistral.engine import states
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.tests import base
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
# Use the set_default method to set value otherwise in certain test cases
|
||||
# the change in value is not permanent.
|
||||
cfg.CONF.set_default('auth_enable', False, group='pecan')
|
||||
|
||||
|
||||
def create_workbook(definition_path):
|
||||
return db_api.workbook_create({
|
||||
'name': 'my_workbook',
|
||||
'definition': base.get_resource(definition_path)
|
||||
})
|
||||
|
||||
|
||||
@mock.patch.object(
|
||||
engine.EngineClient, 'start_workflow_execution',
|
||||
mock.MagicMock(side_effect=base.EngineTestCase.mock_start_workflow))
|
||||
@mock.patch.object(
|
||||
engine.EngineClient, 'convey_task_result',
|
||||
mock.MagicMock(side_effect=base.EngineTestCase.mock_task_result))
|
||||
@mock.patch.object(
|
||||
concrete_engine.DefaultEngine, '_run_task',
|
||||
mock.MagicMock(side_effect=base.EngineTestCase.mock_run_task))
|
||||
class OpenStackActionsEngineTest(base.EngineTestCase):
|
||||
@mock.patch.object(actions.GlanceAction, 'run',
|
||||
mock.Mock(return_value="images"))
|
||||
def test_glance_action(self):
|
||||
context = {}
|
||||
wb = create_workbook('openstack/glance.yaml')
|
||||
task_name = 'glance_image_list'
|
||||
execution = self.engine.start_workflow_execution(wb['name'],
|
||||
task_name,
|
||||
context)
|
||||
|
||||
# We have to reread execution to get its latest version.
|
||||
execution = db_api.execution_get(execution['id'])
|
||||
|
||||
self.assertEqual(states.SUCCESS, execution['state'])
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=wb['name'],
|
||||
execution_id=execution['id'])
|
||||
|
||||
self.assertEqual(1, len(tasks))
|
||||
|
||||
task = self._assert_single_item(tasks, name=task_name)
|
||||
|
||||
self.assertEqual(states.SUCCESS, task['state'])
|
||||
self.assertEqual("images", task['output']['task'][task_name])
|
||||
|
||||
@mock.patch.object(actions.KeystoneAction, 'run',
|
||||
mock.Mock(return_value="users"))
|
||||
def test_keystone_action(self):
|
||||
context = {}
|
||||
wb = create_workbook('openstack/keystone.yaml')
|
||||
task_name = 'keystone_user_list'
|
||||
execution = self.engine.start_workflow_execution(wb['name'],
|
||||
task_name,
|
||||
context)
|
||||
|
||||
# We have to reread execution to get its latest version.
|
||||
execution = db_api.execution_get(execution['id'])
|
||||
|
||||
self.assertEqual(states.SUCCESS, execution['state'])
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=wb['name'],
|
||||
execution_id=execution['id'])
|
||||
|
||||
self.assertEqual(1, len(tasks))
|
||||
|
||||
task = self._assert_single_item(tasks, name=task_name)
|
||||
|
||||
self.assertEqual(states.SUCCESS, task['state'])
|
||||
self.assertEqual("users", task['output']['task'][task_name])
|
||||
|
||||
@mock.patch.object(actions.NovaAction, 'run',
|
||||
mock.Mock(return_value="servers"))
|
||||
@mock.patch.object(executor.DefaultExecutor, "handle_task",
|
||||
mock.MagicMock())
|
||||
def test_nova_action(self):
|
||||
context = {}
|
||||
task_name = 'nova_server_findall'
|
||||
task_params = {'status': 'ACTIVE', 'tenant_id': '8e44eb2ce32'}
|
||||
wb = create_workbook('openstack/nova.yaml')
|
||||
execution = self.engine.start_workflow_execution(wb['name'],
|
||||
task_name,
|
||||
context)
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=wb['name'],
|
||||
execution_id=execution['id'])
|
||||
|
||||
self.assertEqual(1, len(tasks))
|
||||
task = self._assert_single_item(tasks, name=task_name)
|
||||
|
||||
executor.DefaultExecutor.handle_task.assert_called_once_with(
|
||||
auth_context.ctx(),
|
||||
params=task_params,
|
||||
task_id=task['id'],
|
||||
action_name="nova.servers_findall"
|
||||
)
|
||||
|
||||
self.engine.convey_task_result(task['id'],
|
||||
states.SUCCESS,
|
||||
"servers")
|
||||
|
||||
# We have to reread execution to get its latest version.
|
||||
execution = db_api.execution_get(execution['id'])
|
||||
|
||||
self.assertEqual(states.SUCCESS, execution['state'])
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=wb['name'],
|
||||
execution_id=execution['id'])
|
||||
|
||||
self.assertEqual(1, len(tasks))
|
||||
|
||||
task = self._assert_single_item(tasks, name=task_name)
|
||||
|
||||
self.assertEqual(states.SUCCESS, task['state'])
|
||||
self.assertEqual("servers", task['output']['task'][task_name])
|
||||
|
||||
@mock.patch.object(actions.HeatAction, 'run',
|
||||
mock.Mock(return_value="stacks"))
|
||||
def test_heat_action(self):
|
||||
context = {}
|
||||
wb = create_workbook('openstack/heat.yaml')
|
||||
task_name = 'heat_stack_list'
|
||||
execution = self.engine.start_workflow_execution(wb['name'],
|
||||
task_name,
|
||||
context)
|
||||
|
||||
# We have to reread execution to get its latest version.
|
||||
execution = db_api.execution_get(execution['id'])
|
||||
|
||||
self.assertEqual(states.SUCCESS, execution['state'])
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=wb['name'],
|
||||
execution_id=execution['id'])
|
||||
|
||||
self.assertEqual(1, len(tasks))
|
||||
|
||||
task = self._assert_single_item(tasks, name=task_name)
|
||||
|
||||
self.assertEqual(states.SUCCESS, task['state'])
|
||||
self.assertEqual("stacks", task['output']['task'][task_name])
|
||||
|
||||
@mock.patch.object(actions.NeutronAction, 'run',
|
||||
mock.Mock(return_value="networks"))
|
||||
def test_neutron_action(self):
|
||||
context = {}
|
||||
wb = create_workbook('openstack_tasks/neutron.yaml')
|
||||
task_name = 'neutron_list_networks'
|
||||
execution = self.engine.start_workflow_execution(wb['name'],
|
||||
task_name,
|
||||
context)
|
||||
|
||||
# We have to reread execution to get its latest version.
|
||||
execution = db_api.execution_get(execution['id'])
|
||||
|
||||
self.assertEqual(states.SUCCESS, execution['state'])
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=wb['name'],
|
||||
execution_id=execution['id'])
|
||||
|
||||
self.assertEqual(1, len(tasks))
|
||||
|
||||
task = self._assert_single_item(tasks, name=task_name)
|
||||
|
||||
self.assertEqual(states.SUCCESS, task['state'])
|
||||
self.assertEqual("networks", task['output']['task'][task_name])
|
@ -1,310 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2014 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import eventlet
|
||||
import mock
|
||||
from oslo.config import cfg
|
||||
|
||||
from mistral.actions import std_actions
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral.db.v1.sqlalchemy import models as m
|
||||
from mistral import engine
|
||||
from mistral.engine.drivers.default import engine as concrete_engine
|
||||
from mistral.engine import states
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.tests import base
|
||||
from mistral.workbook import parser as spec_parser
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
# Use the set_default method to set value otherwise in certain test cases
|
||||
# the change in value is not permanent.
|
||||
cfg.CONF.set_default('auth_enable', False, group='pecan')
|
||||
|
||||
WB_NAME = "my_workbook"
|
||||
|
||||
|
||||
# TODO(rakhmerov): Find a better home for this method.
|
||||
def get_mock_workbook(file, name='my_wb'):
|
||||
wb = m.Workbook()
|
||||
|
||||
wb.name = name
|
||||
wb.definition = base.get_resource(file)
|
||||
|
||||
return wb
|
||||
|
||||
|
||||
def _get_workbook(workbook_name):
|
||||
wb = db_api.workbook_get(workbook_name)
|
||||
return spec_parser.get_workbook_spec_from_yaml(wb["definition"])
|
||||
|
||||
|
||||
class FailBeforeSuccessMocker(object):
|
||||
def __init__(self, fail_count):
|
||||
self._max_fail_count = fail_count
|
||||
self._call_count = 0
|
||||
|
||||
def mock_partial_failure(self, *args):
|
||||
if self._call_count < self._max_fail_count:
|
||||
self._call_count += 1
|
||||
raise exc.ActionException()
|
||||
|
||||
return "result"
|
||||
|
||||
|
||||
@mock.patch.object(
|
||||
engine.EngineClient, 'start_workflow_execution',
|
||||
mock.MagicMock(side_effect=base.EngineTestCase.mock_start_workflow))
|
||||
@mock.patch.object(
|
||||
engine.EngineClient, 'convey_task_result',
|
||||
mock.MagicMock(side_effect=base.EngineTestCase.mock_task_result))
|
||||
@mock.patch.object(
|
||||
concrete_engine.DefaultEngine, '_run_task',
|
||||
mock.MagicMock(side_effect=base.EngineTestCase.mock_run_task))
|
||||
@mock.patch.object(
|
||||
std_actions.HTTPAction, 'run',
|
||||
mock.MagicMock(return_value='result'))
|
||||
class TaskRetryTest(base.EngineTestCase):
|
||||
|
||||
@mock.patch.object(
|
||||
db_api, 'workbook_get',
|
||||
mock.MagicMock(return_value=get_mock_workbook(
|
||||
'retry_task/retry_task.yaml')))
|
||||
def test_no_retry(self):
|
||||
execution = self.engine.start_workflow_execution(WB_NAME,
|
||||
'retry_task', None)
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
|
||||
self.engine.convey_task_result(tasks[0]['id'], states.SUCCESS,
|
||||
{'output': 'result'})
|
||||
|
||||
# TODO(rakhmerov): It's not stable, need to avoid race condition.
|
||||
self._assert_single_item(tasks, name='retry_task')
|
||||
self._assert_single_item(tasks, task_runtime_context=None)
|
||||
|
||||
@mock.patch.object(
|
||||
db_api, 'workbook_get',
|
||||
mock.MagicMock(return_value=get_mock_workbook(
|
||||
'retry_task/retry_task.yaml')))
|
||||
def test_retry_always_error(self):
|
||||
workbook = _get_workbook(WB_NAME)
|
||||
|
||||
execution = self.engine.start_workflow_execution(WB_NAME,
|
||||
'retry_task', None)
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
task_spec = workbook.tasks.get(tasks[0]['name'])
|
||||
retry_count, _, __ = task_spec.get_retry_parameters()
|
||||
|
||||
for x in xrange(0, retry_count + 1):
|
||||
self.engine.convey_task_result(tasks[0]['id'], states.ERROR,
|
||||
{'output': 'result'})
|
||||
|
||||
# TODO(rakhmerov): It's not stable, need to avoid race condition.
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
|
||||
self._assert_single_item(tasks, name='retry_task')
|
||||
self._assert_single_item(tasks, task_runtime_context={
|
||||
'retry_no': retry_count - 1})
|
||||
self._assert_single_item(tasks, state=states.ERROR)
|
||||
|
||||
@mock.patch.object(
|
||||
db_api, 'workbook_get',
|
||||
mock.MagicMock(return_value=get_mock_workbook(
|
||||
'retry_task/retry_task.yaml')))
|
||||
def test_retry_eventual_success(self):
|
||||
workbook = _get_workbook(WB_NAME)
|
||||
|
||||
execution = self.engine.start_workflow_execution(WB_NAME,
|
||||
'retry_task', None)
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
task_spec = workbook.tasks.get(tasks[0]['name'])
|
||||
retry_count, _, __ = task_spec.get_retry_parameters()
|
||||
|
||||
for x in xrange(0, retry_count / 2):
|
||||
self.engine.convey_task_result(tasks[0]['id'], states.ERROR,
|
||||
{'output': 'result'})
|
||||
|
||||
self.engine.convey_task_result(tasks[0]['id'], states.SUCCESS,
|
||||
{'output': 'result'})
|
||||
|
||||
# TODO(rakhmerov): It's not stable, need to avoid race condition.
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
|
||||
self._assert_single_item(tasks, name='retry_task')
|
||||
self._assert_single_item(tasks, task_runtime_context={
|
||||
'retry_no': retry_count / 2 - 1})
|
||||
|
||||
@mock.patch.object(
|
||||
db_api, 'workbook_get',
|
||||
mock.MagicMock(return_value=get_mock_workbook(
|
||||
'retry_task/delay_retry_task.yaml')))
|
||||
def test_retry_delay(self):
|
||||
task_name = 'delay_retry_task'
|
||||
workbook = _get_workbook(WB_NAME)
|
||||
|
||||
execution = self.engine.start_workflow_execution(WB_NAME,
|
||||
task_name, None)
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
task_spec = workbook.tasks.get(tasks[0]['name'])
|
||||
retry_count, _, delay = task_spec.get_retry_parameters()
|
||||
|
||||
for x in xrange(0, retry_count):
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
|
||||
self._assert_single_item(tasks, name=task_name)
|
||||
self._assert_single_item(tasks, state=states.RUNNING)
|
||||
|
||||
self.engine.convey_task_result(tasks[0]['id'], states.ERROR,
|
||||
{'output': 'result'})
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
|
||||
# TODO(rakhmerov): It's not stable, need to avoid race condition.
|
||||
self._assert_single_item(tasks, name=task_name)
|
||||
self._assert_single_item(tasks, state=states.DELAYED)
|
||||
|
||||
eventlet.sleep(delay * 2)
|
||||
|
||||
# Convey final result outside the loop.
|
||||
self.engine.convey_task_result(tasks[0]['id'], states.ERROR,
|
||||
{'output': 'result'})
|
||||
|
||||
# TODO(rakhmerov): It's not stable, need to avoid race condition.
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
|
||||
self._assert_single_item(tasks, name=task_name)
|
||||
self._assert_single_item(tasks, task_runtime_context={
|
||||
'retry_no': retry_count - 1})
|
||||
self._assert_single_item(tasks, state=states.ERROR)
|
||||
|
||||
@mock.patch.object(
|
||||
db_api, 'workbook_get',
|
||||
mock.MagicMock(return_value=get_mock_workbook(
|
||||
'retry_task/two_tasks.yaml')))
|
||||
def test_from_no_retry_to_retry_task(self):
|
||||
task_name_1 = 'no_retry_task'
|
||||
task_name_2 = 'delay_retry_task'
|
||||
workbook = _get_workbook(WB_NAME)
|
||||
|
||||
execution = self.engine.start_workflow_execution(WB_NAME,
|
||||
task_name_1, None)
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
|
||||
self._assert_single_item(tasks, name=task_name_1)
|
||||
|
||||
self.engine.convey_task_result(tasks[0]['id'], states.SUCCESS,
|
||||
{'output': 'result'})
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
|
||||
self._assert_single_item(tasks, name=task_name_2)
|
||||
|
||||
task_spec = workbook.tasks.get(task_name_2)
|
||||
retry_count, _, delay = task_spec.get_retry_parameters()
|
||||
|
||||
for x in xrange(0, retry_count):
|
||||
self.engine.convey_task_result(tasks[1]['id'], states.ERROR,
|
||||
{'output': 'result'})
|
||||
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
|
||||
# TODO(rakhmerov): It's not stable, need to avoid race condition.
|
||||
self._assert_single_item(tasks, name=task_name_1)
|
||||
self._assert_single_item(tasks, state=states.DELAYED)
|
||||
|
||||
eventlet.sleep(delay * 2)
|
||||
|
||||
# Convey final result outside the loop.
|
||||
self.engine.convey_task_result(tasks[1]['id'], states.ERROR,
|
||||
{'output': 'result'})
|
||||
|
||||
# TODO(rakhmerov): It's not stable, need to avoid race condition.
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
|
||||
self._assert_single_item(tasks, name=task_name_2)
|
||||
self._assert_single_item(tasks, task_runtime_context={
|
||||
'retry_no': retry_count - 1})
|
||||
self._assert_single_item(tasks, state=states.ERROR)
|
||||
|
||||
@mock.patch.object(std_actions.EchoAction, "run",
|
||||
mock.MagicMock(side_effect=exc.ActionException))
|
||||
@mock.patch.object(
|
||||
db_api, 'workbook_get',
|
||||
mock.MagicMock(return_value=get_mock_workbook(
|
||||
'retry_task/sync_task.yaml')))
|
||||
def test_sync_action_always_error(self):
|
||||
workbook = _get_workbook(WB_NAME)
|
||||
start_task = 'sync-task'
|
||||
task_spec = workbook.tasks.get(start_task)
|
||||
retry_count, _, __ = task_spec.get_retry_parameters()
|
||||
|
||||
execution = self.engine.start_workflow_execution(WB_NAME,
|
||||
start_task, None)
|
||||
|
||||
# TODO(rakhmerov): It's not stable, need to avoid race condition.
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
|
||||
self._assert_single_item(tasks, name=start_task)
|
||||
self._assert_single_item(tasks, task_runtime_context={
|
||||
'retry_no': retry_count - 1})
|
||||
self._assert_single_item(tasks, state=states.ERROR)
|
||||
|
||||
@mock.patch.object(
|
||||
db_api, 'workbook_get',
|
||||
mock.MagicMock(return_value=get_mock_workbook(
|
||||
'retry_task/sync_task.yaml')))
|
||||
def test_sync_action_eventual_success(self):
|
||||
start_task = 'sync-task'
|
||||
workbook = _get_workbook(WB_NAME)
|
||||
task_spec = workbook.tasks.get(start_task)
|
||||
retry_count, _, __ = task_spec.get_retry_parameters()
|
||||
|
||||
# After a pre-set no of retries the mock method will return a
|
||||
# success to simulate this test-case.
|
||||
mock_functor = FailBeforeSuccessMocker(retry_count / 2 + 1)
|
||||
|
||||
with mock.patch.object(std_actions.EchoAction, "run",
|
||||
side_effect=mock_functor.mock_partial_failure):
|
||||
execution = self.engine.start_workflow_execution(WB_NAME,
|
||||
start_task,
|
||||
None)
|
||||
|
||||
# TODO(rakhmerov): It's not stable, need to avoid race condition.
|
||||
tasks = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])
|
||||
|
||||
self._assert_single_item(tasks, name=start_task)
|
||||
self._assert_single_item(tasks, task_runtime_context={
|
||||
'retry_no': retry_count / 2})
|
||||
self._assert_single_item(tasks, state=states.SUCCESS)
|
@ -1,99 +0,0 @@
|
||||
# Copyright (c) 2013 Mirantis Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import time
|
||||
|
||||
import eventlet
|
||||
import mock
|
||||
from oslo.config import cfg
|
||||
|
||||
eventlet.monkey_patch()
|
||||
|
||||
from mistral.actions import std_actions
|
||||
from mistral.cmd import launch
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral.db.v1.sqlalchemy import models
|
||||
from mistral.engine import states
|
||||
from mistral.openstack.common import importutils
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.tests import base
|
||||
|
||||
|
||||
# We need to make sure that all configuration properties are registered.
|
||||
importutils.import_module("mistral.config")
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
# Use the set_default method to set value otherwise in certain test cases
|
||||
# the change in value is not permanent.
|
||||
cfg.CONF.set_default('auth_enable', False, group='pecan')
|
||||
|
||||
WB_NAME = 'my_workbook'
|
||||
CONTEXT = None # TODO(rakhmerov): Use a meaningful value.
|
||||
|
||||
|
||||
def get_mock_workbook(file, name='my_wb'):
|
||||
wb = models.Workbook()
|
||||
|
||||
wb.name = name
|
||||
wb.definition = base.get_resource(file)
|
||||
|
||||
return wb
|
||||
|
||||
|
||||
class TestTransport(base.EngineTestCase):
|
||||
def setUp(self):
|
||||
super(TestTransport, self).setUp()
|
||||
|
||||
# Run the Engine and Executor in the background.
|
||||
self.en_thread = eventlet.spawn(launch.launch_engine, self.transport)
|
||||
self.addCleanup(self.en_thread.kill)
|
||||
self.ex_thread = eventlet.spawn(launch.launch_executor, self.transport)
|
||||
self.addCleanup(self.ex_thread.kill)
|
||||
|
||||
@mock.patch.object(
|
||||
db_api, 'workbook_get',
|
||||
mock.MagicMock(
|
||||
return_value=get_mock_workbook('test_rest.yaml')))
|
||||
@mock.patch.object(
|
||||
std_actions.HTTPAction, 'run', mock.MagicMock(return_value={}))
|
||||
def test_transport(self):
|
||||
"""Test if engine request traversed through the oslo.messaging
|
||||
transport.
|
||||
"""
|
||||
execution = self.engine.start_workflow_execution(
|
||||
WB_NAME, 'create-vms', CONTEXT)
|
||||
|
||||
task = db_api.tasks_get(workbook_name=WB_NAME,
|
||||
execution_id=execution['id'])[0]
|
||||
|
||||
# Check task execution state. There is no timeout mechanism in
|
||||
# unittest. There is an example to add a custom timeout decorator that
|
||||
# can wrap this test function in another process and then manage the
|
||||
# process time. However, it seems more straightforward to keep the
|
||||
# loop finite.
|
||||
for i in range(0, 50):
|
||||
db_task = db_api.task_get(task['id'])
|
||||
# Ensure the request reached the executor and the action has ran.
|
||||
if db_task['state'] != states.IDLE:
|
||||
# We have to wait sometime due to time interval between set
|
||||
# task state to RUNNING and invocation action.run()
|
||||
time.sleep(0.1)
|
||||
self.assertIn(db_task['state'],
|
||||
[states.RUNNING, states.SUCCESS, states.ERROR])
|
||||
return
|
||||
time.sleep(0.1)
|
||||
|
||||
# Task is not being processed. Throw an exception here.
|
||||
raise Exception('Timed out waiting for task to be processed.')
|
@ -1,86 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from mistral.engine import states
|
||||
from mistral.engine import workflow
|
||||
from mistral.tests import base
|
||||
from mistral.workbook import parser as spec_parser
|
||||
|
||||
|
||||
TASKS = [
|
||||
{
|
||||
'name': 'backup-vms',
|
||||
'state': states.IDLE,
|
||||
'task_spec': {
|
||||
'requires': {}
|
||||
}
|
||||
},
|
||||
{
|
||||
'name': 'create-vms',
|
||||
'state': states.SUCCESS,
|
||||
'task_spec': {
|
||||
'requires': {}
|
||||
}
|
||||
},
|
||||
{
|
||||
'name': 'attach-volume',
|
||||
'state': states.IDLE,
|
||||
'task_spec': {
|
||||
'requires': {
|
||||
'create-vms': ''
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
class WorkflowTest(base.DbTestCase):
|
||||
def setUp(self):
|
||||
super(WorkflowTest, self).setUp()
|
||||
|
||||
def test_find_workflow_tasks(self):
|
||||
wb_definition = base.get_resource("test_rest.yaml")
|
||||
|
||||
tasks = workflow.find_workflow_tasks(
|
||||
spec_parser.get_workbook_spec_from_yaml(wb_definition),
|
||||
"attach-volumes"
|
||||
)
|
||||
|
||||
self.assertEqual(2, len(tasks))
|
||||
|
||||
self._assert_single_item(tasks, name='create-vms')
|
||||
self._assert_single_item(tasks, name='attach-volumes')
|
||||
|
||||
def test_find_workflow_tasks_order(self):
|
||||
wb_definition = base.get_resource("test_order.yaml")
|
||||
|
||||
tasks = workflow.find_workflow_tasks(
|
||||
spec_parser.get_workbook_spec_from_yaml(wb_definition),
|
||||
'task'
|
||||
)
|
||||
|
||||
self.assertEqual(5, len(tasks))
|
||||
|
||||
completed = set()
|
||||
|
||||
for i, task in enumerate(tasks):
|
||||
self.assertTrue(set(task.requires.keys()).issubset(completed),
|
||||
"Task %s isn't completed yet" % task.name)
|
||||
completed.add(task.name)
|
||||
|
||||
def test_tasks_to_start(self):
|
||||
tasks_to_start = workflow.find_resolved_tasks(TASKS)
|
||||
self.assertEqual(len(tasks_to_start), 2)
|
@ -76,6 +76,10 @@ class EngineTestCase(base.DbTestCase):
|
||||
|
||||
# Set the transport to 'fake' for Engine tests.
|
||||
cfg.CONF.set_default('rpc_backend', 'fake')
|
||||
|
||||
# Drop all RPC objects (transport, clients).
|
||||
rpc.cleanup()
|
||||
|
||||
transport = rpc.get_transport()
|
||||
|
||||
self.engine_client = rpc.EngineClient(transport)
|
||||
@ -94,11 +98,10 @@ class EngineTestCase(base.DbTestCase):
|
||||
# Start scheduler.
|
||||
scheduler_thread_group = scheduler.setup()
|
||||
|
||||
self.addCleanup(self.kill_threads)
|
||||
self.addCleanup(scheduler_thread_group.stop)
|
||||
|
||||
def tearDown(self):
|
||||
super(EngineTestCase, self).tearDown()
|
||||
|
||||
def kill_threads(self):
|
||||
LOG.info("Finishing engine and executor threads...")
|
||||
|
||||
[thread.kill() for thread in self.threads]
|
||||
|
@ -20,10 +20,10 @@ import requests
|
||||
|
||||
from mistral.actions import std_actions
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.engine import states
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import workbooks as wb_service
|
||||
from mistral.tests.unit.engine1 import base
|
||||
from mistral.workflow import states
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
@ -21,10 +21,10 @@ import testtools
|
||||
|
||||
from mistral.actions import std_actions
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.engine import states
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import workflows as wf_service
|
||||
from mistral.tests.unit.engine1 import base
|
||||
from mistral.workflow import states
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
@ -17,11 +17,11 @@ from oslo.config import cfg
|
||||
import testtools
|
||||
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.engine import states
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import workbooks as wb_service
|
||||
from mistral.tests.unit.engine1 import base
|
||||
from mistral.utils import javascript
|
||||
from mistral.workflow import states
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
# Use the set_default method to set value otherwise in certain test cases
|
||||
|
@ -16,10 +16,10 @@ from oslo.config import cfg
|
||||
import testtools
|
||||
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.engine import states
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import workflows as wf_service
|
||||
from mistral.tests.unit.engine1 import base
|
||||
from mistral.workflow import states
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
@ -15,10 +15,10 @@
|
||||
from oslo.config import cfg
|
||||
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.engine import states
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import workflows as wf_service
|
||||
from mistral.tests.unit.engine1 import base
|
||||
from mistral.workflow import states
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
# Use the set_default method to set value otherwise in certain test cases
|
||||
|
@ -15,7 +15,6 @@
|
||||
from oslo.config import cfg
|
||||
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.engine import states
|
||||
from mistral.engine1 import policies
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import log as logging
|
||||
@ -23,6 +22,7 @@ from mistral.services import workbooks as wb_service
|
||||
from mistral.services import workflows as wf_service
|
||||
from mistral.tests.unit.engine1 import base
|
||||
from mistral.workbook import parser as spec_parser
|
||||
from mistral.workflow import states
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
# Use the set_default method to set value otherwise in certain test cases
|
||||
|
@ -17,11 +17,11 @@ from oslo.config import cfg
|
||||
import testtools
|
||||
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.engine import states
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import scheduler
|
||||
from mistral.services import workflows as wf_service
|
||||
from mistral.tests.unit.engine1 import base
|
||||
from mistral.workflow import states
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
@ -16,12 +16,12 @@ import copy
|
||||
from oslo.config import cfg
|
||||
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.engine import states
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import workbooks as wb_service
|
||||
from mistral.tests.unit.engine1 import base
|
||||
from mistral.workflow import data_flow
|
||||
from mistral.workflow import states
|
||||
from mistral.workflow import utils as wf_utils
|
||||
|
||||
# TODO(nmakhotkin) Need to write more tests.
|
||||
|
@ -26,53 +26,6 @@ from mistral.tests import base
|
||||
cfg.CONF.set_default('auth_enable', False, group='pecan')
|
||||
|
||||
|
||||
class TriggerServiceV1Test(base.DbTestCase):
|
||||
def setUp(self):
|
||||
super(TriggerServiceV1Test, self).setUp()
|
||||
|
||||
self.wb_name = 'My workbook'
|
||||
|
||||
def test_trigger_create(self):
|
||||
t = t_s.create_trigger_v1(
|
||||
'test',
|
||||
'*/5 * * * *',
|
||||
self.wb_name,
|
||||
datetime.datetime(2010, 8, 25)
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
datetime.datetime(2010, 8, 25, 0, 5),
|
||||
t['next_execution_time']
|
||||
)
|
||||
|
||||
next_time = t_s.get_next_execution_time(
|
||||
t['pattern'],
|
||||
t['next_execution_time']
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
datetime.datetime(2010, 8, 25, 0, 10),
|
||||
next_time
|
||||
)
|
||||
|
||||
def test_get_trigger_in_correct_orders(self):
|
||||
start_t = datetime.datetime(2010, 8, 25)
|
||||
t_s.create_trigger_v1('test1', '*/5 * * * *', self.wb_name, start_t)
|
||||
|
||||
start_t = datetime.datetime(2010, 8, 22)
|
||||
t_s.create_trigger_v1('test2', '*/5 * * * *', self.wb_name, start_t)
|
||||
|
||||
start_t = datetime.datetime(2010, 9, 21)
|
||||
t_s.create_trigger_v1('test3', '*/5 * * * *', self.wb_name, start_t)
|
||||
|
||||
start_t = datetime.datetime.now() + datetime.timedelta(0, 50)
|
||||
t_s.create_trigger_v1('test4', '*/5 * * * *', self.wb_name, start_t)
|
||||
|
||||
trigger_names = [t['name'] for t in t_s.get_next_triggers_v1()]
|
||||
|
||||
self.assertEqual(trigger_names, ['test2', 'test1', 'test3'])
|
||||
|
||||
|
||||
WORKFLOW_LIST = """
|
||||
---
|
||||
version: '2.0'
|
||||
|
@ -1,47 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import pkg_resources as pkg
|
||||
|
||||
from mistral.db.v1 import api as db_api
|
||||
from mistral.services import triggers as trigger_srv
|
||||
from mistral.tests import base
|
||||
from mistral import version
|
||||
|
||||
|
||||
class TriggersTest(base.DbTestCase):
|
||||
def setUp(self):
|
||||
super(TriggersTest, self).setUp()
|
||||
|
||||
self.doc = open(pkg.resource_filename(
|
||||
version.version_info.package,
|
||||
"tests/resources/test_rest.yaml")).read()
|
||||
|
||||
def test_create_associated_triggers(self):
|
||||
workbook = {
|
||||
'name': 'my_workbook',
|
||||
'definition': self.doc
|
||||
}
|
||||
|
||||
wb_db = db_api.workbook_create(workbook)
|
||||
|
||||
trigger_srv.create_associated_triggers(wb_db)
|
||||
|
||||
triggers = db_api.triggers_get(workbook_name='my_workbook')
|
||||
|
||||
self.assertEqual(triggers[0]['name'], 'create-vms')
|
||||
self.assertEqual(triggers[0]['pattern'], '* * * * *')
|
||||
self.assertEqual(triggers[0]['workbook_name'], 'my_workbook')
|
Loading…
Reference in New Issue
Block a user