# Copyright 2013 - Mirantis, Inc. # Copyright 2015 - StackStorm, Inc. # Copyright 2019 - NetCracker Technology Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json from oslo_log import log as logging from pecan import rest from wsme import types as wtypes import wsmeext.pecan as wsme_pecan from mistral.api import access_control as acl from mistral.api.controllers.v2 import action_execution from mistral.api.controllers.v2 import resources from mistral.api.controllers.v2 import sub_execution from mistral.api.controllers.v2 import types from mistral import context from mistral.db.v2 import api as db_api from mistral.db.v2.sqlalchemy import models as db_models from mistral import exceptions as exc from mistral import expressions as expr from mistral.lang import parser as spec_parser from mistral.rpc import clients as rpc from mistral.utils import filter_utils from mistral.utils import rest_utils from mistral.workflow import data_flow from mistral.workflow import states LOG = logging.getLogger(__name__) STATE_TYPES = wtypes.Enum( str, states.IDLE, states.RUNNING, states.SUCCESS, states.ERROR, states.RUNNING_DELAYED ) def _get_task_resource_with_result(task_ex): task = resources.Task.from_db_model(task_ex) task.result = json.dumps(data_flow.get_task_execution_result(task_ex)) return task # Use retries to prevent possible failures. @rest_utils.rest_retry_on_db_error def _get_task_execution(id): with db_api.transaction(): task_ex = db_api.get_task_execution(id) rest_utils.load_deferred_fields(task_ex, ['workflow_execution']) rest_utils.load_deferred_fields( task_ex.workflow_execution, ['context', 'input', 'params', 'root_execution'] ) rest_utils.load_deferred_fields( task_ex.workflow_execution.root_execution, ['params'] ) return _get_task_resource_with_result(task_ex), task_ex def get_published_global(task_ex, wf_ex=None): if task_ex.state not in [states.SUCCESS, states.ERROR]: return if wf_ex is None: wf_ex = task_ex.workflow_execution expr_ctx = data_flow.ContextView( data_flow.get_current_task_dict(task_ex), task_ex.in_context, data_flow.get_workflow_environment_dict(wf_ex), wf_ex.context, wf_ex.input ) task_spec = spec_parser.get_task_spec(task_ex.spec) publish_spec = task_spec.get_publish(task_ex.state) if not publish_spec: return global_vars = publish_spec.get_global() return expr.evaluate_recursively(global_vars, expr_ctx) def _task_with_published_global(task, task_ex): published_global_vars = get_published_global(task_ex) if published_global_vars: task.published_global = published_global_vars return task class TaskExecutionsController(rest.RestController): @rest_utils.wrap_wsme_controller_exception @wsme_pecan.wsexpose(resources.Executions, types.uuid, types.uuid, int, types.uniquelist, types.list, types.uniquelist, wtypes.text, types.uuid, wtypes.text, types.uniquelist, types.jsontype, STATE_TYPES, wtypes.text, types.jsontype, types.jsontype, wtypes.text, wtypes.text) def get_all(self, task_execution_id, marker=None, limit=None, sort_keys='created_at', sort_dirs='asc', fields='', workflow_name=None, workflow_id=None, description=None, tags=None, params=None, state=None, state_info=None, input=None, output=None, created_at=None, updated_at=None): """Return all executions that belong to the given task execution. :param task_execution_id: Task task execution ID. :param marker: Optional. Pagination marker for large data sets. :param limit: Optional. Maximum number of resources to return in a single result. Default value is None for backward compatibility. :param sort_keys: Optional. Columns to sort results by. Default: created_at, which is backward compatible. :param sort_dirs: Optional. Directions to sort corresponding to sort_keys, "asc" or "desc" can be chosen. Default: desc. The length of sort_dirs can be equal or less than that of sort_keys. :param fields: Optional. A specified list of fields of the resource to be returned. 'id' will be included automatically in fields if it's provided, since it will be used when constructing 'next' link. :param workflow_name: Optional. Keep only resources with a specific workflow name. :param workflow_id: Optional. Keep only resources with a specific workflow ID. :param description: Optional. Keep only resources with a specific description. :param tags: Optional. Keep only resources containing specific tags. :param params: Optional. Keep only resources with specific parameters. :param state: Optional. Keep only resources with a specific state. :param state_info: Optional. Keep only resources with specific state information. :param input: Optional. Keep only resources with a specific input. :param output: Optional. Keep only resources with a specific output. :param created_at: Optional. Keep only resources created at a specific time and date. :param updated_at: Optional. Keep only resources with specific latest update time and date. """ acl.enforce('executions:list', context.ctx()) filters = filter_utils.create_filters_from_request_params( task_execution_id=task_execution_id, created_at=created_at, workflow_name=workflow_name, workflow_id=workflow_id, tags=tags, params=params, state=state, state_info=state_info, input=input, output=output, updated_at=updated_at, description=description ) LOG.debug( "Fetch executions. marker=%s, limit=%s, sort_keys=%s, " "sort_dirs=%s, filters=%s", marker, limit, sort_keys, sort_dirs, filters ) return rest_utils.get_all( resources.Executions, resources.Execution, db_api.get_workflow_executions, db_api.get_workflow_execution, marker=marker, limit=limit, sort_keys=sort_keys, sort_dirs=sort_dirs, fields=fields, **filters ) class TasksController(rest.RestController): action_executions = action_execution.TasksActionExecutionController() workflow_executions = TaskExecutionsController() executions = sub_execution.SubExecutionsController() @rest_utils.wrap_wsme_controller_exception @wsme_pecan.wsexpose(resources.Task, wtypes.text) def get(self, id): """Return the specified task. :param id: UUID of task to retrieve """ acl.enforce('tasks:get', context.ctx()) LOG.debug("Fetch task [id=%s]", id) task, task_ex = _get_task_execution(id) return _task_with_published_global(task, task_ex) @rest_utils.wrap_wsme_controller_exception @wsme_pecan.wsexpose(resources.Tasks, types.uuid, int, types.uniquelist, types.list, types.uniquelist, wtypes.text, wtypes.text, types.uuid, types.uuid, types.uniquelist, STATE_TYPES, wtypes.text, wtypes.text, types.jsontype, bool, wtypes.text, wtypes.text, bool, types.jsontype) def get_all(self, marker=None, limit=None, sort_keys='created_at', sort_dirs='asc', fields='', name=None, workflow_name=None, workflow_id=None, workflow_execution_id=None, tags=None, state=None, state_info=None, result=None, published=None, processed=None, created_at=None, updated_at=None, reset=None, env=None): """Return all tasks. Where project_id is the same as the requester or project_id is different but the scope is public. :param marker: Optional. Pagination marker for large data sets. :param limit: Optional. Maximum number of resources to return in a single result. Default value is None for backward compatibility. :param sort_keys: Optional. Columns to sort results by. Default: created_at, which is backward compatible. :param sort_dirs: Optional. Directions to sort corresponding to sort_keys, "asc" or "desc" can be chosen. Default: desc. The length of sort_dirs can be equal or less than that of sort_keys. :param fields: Optional. A specified list of fields of the resource to be returned. 'id' will be included automatically in fields if it's provided, since it will be used when constructing 'next' link. :param name: Optional. Keep only resources with a specific name. :param workflow_name: Optional. Keep only resources with a specific workflow name. :param workflow_id: Optional. Keep only resources with a specific workflow ID. :param workflow_execution_id: Optional. Keep only resources with a specific workflow execution ID. :param state: Optional. Keep only resources with a specific state. :param state_info: Optional. Keep only resources with specific state information. :param result: Optional. Keep only resources with a specific result. :param published: Optional. Keep only resources with specific published content. :param processed: Optional. Keep only resources which have been processed or not. :param reset: Optional. Keep only resources which have been reset or not. :param env: Optional. Keep only resources with a specific environment. :param created_at: Optional. Keep only resources created at a specific time and date. :param updated_at: Optional. Keep only resources with specific latest update time and date. """ acl.enforce('tasks:list', context.ctx()) filters = filter_utils.create_filters_from_request_params( created_at=created_at, workflow_name=workflow_name, workflow_id=workflow_id, tags=tags, state=state, state_info=state_info, updated_at=updated_at, name=name, workflow_execution_id=workflow_execution_id, result=result, published=published, processed=processed, reset=reset, env=env ) LOG.debug( "Fetch tasks. marker=%s, limit=%s, sort_keys=%s, sort_dirs=%s," " filters=%s", marker, limit, sort_keys, sort_dirs, filters ) return rest_utils.get_all( resources.Tasks, resources.Task, db_api.get_task_executions, db_api.get_task_execution, marker=marker, limit=limit, sort_keys=sort_keys, sort_dirs=sort_dirs, fields=fields, **filters ) @rest_utils.wrap_wsme_controller_exception @wsme_pecan.wsexpose(resources.Task, wtypes.text, body=resources.Task) def put(self, id, task): """Update the specified task execution. :param id: Task execution ID. :param task: Task execution object. """ acl.enforce('tasks:update', context.ctx()) LOG.info("Update task execution [id=%s, task=%s]", id, task) @rest_utils.rest_retry_on_db_error def _read_task_params(id, task): with db_api.transaction(): task_ex = db_api.get_task_execution(id) task_spec = spec_parser.get_task_spec(task_ex.spec) task_name = task.name or None reset = task.reset env = task.env or None if task_name and task_name != task_ex.name: raise exc.WorkflowException('Task name does not match.') wf_ex = db_api.get_workflow_execution( task_ex.workflow_execution_id ) root_execution_id = wf_ex.root_execution_id if not root_execution_id: root_execution_id = wf_ex.id context.ctx(root_execution_id=root_execution_id) return env, reset, task_ex, task_spec, wf_ex env, reset, task_ex, task_spec, wf_ex = _read_task_params(id, task) wf_name = task.workflow_name or None if wf_name and wf_name != wf_ex.name: raise exc.WorkflowException('Workflow name does not match.') if task.state != states.RUNNING: raise exc.WorkflowException( 'Invalid task state. ' 'Only updating task to rerun is supported.' ) if task_ex.state != states.ERROR: raise exc.WorkflowException( 'The current task execution must be in ERROR for rerun.' ' Only updating task to rerun is supported.' ) if not task_spec.get_with_items() and not reset: raise exc.WorkflowException( 'Only with-items task has the option to not reset.' ) rpc.get_engine_client().rerun_workflow( task_ex.id, reset=reset, env=env ) @rest_utils.rest_retry_on_db_error def _retrieve_task(): with db_api.transaction(): task_ex = db_api.get_task_execution(id) return _get_task_resource_with_result(task_ex) return _retrieve_task() class ExecutionTasksController(rest.RestController): @rest_utils.wrap_wsme_controller_exception @wsme_pecan.wsexpose(resources.Tasks, types.uuid, types.uuid, int, types.uniquelist, types.list, types.uniquelist, wtypes.text, wtypes.text, types.uuid, types.uniquelist, STATE_TYPES, wtypes.text, wtypes.text, types.jsontype, bool, wtypes.text, wtypes.text, bool, types.jsontype) def get_all(self, workflow_execution_id, marker=None, limit=None, sort_keys='created_at', sort_dirs='asc', fields='', name=None, workflow_name=None, workflow_id=None, tags=None, state=None, state_info=None, result=None, published=None, processed=None, created_at=None, updated_at=None, reset=None, env=None): """Return all tasks within the execution. Where project_id is the same as the requester or project_id is different but the scope is public. :param marker: Optional. Pagination marker for large data sets. :param limit: Optional. Maximum number of resources to return in a single result. Default value is None for backward compatibility. :param sort_keys: Optional. Columns to sort results by. Default: created_at, which is backward compatible. :param sort_dirs: Optional. Directions to sort corresponding to sort_keys, "asc" or "desc" can be chosen. Default: desc. The length of sort_dirs can be equal or less than that of sort_keys. :param fields: Optional. A specified list of fields of the resource to be returned. 'id' will be included automatically in fields if it's provided, since it will be used when constructing 'next' link. :param name: Optional. Keep only resources with a specific name. :param workflow_name: Optional. Keep only resources with a specific workflow name. :param workflow_id: Optional. Keep only resources with a specific workflow ID. :param workflow_execution_id: Optional. Keep only resources with a specific workflow execution ID. :param tags: Optional. Keep only resources containing specific tags. :param state: Optional. Keep only resources with a specific state. :param state_info: Optional. Keep only resources with specific state information. :param result: Optional. Keep only resources with a specific result. :param published: Optional. Keep only resources with specific published content. :param processed: Optional. Keep only resources which have been processed or not. :param reset: Optional. Keep only resources which have been reset or not. :param env: Optional. Keep only resources with a specific environment. :param created_at: Optional. Keep only resources created at a specific time and date. :param updated_at: Optional. Keep only resources with specific latest update time and date. """ acl.enforce('tasks:list', context.ctx()) with db_api.transaction(): db_api.get_workflow_execution( workflow_execution_id, fields=(db_models.WorkflowExecution.id,) ) filters = filter_utils.create_filters_from_request_params( workflow_execution_id=workflow_execution_id, created_at=created_at, workflow_name=workflow_name, workflow_id=workflow_id, tags=tags, state=state, state_info=state_info, updated_at=updated_at, name=name, result=result, published=published, processed=processed, reset=reset, env=env ) LOG.debug( "Fetch tasks. workflow_execution_id=%s, marker=%s, limit=%s, " "sort_keys=%s, sort_dirs=%s, filters=%s", workflow_execution_id, marker, limit, sort_keys, sort_dirs, filters ) return rest_utils.get_all( resources.Tasks, resources.Task, db_api.get_task_executions, db_api.get_task_execution, marker=marker, limit=limit, sort_keys=sort_keys, sort_dirs=sort_dirs, fields=fields, **filters )