Merge "Store next task names in DB"

Authored by Zuul on 2019-06-11 09:40:19 +00:00, committed by Gerrit Code Review
commit eb97410610
6 changed files with 173 additions and 175 deletions

View File

@ -0,0 +1,38 @@
# Copyright 2019 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add next_tasks to task execution.
Revision ID: 033
Revises: 032
Create Date: 2019-06-06 13:42:12.123412
"""
# revision identifiers, used by Alembic.
revision = '033'
down_revision = '032'
from alembic import op
import sqlalchemy as sa
from mistral.db.sqlalchemy import types as st
def upgrade():
op.add_column(
'task_executions_v2',
sa.Column('next_tasks', st.JsonListType(), nullable=True)
)
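
For context on the column type: st.JsonListType persists a Python list as JSON-encoded text, so no schema beyond a text column is needed. A rough, illustrative stand-in (not Mistral's actual implementation) built on a plain SQLAlchemy TypeDecorator:

import json

import sqlalchemy as sa


class JsonListIllustration(sa.TypeDecorator):
    """Illustrative only: store a Python list as JSON text in a TEXT column."""

    impl = sa.Text

    def process_bind_param(self, value, dialect):
        # NULL stays NULL; otherwise serialize the list to a JSON string.
        return None if value is None else json.dumps(value)

    def process_result_value(self, value, dialect):
        # NULL comes back as None; otherwise parse the JSON string into a list.
        return None if value is None else json.loads(value)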

View File

@ -269,6 +269,10 @@ class TaskExecution(Execution):
    # is not completed.
    has_next_tasks = sa.Column(sa.Boolean, default=False)

    # The names of the next tasks.
    # [(task_name, event)]
    next_tasks = sa.Column(st.JsonListType())

    # Set to True if the task finished with an error and the error
    # is handled (e.g. with 'on-error' clause for direct workflows)
    # so that the error shouldn't bubble up to the workflow level.
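
As a purely illustrative example of the new fields, a completed task whose 'on-success' clause points at task3 and task4 would end up carrying:

task_ex.has_next_tasks = True
task_ex.next_tasks = [('task3', 'on-success'), ('task4', 'on-success')]

(After a round trip through the JSON column the tuples come back as two-element lists, which the positional lookups further down handle just as well.)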

View File

@ -275,6 +275,29 @@ class Task(object):
        if self.task_ex.state == states.RUNNING_DELAYED:
            return

        wf_ctrl = wf_base.get_controller(self.wf_ex, self.wf_spec)

        # Calculate commands to process next.
        cmds = wf_ctrl.continue_workflow(task_ex=self.task_ex)

        # Save next task names in DB to avoid evaluating them again
        # in the future.
        self.task_ex.next_tasks = []

        for c in cmds:
            if commands.is_engine_command(c):
                continue

            event = c.triggered_by[0]['event'] if c.triggered_by else None

            self.task_ex.next_tasks.append((c.task_spec.get_name(), event))

        self.task_ex.has_next_tasks = bool(self.task_ex.next_tasks)

        # Check whether the error is handled.
        if self.task_ex.state == states.ERROR:
            self.task_ex.error_handled = any([c.handles_error for c in cmds])

        # If workflow is paused we shouldn't schedule new commands
        # and mark task as processed.
        if states.is_paused(self.wf_ex.state):
@ -283,19 +306,6 @@ class Task(object):
            return

        wf_ctrl = wf_base.get_controller(self.wf_ex, self.wf_spec)

        # Calculate commands to process next.
        cmds = wf_ctrl.continue_workflow(task_ex=self.task_ex)

        # Check whether the task generated any next tasks.
        if any([not commands.is_engine_command(c) for c in cmds]):
            self.task_ex.has_next_tasks = True

        # Check whether the error is handled.
        if self.task_ex.state == states.ERROR:
            self.task_ex.error_handled = any([c.handles_error for c in cmds])

        # Mark task as processed after all decisions have been made
        # upon its completion.
        self.task_ex.processed = True
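
The new bookkeeping can be read as a small pure function. A condensed sketch (the helper name is hypothetical; the module and accessors are the ones used in the diff above) of what gets persisted per completed task:

from mistral.workflow import commands


def collect_next_tasks(cmds):
    """Return [(task_name, event), ...] for the non-engine commands."""
    next_tasks = []

    for c in cmds:
        # Engine commands (e.g. pausing or failing the workflow) don't
        # correspond to a concrete next task, so they are skipped.
        if commands.is_engine_command(c):
            continue

        # 'triggered_by' records which clause (e.g. 'on-success') produced
        # the command, when that information is available.
        event = c.triggered_by[0]['event'] if c.triggered_by else None

        next_tasks.append((c.task_spec.get_name(), event))

    return next_tasks

The task execution then stores this list in next_tasks and sets has_next_tasks accordingly, so later join evaluation can reuse it instead of re-running the workflow controller.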

View File

@ -21,44 +21,46 @@ from mistral_lib import actions as ml_actions
class SubworkflowPauseResumeTest(base.EngineTestCase):
def test_pause_resume_cascade_down_to_subworkflow(self):
workbook = """
wb_text = """
version: '2.0'
name: wb
workflows:
wf1:
tasks:
task1:
workflow: wf2
on-success:
- task3
on-success: task3
task2:
workflow: wf3
on-success:
- task3
on-success: task3
task3:
join: all
action: std.noop
wf2:
tasks:
task1:
action: std.async_noop
on-success:
- task2
on-success: task2
task2:
action: std.noop
wf3:
tasks:
task1:
action: std.async_noop
on-success:
- task2
on-success: task2
task2:
action: std.noop
"""
wb_service.create_workbook_v2(workbook)
wb_service.create_workbook_v2(wb_text)
# Start workflow execution.
wf_1_ex = self.engine.start_workflow('wb.wf1')
@ -148,8 +150,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the parent workflow execution.
wf_1_ex = self._assert_single_item(wf_execs, name='wb.wf1')
wf_1_task_execs = wf_1_ex.task_executions
wf_1_task_1_ex = self._assert_single_item(
wf_1_ex.task_executions,
name='task1'
@ -167,8 +167,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the subworkflow executions.
wf_2_ex = self._assert_single_item(wf_execs, name='wb.wf2')
wf_2_task_execs = wf_2_ex.task_executions
wf_2_task_1_ex = self._assert_single_item(
wf_2_ex.task_executions,
name='task1'
@ -180,8 +178,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
wf_3_ex = self._assert_single_item(wf_execs, name='wb.wf3')
wf_3_task_execs = wf_3_ex.task_executions
wf_3_task_1_ex = self._assert_single_item(
wf_3_ex.task_executions,
name='task1'
@ -216,8 +212,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the parent workflow execution.
wf_1_ex = self._assert_single_item(wf_execs, name='wb.wf1')
wf_1_task_execs = wf_1_ex.task_executions
wf_1_task_1_ex = self._assert_single_item(
wf_1_ex.task_executions,
name='task1'
@ -235,8 +229,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the subworkflow executions.
wf_2_ex = self._assert_single_item(wf_execs, name='wb.wf2')
wf_2_task_execs = wf_2_ex.task_executions
wf_2_task_1_ex = self._assert_single_item(
wf_2_ex.task_executions,
name='task1'
@ -248,8 +240,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
wf_3_ex = self._assert_single_item(wf_execs, name='wb.wf3')
wf_3_task_execs = wf_3_ex.task_executions
wf_3_task_1_ex = self._assert_single_item(
wf_3_ex.task_executions,
name='task1'
@ -323,10 +313,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
name='task1'
)
wf_2_task_1_action_exs = db_api.get_action_executions(
task_execution_id=wf_2_task_1_ex.id
)
wf_2_task_2_ex = self._assert_single_item(
wf_2_ex.task_executions,
name='task2'
@ -341,10 +327,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
name='task1'
)
wf_3_task_1_action_exs = db_api.get_action_executions(
task_execution_id=wf_3_task_1_ex.id
)
wf_3_task_2_ex = self._assert_single_item(
wf_3_ex.task_executions,
name='task2'
@ -367,42 +349,45 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
self.assertEqual(states.SUCCESS, wf_3_task_2_ex.state)
def test_pause_resume_cascade_up_from_subworkflow(self):
workbook = """
wb_text = """
version: '2.0'
name: wb
workflows:
wf1:
tasks:
task1:
workflow: wf2
on-success:
- task3
on-success: task3
task2:
workflow: wf3
on-success:
- task3
on-success: task3
task3:
join: all
action: std.noop
wf2:
tasks:
task1:
action: std.async_noop
on-success:
- task2
on-success: task2
task2:
action: std.noop
wf3:
tasks:
task1:
action: std.async_noop
on-success:
- task2
on-success: task2
task2:
action: std.noop
"""
wb_service.create_workbook_v2(workbook)
wb_service.create_workbook_v2(wb_text)
# Start workflow execution.
wf_1_ex = self.engine.start_workflow('wb.wf1')
@ -490,8 +475,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the parent workflow execution.
wf_1_ex = self._assert_single_item(wf_execs, name='wb.wf1')
wf_1_task_execs = wf_1_ex.task_executions
wf_1_task_1_ex = self._assert_single_item(
wf_1_ex.task_executions,
name='task1'
@ -509,8 +492,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the subworkflow executions.
wf_2_ex = self._assert_single_item(wf_execs, name='wb.wf2')
wf_2_task_execs = wf_2_ex.task_executions
wf_2_task_1_ex = self._assert_single_item(
wf_2_ex.task_executions,
name='task1'
@ -522,8 +503,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
wf_3_ex = self._assert_single_item(wf_execs, name='wb.wf3')
wf_3_task_execs = wf_3_ex.task_executions
wf_3_task_1_ex = self._assert_single_item(
wf_3_ex.task_executions,
name='task1'
@ -556,8 +535,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the parent workflow execution.
wf_1_ex = self._assert_single_item(wf_execs, name='wb.wf1')
wf_1_task_execs = wf_1_ex.task_executions
wf_1_task_1_ex = self._assert_single_item(
wf_1_ex.task_executions,
name='task1'
@ -575,8 +552,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the subworkflow executions.
wf_2_ex = self._assert_single_item(wf_execs, name='wb.wf2')
wf_2_task_execs = wf_2_ex.task_executions
wf_2_task_1_ex = self._assert_single_item(
wf_2_ex.task_executions,
name='task1'
@ -588,8 +563,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
wf_3_ex = self._assert_single_item(wf_execs, name='wb.wf3')
wf_3_task_execs = wf_3_ex.task_executions
wf_3_task_1_ex = self._assert_single_item(
wf_3_ex.task_executions,
name='task1'
@ -626,8 +599,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the parent workflow execution.
wf_1_ex = self._assert_single_item(wf_execs, name='wb.wf1')
wf_1_task_execs = wf_1_ex.task_executions
wf_1_task_1_ex = self._assert_single_item(
wf_1_ex.task_executions,
name='task1'
@ -645,8 +616,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the subworkflow executions.
wf_2_ex = self._assert_single_item(wf_execs, name='wb.wf2')
wf_2_task_execs = wf_2_ex.task_executions
wf_2_task_1_ex = self._assert_single_item(
wf_2_ex.task_executions,
name='task1'
@ -658,8 +627,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
wf_3_ex = self._assert_single_item(wf_execs, name='wb.wf3')
wf_3_task_execs = wf_3_ex.task_executions
wf_3_task_1_ex = self._assert_single_item(
wf_3_ex.task_executions,
name='task1'
@ -692,8 +659,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the parent workflow execution.
wf_1_ex = self._assert_single_item(wf_execs, name='wb.wf1')
wf_1_task_execs = wf_1_ex.task_executions
wf_1_task_1_ex = self._assert_single_item(
wf_1_ex.task_executions,
name='task1'
@ -711,8 +676,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the subworkflow executions.
wf_2_ex = self._assert_single_item(wf_execs, name='wb.wf2')
wf_2_task_execs = wf_2_ex.task_executions
wf_2_task_1_ex = self._assert_single_item(
wf_2_ex.task_executions,
name='task1'
@ -724,8 +687,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
wf_3_ex = self._assert_single_item(wf_execs, name='wb.wf3')
wf_3_task_execs = wf_3_ex.task_executions
wf_3_task_1_ex = self._assert_single_item(
wf_3_ex.task_executions,
name='task1'
@ -793,10 +754,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
name='task1'
)
wf_2_task_1_action_exs = db_api.get_action_executions(
task_execution_id=wf_2_task_1_ex.id
)
wf_2_task_2_ex = self._assert_single_item(
wf_2_ex.task_executions,
name='task2'
@ -811,10 +768,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
name='task1'
)
wf_3_task_1_action_exs = db_api.get_action_executions(
task_execution_id=wf_3_task_1_ex.id
)
wf_3_task_2_ex = self._assert_single_item(
wf_3_ex.task_executions,
name='task2'
@ -837,43 +790,46 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
self.assertEqual(states.SUCCESS, wf_3_task_2_ex.state)
def test_pause_resume_cascade_down_to_with_items_subworkflows(self):
workbook = """
wb_text = """
version: '2.0'
name: wb
workflows:
wf1:
tasks:
task1:
with-items: i in <% range(3) %>
workflow: wf2
on-success:
- task3
on-success: task3
task2:
workflow: wf3
on-success:
- task3
on-success: task3
task3:
join: all
action: std.noop
wf2:
tasks:
task1:
action: std.async_noop
on-success:
- task2
on-success: task2
task2:
action: std.noop
wf3:
tasks:
task1:
action: std.async_noop
on-success:
- task2
on-success: task2
task2:
action: std.noop
"""
wb_service.create_workbook_v2(workbook)
wb_service.create_workbook_v2(wb_text)
# Start workflow execution.
wf_1_ex = self.engine.start_workflow('wb.wf1')
@ -1371,43 +1327,46 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
self.assertEqual(states.SUCCESS, wf_3_task_1_action_exs[0].state)
def test_pause_resume_cascade_up_from_with_items_subworkflow(self):
workbook = """
wb_text = """
version: '2.0'
name: wb
workflows:
wf1:
tasks:
task1:
with-items: i in <% range(3) %>
workflow: wf2
on-success:
- task3
on-success: task3
task2:
workflow: wf3
on-success:
- task3
on-success: task3
task3:
join: all
action: std.noop
wf2:
tasks:
task1:
action: std.async_noop
on-success:
- task2
on-success: task2
task2:
action: std.noop
wf3:
tasks:
task1:
action: std.async_noop
on-success:
- task2
on-success: task2
task2:
action: std.noop
"""
wb_service.create_workbook_v2(workbook)
wb_service.create_workbook_v2(wb_text)
# Start workflow execution.
wf_1_ex = self.engine.start_workflow('wb.wf1')
@ -1752,8 +1711,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
wf_1_task_1_action_exs[2].id
)
wf_2_ex_3_task_execs = wf_2_ex_3.task_executions
wf_2_ex_3_task_1_ex = self._assert_single_item(
wf_2_ex_3.task_executions,
name='task1'
@ -1766,8 +1723,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the wf3 subworkflow execution.
wf_3_ex = self._assert_single_item(wf_execs, name='wb.wf3')
wf_3_task_execs = wf_3_ex.task_executions
wf_3_task_1_ex = self._assert_single_item(
wf_3_ex.task_executions,
name='task1'
@ -1847,8 +1802,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the parent workflow execution.
wf_1_ex = self._assert_single_item(wf_execs, name='wb.wf1')
wf_1_task_execs = wf_1_ex.task_executions
wf_1_task_1_ex = self._assert_single_item(
wf_1_ex.task_executions,
name='task1'
@ -1871,8 +1824,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
wf_1_task_1_action_exs[0].id
)
wf_2_ex_1_task_execs = wf_2_ex_1.task_executions
wf_2_ex_1_task_1_ex = self._assert_single_item(
wf_2_ex_1.task_executions,
name='task1'
@ -1886,8 +1837,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
wf_1_task_1_action_exs[1].id
)
wf_2_ex_2_task_execs = wf_2_ex_2.task_executions
wf_2_ex_2_task_1_ex = self._assert_single_item(
wf_2_ex_2.task_executions,
name='task1'
@ -1901,8 +1850,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
wf_1_task_1_action_exs[2].id
)
wf_2_ex_3_task_execs = wf_2_ex_3.task_executions
wf_2_ex_3_task_1_ex = self._assert_single_item(
wf_2_ex_3.task_executions,
name='task1'
@ -1915,8 +1862,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the wf3 subworkflow execution.
wf_3_ex = self._assert_single_item(wf_execs, name='wb.wf3')
wf_3_task_execs = wf_3_ex.task_executions
wf_3_task_1_ex = self._assert_single_item(
wf_3_ex.task_executions,
name='task1'
@ -1956,43 +1901,46 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
self.assertEqual(states.SUCCESS, wf_3_task_1_action_exs[0].state)
def test_pause_resume_cascade_up_from_subworkflow_pause_before(self):
workbook = """
wb_text = """
version: '2.0'
name: wb
workflows:
wf1:
tasks:
task1:
workflow: wf2
on-success:
- task3
task2:
workflow: wf3
on-success:
- task3
task3:
join: all
action: std.noop
wf2:
tasks:
task1:
action: std.noop
on-success:
- task2
task2:
pause-before: true
action: std.async_noop
wf3:
tasks:
task1:
action: std.async_noop
on-success:
- task2
task2:
action: std.noop
wf1:
tasks:
task1:
workflow: wf2
on-success: task3
task2:
workflow: wf3
on-success: task3
task3:
join: all
wf2:
tasks:
task1:
action: std.noop
on-success: task2
task2:
pause-before: true
action: std.async_noop
wf3:
tasks:
task1:
action: std.async_noop
on-success: task2
task2:
action: std.noop
"""
wb_service.create_workbook_v2(workbook)
wb_service.create_workbook_v2(wb_text)
# Start workflow execution.
wf_1_ex = self.engine.start_workflow('wb.wf1')
@ -2005,8 +1953,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the parent workflow execution.
wf_1_ex = self._assert_single_item(wf_execs, name='wb.wf1')
wf_1_task_execs = wf_1_ex.task_executions
wf_1_task_1_ex = self._assert_single_item(
wf_1_ex.task_executions,
name='task1'
@ -2024,8 +1970,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the subworkflow executions.
wf_2_ex = self._assert_single_item(wf_execs, name='wb.wf2')
wf_2_task_execs = wf_2_ex.task_executions
wf_2_task_1_ex = self._assert_single_item(
wf_2_ex.task_executions,
name='task1'
@ -2046,8 +1990,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
wf_3_ex = self._assert_single_item(wf_execs, name='wb.wf3')
wf_3_task_execs = wf_3_ex.task_executions
wf_3_task_1_ex = self._assert_single_item(
wf_3_ex.task_executions,
name='task1'
@ -2084,8 +2026,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the parent workflow execution.
wf_1_ex = self._assert_single_item(wf_execs, name='wb.wf1')
wf_1_task_execs = wf_1_ex.task_executions
wf_1_task_1_ex = self._assert_single_item(
wf_1_ex.task_executions,
name='task1'
@ -2103,8 +2043,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
# Get objects for the subworkflow executions.
wf_2_ex = self._assert_single_item(wf_execs, name='wb.wf2')
wf_2_task_execs = wf_2_ex.task_executions
wf_2_task_1_ex = self._assert_single_item(
wf_2_ex.task_executions,
name='task1'
@ -2125,8 +2063,6 @@ class SubworkflowPauseResumeTest(base.EngineTestCase):
wf_3_ex = self._assert_single_item(wf_execs, name='wb.wf3')
wf_3_task_execs = wf_3_ex.task_executions
wf_3_task_1_ex = self._assert_single_item(
wf_3_ex.task_executions,
name='task1'
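
Besides renaming workbook to wb_text, the test workbooks switch on-success from a one-element list to the scalar shorthand; both spellings are accepted by the v2 DSL, for example (illustrative fragment only):

list_form = """
task1:
  action: std.noop
  on-success:
    - task2
"""

scalar_form = """
task1:
  action: std.noop
  on-success: task2
"""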

View File

@ -128,7 +128,7 @@ class WorkflowController(object):
            mistral.workflow.commands.WorkflowCommand).
        """

        if self._is_paused_or_completed():
        if self._is_completed():
            return []

        return self._find_next_commands(task_ex)
@ -257,6 +257,9 @@ class WorkflowController(object):
            for t in self._get_task_executions(state=states.IDLE)
        ]

    def _is_completed(self):
        return states.is_completed(self.wf_ex.state)

    def _is_paused_or_completed(self):
        return states.is_paused_or_completed(self.wf_ex.state)
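
The guard is relaxed from "paused or completed" to "completed" because the completion path in the engine above now calls continue_workflow() on a paused workflow just to record next_tasks, while a finished workflow must still produce no commands. A small illustration of the two predicates (assuming the usual constants in mistral.workflow.states):

from mistral.workflow import states

# Paused is not completed, so continue_workflow() still computes commands
# (and the caller can persist next task names) while the workflow is paused.
assert not states.is_completed(states.PAUSED)
assert states.is_paused_or_completed(states.PAUSED)

# Finished workflows are rejected by both the old and the new guard.
assert states.is_completed(states.SUCCESS)
assert states.is_paused_or_completed(states.SUCCESS)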

View File

@ -341,7 +341,10 @@ class DirectWorkflowController(base.WorkflowController):
            if not condition or expr.evaluate(condition, ctx)
        ]

    @profiler.trace('direct-wf-controller-get-join-logical-state')
    @profiler.trace(
        'direct-wf-controller-get-join-logical-state',
        hide_args=True
    )
    def _get_join_logical_state(self, task_spec):
        """Evaluates logical state of 'join' task.
@ -476,6 +479,10 @@ class DirectWorkflowController(base.WorkflowController):
    # TODO(rakhmerov): Method signature is incorrect given that
    # we may have multiple task executions for a task. It should
    # accept inbound task execution rather than a spec.
    @profiler.trace(
        'direct-wf-controller-get-induced-join-state',
        hide_args=True
    )
    def _get_induced_join_state(self, in_task_spec, in_task_ex,
                                join_task_spec, t_execs_cache):
        join_task_name = join_task_spec.get_name()
@ -494,10 +501,10 @@ class DirectWorkflowController(base.WorkflowController):
        if not states.is_completed(in_task_ex.state):
            return states.WAITING, 1, None

        # [(task name, params, event name), ...]
        next_tasks_tuples = self._find_next_tasks(in_task_ex)
        # [(task name, event name), ...]
        next_tasks_tuples = in_task_ex.next_tasks or []

        next_tasks_dict = {tup[0]: tup[2] for tup in next_tasks_tuples}
        next_tasks_dict = {tup[0]: tup[1] for tup in next_tasks_tuples}

        if join_task_name not in next_tasks_dict:
            return states.ERROR, 1, "not triggered"
@ -533,7 +540,7 @@ class DirectWorkflowController(base.WorkflowController):
            if not states.is_completed(t_ex.state):
                return True, depth

            if t_name in [t[0] for t in self._find_next_tasks(t_ex)]:
            if t_name in [t[0] for t in t_ex.next_tasks]:
                return True, depth

        return False, depth
@ -561,7 +568,7 @@ class DirectWorkflowController(base.WorkflowController):
        t_execs_cache = {
            t_ex.name: t_ex for t_ex in self._get_task_executions(
                fields=('id', 'name', 'state'),
                fields=('id', 'name', 'state', 'next_tasks'),
                name={'in': names}
            )
        } if names else {}  # don't perform a db request if 'names' are empty
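
To make the index shift concrete: _find_next_tasks() yields (task name, params, event name) triples, whereas the persisted next_tasks field holds (task name, event name) pairs, so the lookup dict now reads tup[1] instead of tup[2]. A toy illustration with hypothetical values:

# Persisted at task completion as (task_name, event) pairs.
next_tasks_tuples = [('task3', 'on-success'), ('task9', 'on-error')]

next_tasks_dict = {tup[0]: tup[1] for tup in next_tasks_tuples}

assert next_tasks_dict == {'task3': 'on-success', 'task9': 'on-error'}

# A join reached via 'task3' counts as triggered by this branch; a join
# that doesn't appear here resolves to the "not triggered" case above.
assert 'task3' in next_tasks_dict
assert 'task5' not in next_tasks_dict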