deb-python-taskflow/taskflow/engines/action_engine/executor.py
Joshua Harlow 5f0b514a14 Stop returning atoms from execute/revert methods
There is no need to return the atom that was executed from the
future's result() method, since we can just as easily set an
attribute on the future and reference it from there later. This is
also required for a process-based executor, since it is not
typically possible to send back a raw task object (and it is not
desirable to require this); even if it were possible, the task
would be pickled and unpickled multiple times, so it cannot be
guaranteed to even be the same object (in fact it is not).

Part of blueprint process-executor

Change-Id: I4a05ea5dcdef97218312e3a88ed4a1dfdf1b1edf
2014-12-06 15:03:58 -08:00
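The pattern the commit describes can be illustrated with the standard
concurrent.futures module alone: attach the atom to the future as an
attribute instead of shipping it back through the worker's return value.
This is a minimal sketch, not taskflow code; EchoTask and run_one are
hypothetical names, while fut.atom and the ('executed', result) tuple
mirror what the executor.py listing below actually does.

# Illustrative sketch only -- EchoTask and run_one are made up; the
# ``fut.atom`` attribute and (event, result) tuple mirror executor.py.
from concurrent import futures


class EchoTask(object):
    """Stand-in for a real taskflow task (hypothetical)."""

    def execute(self, value):
        return value


def run_one(task, value):
    # Return only the outcome, not the task itself; under a process-based
    # executor the task would be pickled/unpickled and would no longer be
    # the same object anyway.
    return ('executed', task.execute(value))


with futures.ThreadPoolExecutor(max_workers=2) as ex:
    t = EchoTask()
    fut = ex.submit(run_one, t, 42)
    # Remember which atom this future belongs to on the future itself...
    fut.atom = t
    # ...so it can be recovered later without round-tripping through
    # the worker process or thread.
    event, result = fut.result()
    assert fut.atom is t
    assert (event, result) == ('executed', 42)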


# -*- coding: utf-8 -*-
# Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc

import six

from taskflow import task as _task
from taskflow.types import failure
from taskflow.types import futures
from taskflow.utils import async_utils
from taskflow.utils import threading_utils

# Execution and reversion events.
EXECUTED = 'executed'
REVERTED = 'reverted'

def _execute_task(task, arguments, progress_callback):
    with task.autobind(_task.EVENT_UPDATE_PROGRESS, progress_callback):
        try:
            task.pre_execute()
            result = task.execute(**arguments)
        except Exception:
            # NOTE(imelnikov): wrap current exception with Failure
            # object and return it.
            result = failure.Failure()
        finally:
            task.post_execute()
    return (EXECUTED, result)

def _revert_task(task, arguments, result, failures, progress_callback):
    kwargs = arguments.copy()
    kwargs[_task.REVERT_RESULT] = result
    kwargs[_task.REVERT_FLOW_FAILURES] = failures
    with task.autobind(_task.EVENT_UPDATE_PROGRESS, progress_callback):
        try:
            task.pre_revert()
            result = task.revert(**kwargs)
        except Exception:
            # NOTE(imelnikov): wrap current exception with Failure
            # object and return it.
            result = failure.Failure()
        finally:
            task.post_revert()
    return (REVERTED, result)

@six.add_metaclass(abc.ABCMeta)
class TaskExecutorBase(object):
    """Executes and reverts tasks.

    This class takes a task and its arguments and executes or reverts it.
    It encapsulates the knowledge of how the task should be executed or
    reverted: right now, on a separate thread, on another machine, etc.
    """

    @abc.abstractmethod
    def execute_task(self, task, task_uuid, arguments,
                     progress_callback=None):
        """Schedules task execution."""

    @abc.abstractmethod
    def revert_task(self, task, task_uuid, arguments, result, failures,
                    progress_callback=None):
        """Schedules task reversion."""

    @abc.abstractmethod
    def wait_for_any(self, fs, timeout=None):
        """Wait for futures returned by this executor to complete."""

    def start(self):
        """Prepare to execute tasks."""
        pass

    def stop(self):
        """Finalize task executor."""
        pass

class SerialTaskExecutor(TaskExecutorBase):
    """Executes tasks one after another."""

    def __init__(self):
        self._executor = futures.SynchronousExecutor()

    def execute_task(self, task, task_uuid, arguments,
                     progress_callback=None):
        fut = self._executor.submit(_execute_task, task, arguments,
                                    progress_callback)
        fut.atom = task
        return fut

    def revert_task(self, task, task_uuid, arguments, result, failures,
                    progress_callback=None):
        fut = self._executor.submit(_revert_task, task, arguments, result,
                                    failures, progress_callback)
        fut.atom = task
        return fut

    def wait_for_any(self, fs, timeout=None):
        return async_utils.wait_for_any(fs, timeout)

class ParallelTaskExecutor(TaskExecutorBase):
    """Executes tasks in parallel.

    Submits tasks to an executor which should provide an interface similar
    to concurrent.futures.Executor.
    """

    def __init__(self, executor=None, max_workers=None):
        self._executor = executor
        self._max_workers = max_workers
        self._create_executor = executor is None

    def execute_task(self, task, task_uuid, arguments,
                     progress_callback=None):
        fut = self._executor.submit(_execute_task, task,
                                    arguments, progress_callback)
        fut.atom = task
        return fut

    def revert_task(self, task, task_uuid, arguments, result, failures,
                    progress_callback=None):
        fut = self._executor.submit(_revert_task, task, arguments,
                                    result, failures, progress_callback)
        fut.atom = task
        return fut

    def wait_for_any(self, fs, timeout=None):
        return async_utils.wait_for_any(fs, timeout)

    def start(self):
        if self._create_executor:
            if self._max_workers is not None:
                max_workers = self._max_workers
            else:
                max_workers = threading_utils.get_optimal_thread_count()
            self._executor = futures.ThreadPoolExecutor(max_workers)

    def stop(self):
        if self._create_executor:
            self._executor.shutdown(wait=True)
            self._executor = None
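
For orientation, a hedged usage sketch of SerialTaskExecutor from the module
above. EchoTask is a made-up example class; the sketch assumes that
taskflow.task.Task supplies the pre/post hooks and the autobind helper that
_execute_task relies on (as this module's own calls imply), so treat it as
illustrative rather than a snippet verified against this exact revision.

# Illustrative only -- not part of executor.py. ``EchoTask`` is a made-up
# example task; the executor calls match the definitions above.
from taskflow import task
from taskflow.engines.action_engine import executor


class EchoTask(task.Task):
    def execute(self, value):
        return value


ex = executor.SerialTaskExecutor()
ex.start()
try:
    t = EchoTask()
    fut = ex.execute_task(t, 'some-uuid', {'value': 42})
    # The executed atom rides along on the future itself...
    assert fut.atom is t
    # ...while result() carries only the (event, result) pair.
    event, result = fut.result()
    assert (event, result) == (executor.EXECUTED, 42)
finally:
    ex.stop()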