Merge branch 'master' of github.com:yahoo/TaskFlow into sql
@@ -91,10 +91,11 @@ class MemoryCatalog(catalog.Catalog):
        self._catalogs = [(j, b) for (j, b) in self._catalogs if j != job]


class MemoryWorkflowDetail(logbook.WorkflowDetail):
    def __init__(self, book, name):
        super(MemoryWorkflowDetail, self).__init__(book, name)
class MemoryFlowDetail(logbook.FlowDetail):
    def __init__(self, book, name, task_cls=logbook.TaskDetail):
        super(MemoryFlowDetail, self).__init__(book, name)
        self._tasks = []
        self._task_cls = task_cls

    def __iter__(self):
        for t in self._tasks:
@@ -106,64 +107,66 @@ class MemoryWorkflowDetail(logbook.WorkflowDetail):
                return True
        return False

    def fetch_tasks(self, task_name):
    def __getitem__(self, task_name):
        return [t for t in self if t.name == task_name]

    def __len__(self):
        return len(self._tasks)

    def add_task(self, task_details):
    def add_task(self, task_name):
        task_details = self._task_cls(task_name)
        self._tasks.append(task_details)
        return task_details

    def delete_tasks(self, task_name):
    def __delitem__(self, task_name):
        self._tasks = [t for t in self if t.name != task_name]


class MemoryLogBook(logbook.LogBook):
    def __init__(self):
        super(MemoryLogBook, self).__init__()
        self._workflows = []
        self._workflow_names = set()
        self._flows = []
        self._flow_names = set()
        self._closed = False

    @check_not_closed
    def add_workflow(self, workflow_name):
        if workflow_name in self._workflow_names:
    def add_flow(self, flow_name):
        if flow_name in self._flow_names:
            raise exc.AlreadyExists()
        self._workflows.append(MemoryWorkflowDetail(self, workflow_name))
        self._workflow_names.add(workflow_name)
        f = MemoryFlowDetail(self, flow_name)
        self._flows.append(f)
        self._flow_names.add(flow_name)
        return f

    @check_not_closed
    def fetch_workflow(self, workflow_name):
        if workflow_name not in self._workflow_names:
    def __getitem__(self, flow_name):
        if flow_name not in self._flow_names:
            raise exc.NotFound()
        for w in self._workflows:
            if w.name == workflow_name:
        for w in self._flows:
            if w.name == flow_name:
                return w

    @check_not_closed
    def __iter__(self):
        for w in self._workflows:
        for w in self._flows:
            yield w

    def close(self):
        self._closed = True

    @check_not_closed
    def __contains__(self, workflow_name):
        try:
            self.fetch_workflow(workflow_name)
            return True
        except exc.NotFound:
    def __contains__(self, flow_name):
        if flow_name not in self._flow_names:
            return False
        return True

    def delete_workflow(self, workflow_name):
        w = self.fetch_workflow(workflow_name)
        self._workflow_names.remove(workflow_name)
        self._workflows.remove(w)
    def __delitem__(self, flow_name):
        w = self[flow_name]
        self._flow_names.remove(flow_name)
        self._flows.remove(w)

    def __len__(self):
        return len(self._workflows)
        return len(self._flows)


class MemoryJobBoard(jobboard.JobBoard):
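
The memory backend hunks above replace the named fetch/add/delete methods with Python's container protocols. A minimal sketch of how the reworked in-memory API reads after this change (the flow and task names are made up for illustration; only behaviour visible in the hunks above is assumed):

    # Illustrative only: exercising the dict-style MemoryLogBook/MemoryFlowDetail API.
    book = MemoryLogBook()
    flow_details = book.add_flow('resize')       # add_flow() now returns the new MemoryFlowDetail
    flow_details.add_task('resize.download')     # add_task() builds and returns the TaskDetail itself

    assert 'resize' in book                      # __contains__ checks by flow name
    assert len(book['resize']) == 1              # __getitem__ plus FlowDetail.__len__
    del book['resize']                           # __delitem__ replaces delete_workflow()
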
@@ -26,10 +26,10 @@ class TaskException(TaskFlowException):
    """When a task failure occurs the following object will be given to revert
    and can be used to interrogate what caused the failure."""

    def __init__(self, task, workflow=None, cause=None):
    def __init__(self, task, flow=None, cause=None):
        super(TaskException, self).__init__()
        self.task = task
        self.workflow = workflow
        self.flow = flow
        self.cause = cause
@@ -23,6 +23,7 @@ import uuid
from taskflow import exceptions as exc
from taskflow import logbook
from taskflow import states
from taskflow import utils


class Claimer(object):
@@ -53,7 +54,7 @@ class Job(object):

    __metaclass__ = abc.ABCMeta

    def __init__(self, name, context, catalog, claimer):
    def __init__(self, name, context, catalog, claimer, jid=None):
        self.name = name
        self.context = context
        self.owner = None
@@ -61,7 +62,10 @@ class Job(object):
        self._catalog = catalog
        self._claimer = claimer
        self._logbook = None
        self._id = str(uuid.uuid4().hex)
        if not jid:
            self._id = str(uuid.uuid4().hex)
        else:
            self._id = str(jid)
        self._state = states.UNCLAIMED

    def __str__(self):
@@ -97,10 +101,10 @@ class Job(object):
        def wf_state_change_listener(context, wf, old_state):
            if wf.name in self.logbook:
                return
            self.logbook.add_workflow(wf.name)
            self.logbook.add_flow(wf.name)

        def task_result_fetcher(context, wf, task):
            wf_details = self.logbook.fetch_workflow(wf.name)
            wf_details = self.logbook[wf.name]
            # See if it completed before so that we can use its results instead
            # of having to recompute them.
            td_name = task_state_name_functor(task, states.SUCCESS)
@@ -108,21 +112,22 @@ class Job(object):
                # TODO(harlowja): should we be a little more cautious about
                # duplicate task results? Maybe we shouldn't allow them to
                # have the same name in the first place?
                task_details = wf_details.fetch_tasks(td_name)[0]
                task_details = wf_details[td_name][0]
                if task_details.metadata and 'result' in task_details.metadata:
                    return (True, task_details.metadata['result'])
            return (False, None)

        def task_state_change_listener(context, state, wf, task, result=None):
            metadata = None
            wf_details = self.logbook.fetch_workflow(wf.name)
            wf_details = self.logbook[wf.name]
            if state == states.SUCCESS:
                metadata = {
                    'result': result,
                }
            td_name = task_state_name_functor(task, state)
            if td_name not in wf_details:
                wf_details.add_task(logbook.TaskDetail(td_name, metadata))
                td_details = wf_details.add_task(td_name)
                td_details.metadata = metadata

        wf.task_listeners.append(task_state_change_listener)
        wf.listeners.append(wf_state_change_listener)
@@ -170,22 +175,13 @@ class Job(object):
    def await(self, timeout=None):
        """Awaits until either the job fails or succeeds or the provided
        timeout is reached."""
        if timeout is not None:
            end_time = time.time() + max(0, timeout)
        else:
            end_time = None
        # Use the same/similar scheme that the python condition class uses.
        delay = 0.0005
        while self.state not in (states.FAILURE, states.SUCCESS):
            time.sleep(delay)
            if end_time is not None:
                remaining = end_time - time.time()
                if remaining <= 0:
                    return False
                delay = min(delay * 2, remaining, 0.05)
            else:
                delay = min(delay * 2, 0.05)
        return True

        def check_functor():
            if self.state not in (states.FAILURE, states.SUCCESS):
                return False
            return True

        return utils.await(check_functor, timeout)

    @property
    def tracking_id(self):
@@ -29,14 +29,15 @@ class TaskDetail(object):
        self.date_created = datetime.utcnow()
        self.name = name
        self.metadata = metadata
        self.date_updated = None

    def __str__(self):
        return "TaskDetail (%s, %s): %s" % (self.name, self.date_created,
                                            self.metadata)


class WorkflowDetail(object):
    """Workflow details have the bare minimum of these fields/methods."""
class FlowDetail(object):
    """Flow details have the bare minimum of these fields/methods."""

    __metaclass__ = abc.ABCMeta

@@ -54,31 +55,32 @@ class WorkflowDetail(object):
    @abc.abstractmethod
    def __contains__(self, task_name):
        """Determines if any task details with the given name exists in this
        workflow details."""
        flow details."""
        raise NotImplementedError()

    @abc.abstractmethod
    def fetch_tasks(self, task_name):
    def __getitem__(self, task_name):
        """Fetch any task details that match the given task name."""
        raise NotImplementedError()

    @abc.abstractmethod
    def add_task(self, task_details):
        """Adds a task detail entry to this workflow details."""
    def add_task(self, task_name):
        """Atomically creates a new task detail entry to this flows details and
        returns it for further use."""
        raise NotImplementedError()

    @abc.abstractmethod
    def delete_tasks(self, task_name):
    def __delitem__(self, task_name):
        """Deletes any task details that match the given task name."""
        raise NotImplementedError()

    @abc.abstractmethod
    def __len__(self):
        """Returns how many task details objects the workflow contains."""
        """Returns how many task details objects the flow contains."""
        raise NotImplementedError()

    def __str__(self):
        return "WorkflowDetail (%s): %s entries" % (self.name, len(self))
        return "FlowDetail (%s): %s entries" % (self.name, len(self))


class LogBook(object):
@@ -87,42 +89,42 @@ class LogBook(object):
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def add_workflow(self, workflow_name):
        """Atomically adds a new workflow details object to the given logbook
        or raises an exception if that workflow (or a workflow with
        that name) already exists.
    def add_flow(self, flow_name):
        """Atomically adds and returns a new flow details object to the given
        logbook or raises an exception if that flow (or a flow with that name)
        already exists.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def fetch_workflow(self, workflow_name):
        """Fetches the given workflow details object for the given workflow
        name or raises an exception if that workflow name does not exist."""
    def __getitem__(self, flow_name):
        """Fetches the given flow details object for the given flow
        name or raises an exception if that flow name does not exist."""
        raise NotImplementedError()

    @abc.abstractmethod
    def __contains__(self, workflow_name):
        """Determines if a workflow details object with the given workflow name
    def __contains__(self, flow_name):
        """Determines if a flow details object with the given flow name
        exists in this logbook."""
        raise NotImplementedError()

    @abc.abstractmethod
    def delete_workflow(self, workflow_name):
        """Removes the given workflow details object that matches the provided
        workflow name or raises an exception if that workflow name does not
    def __delitem__(self, flow_name):
        """Removes the given flow details object that matches the provided
        flow name or raises an exception if that flow name does not
        exist."""
        raise NotImplementedError()

    @abc.abstractmethod
    def __iter__(self):
        """Iterates over all the contained workflow details.
        """Iterates over all the contained flow details.

        The order will be in the same order that they were added."""
        raise NotImplementedError()

    @abc.abstractmethod
    def __len__(self):
        """Returns how many workflow details the logbook contains."""
        """Returns how many flow details the logbook contains."""
        raise NotImplementedError()

    def close(self):
@@ -25,18 +25,18 @@ from networkx.algorithms import dag
from networkx.classes import digraph

from taskflow import exceptions as exc
from taskflow.patterns import ordered_workflow
from taskflow.patterns import ordered_flow

LOG = logging.getLogger(__name__)


class Workflow(ordered_workflow.Workflow):
    """A workflow which will analyze the attached tasks input requirements and
class Flow(ordered_flow.Flow):
    """A flow which will analyze the attached tasks input requirements and
    determine who provides said input and order the task so that said providing
    task will be ran before."""

    def __init__(self, name, tolerant=False, parents=None):
        super(Workflow, self).__init__(name, tolerant, parents)
        super(Flow, self).__init__(name, tolerant, parents)
        self._graph = digraph.DiGraph()
        self._connected = False

@@ -49,9 +49,20 @@ class Workflow(ordered_workflow.Workflow):
        self._graph.add_node(task)
        self._connected = False

    def _fetch_task_inputs(self, task):
        task_needs = task.requires()
        if not task_needs:
            return None
        inputs = {}
        for (_who, there_result) in self.results:
            for n in task_needs:
                if there_result and n in there_result:
                    inputs[n] = there_result[n]
        return inputs

    def run(self, context, *args, **kwargs):
        self.connect()
        return super(Workflow, self).run(context, *args, **kwargs)
        return super(Flow, self).run(context, *args, **kwargs)

    def order(self):
        self.connect()
@@ -60,27 +71,39 @@ class Workflow(ordered_workflow.Workflow):
        except g_exc.NetworkXUnfeasible:
            raise exc.InvalidStateException("Unable to correctly determine "
                                            "the path through the provided "
                                            "workflow which will satisfy the "
                                            "flow which will satisfy the "
                                            "tasks needed inputs and outputs.")

    def connect(self):
        """Connects the edges of the graph together."""
        if self._connected:
        """Connects the nodes & edges of the graph together."""
        if self._connected or len(self._graph) == 0:
            return

        provides_what = defaultdict(list)
        requires_what = defaultdict(list)
        for t in self._graph.nodes_iter():
            for r in t.requires:
            for r in t.requires():
                requires_what[r].append(t)
            for p in t.provides:
            for p in t.provides():
                provides_what[p].append(t)
        for (i_want, n) in requires_what.items():
            if i_want not in provides_what:
                raise exc.InvalidStateException("Task %s requires input %s "
                                                "but no other task produces "
                                                "said output" % (n, i_want))
            for p in provides_what[i_want]:

        for (want_what, who_wants) in requires_what.items():
            who_provided = 0
            for p in provides_what.get(want_what, []):
                # P produces for N so thats why we link P->N and not N->P
                self._graph.add_edge(p, n)
                for n in who_wants:
                    if p is n:
                        # No self-referencing allowed.
                        continue
                    why = {
                        want_what: True,
                    }
                    self._graph.add_edge(p, n, why)
                    who_provided += 1
            if not who_provided:
                raise exc.InvalidStateException("Task/s %s requires input %s "
                                                "but no other task produces "
                                                "said output." % (who_wants,
                                                                  want_what))

        self._connected = True
@@ -16,15 +16,15 @@
# License for the specific language governing permissions and limitations
# under the License.

from taskflow.patterns import ordered_workflow
from taskflow.patterns import ordered_flow


class Workflow(ordered_workflow.Workflow):
class Flow(ordered_flow.Flow):
    """A linear chain of *independent* tasks that can be applied as one unit or
    rolled back as one unit."""

    def __init__(self, name, tolerant=False, parents=None):
        super(Workflow, self).__init__(name, tolerant, parents)
        super(Flow, self).__init__(name, tolerant, parents)
        self._tasks = []

    def add(self, task):
@@ -28,10 +28,10 @@ from taskflow import states
LOG = logging.getLogger(__name__)


class Workflow(object):
class Flow(object):
    """A set tasks that can be applied as one unit or rolled back as one
    unit using an ordered arrangements of said tasks where reversion can be
    handled by reversing through the tasks applied."""
    unit using an ordered arrangements of said tasks where reversion is by
    default handled by reversing through the tasks applied."""

    __metaclass__ = abc.ABCMeta

@@ -43,8 +43,8 @@ class Workflow(object):
        # If this chain can ignore individual task reversion failure then this
        # should be set to true, instead of the default value of false.
        self.tolerant = tolerant
        # If this workflow has a parent workflow/s which need to be reverted if
        # this workflow fails then please include them here to allow this child
        # If this flow has a parent flow/s which need to be reverted if
        # this flow fails then please include them here to allow this child
        # to call the parents...
        self.parents = parents
        # This should be a functor that returns whether a given task has
@@ -73,11 +73,11 @@ class Workflow(object):

    @abc.abstractmethod
    def add(self, task):
        """Adds a given task to this workflow."""
        """Adds a given task to this flow."""
        raise NotImplementedError()

    def __str__(self):
        return "Workflow: %s" % (self.name)
        return "Flow: %s" % (self.name)

    @abc.abstractmethod
    def order(self):
@@ -93,16 +93,16 @@ class Workflow(object):
    def _perform_reconcilation(self, context, task, excp):
        # Attempt to reconcile the given exception that occured while applying
        # the given task and either reconcile said task and its associated
        # failure, so that the workflow can continue or abort and perform
        # failure, so that the flow can continue or abort and perform
        # some type of undo of the tasks already completed.
        try:
            self._change_state(context, states.REVERTING)
        except Exception:
            LOG.exception("Dropping exception catched when"
                          " changing state to reverting while performing"
                          " reconcilation on a tasks exception.")
        cause = exc.TaskException(task, self, excp)
        with excutils.save_and_reraise_exception():
            try:
                self._change_state(context, states.REVERTING)
            except Exception:
                LOG.exception("Dropping exception catched when"
                              " changing state to reverting while performing"
                              " reconcilation on a tasks exception.")
            try:
                self._on_task_error(context, task)
            except Exception:
@@ -111,7 +111,7 @@ class Workflow(object):
                              " exception.")
            # The default strategy will be to rollback all the contained
            # tasks by calling there reverting methods, and then calling
            # any parent workflows rollbacks (and so-on).
            # any parent flows rollbacks (and so-on).
            try:
                self.rollback(context, cause)
            finally:
@@ -124,7 +124,7 @@ class Workflow(object):

    def run(self, context, *args, **kwargs):
        if self.state != states.PENDING:
            raise exc.InvalidStateException("Unable to run workflow when "
            raise exc.InvalidStateException("Unable to run flow when "
                                            "in state %s" % (self.state))

        if self.result_fetcher:
@@ -233,10 +233,10 @@ class Workflow(object):
    def rollback(self, context, cause):
        # Performs basic task by task rollback by going through the reverse
        # order that tasks have finished and asking said task to undo whatever
        # it has done. If this workflow has any parent workflows then they will
        # it has done. If this flow has any parent flows then they will
        # also be called to rollback any tasks said parents contain.
        #
        # Note(harlowja): if a workflow can more simply revert a whole set of
        # Note(harlowja): if a flow can more simply revert a whole set of
        # tasks via a simpler command then it can override this method to
        # accomplish that.
        #
@@ -253,14 +253,14 @@ class Workflow(object):
            if not self.tolerant:
                log_f = LOG.exception
            msg = ("Failed rolling back stage %(index)s (%(task)s)"
                   " of workflow %(workflow)s, due to inner exception.")
            log_f(msg % {'index': (i + 1), 'task': task, 'workflow': self})
                   " of flow %(flow)s, due to inner exception.")
            log_f(msg % {'index': (i + 1), 'task': task, 'flow': self})
            if not self.tolerant:
                # NOTE(harlowja): LOG a msg AND re-raise the exception if
                # the chain does not tolerate exceptions happening in the
                # rollback method.
                raise
        if self.parents:
            # Rollback any parents workflows if they exist...
            # Rollback any parents flows if they exist...
            for p in self.parents:
                p.rollback(context, cause)
taskflow/tests/unit/test_graph_flow.py (new file, 130 lines)
@@ -0,0 +1,130 @@
# -*- coding: utf-8 -*-

# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import functools
import unittest

from taskflow import exceptions as excp
from taskflow import states
from taskflow import task
from taskflow import wrappers

from taskflow.patterns import graph_flow as gw


def null_functor(*args, **kwargs):
    return None


class GraphFlowTest(unittest.TestCase):
    def testRevertPath(self):
        flo = gw.Flow("test-flow")
        reverted = []

        def run1(context, *args, **kwargs):
            return {
                'a': 1,
            }

        def run1_revert(context, result, cause):
            reverted.append('run1')
            self.assertEquals(states.REVERTING, cause.flow.state)
            self.assertEquals(result, {'a': 1})

        def run2(context, a, *args, **kwargs):
            raise Exception('Dead')

        flo.add(wrappers.FunctorTask(None, run1, run1_revert,
                                     provides_what=['a'],
                                     extract_requires=True))
        flo.add(wrappers.FunctorTask(None, run2, null_functor,
                                     provides_what=['c'],
                                     extract_requires=True))

        self.assertEquals(states.PENDING, flo.state)
        self.assertRaises(Exception, flo.run, {})
        self.assertEquals(states.FAILURE, flo.state)
        self.assertEquals(['run1'], reverted)

    def testConnectRequirementFailure(self):

        def run1(context, *args, **kwargs):
            return {
                'a': 1,
            }

        def run2(context, b, c, d, *args, **kwargs):
            return None

        flo = gw.Flow("test-flow")
        flo.add(wrappers.FunctorTask(None, run1, null_functor,
                                     provides_what=['a'],
                                     extract_requires=True))
        flo.add(wrappers.FunctorTask(None, run2, null_functor,
                                     extract_requires=True))

        self.assertRaises(excp.InvalidStateException, flo.connect)
        self.assertRaises(excp.InvalidStateException, flo.run, {})
        self.assertRaises(excp.InvalidStateException, flo.order)

    def testHappyPath(self):
        flo = gw.Flow("test-flow")

        run_order = []
        f_args = {}

        def run1(context, *args, **kwargs):
            run_order.append('ran1')
            return {
                'a': 1,
            }

        def run2(context, a, *args, **kwargs):
            run_order.append('ran2')
            return {
                'c': 3,
            }

        def run3(context, a, *args, **kwargs):
            run_order.append('ran3')
            return {
                'b': 2,
            }

        def run4(context, b, c, *args, **kwargs):
            run_order.append('ran4')
            f_args['b'] = b
            f_args['c'] = c

        flo.add(wrappers.FunctorTask(None, run1, null_functor,
                                     provides_what=['a'],
                                     extract_requires=True))
        flo.add(wrappers.FunctorTask(None, run2, null_functor,
                                     provides_what=['c'],
                                     extract_requires=True))
        flo.add(wrappers.FunctorTask(None, run3, null_functor,
                                     provides_what=['b'],
                                     extract_requires=True))
        flo.add(wrappers.FunctorTask(None, run4, null_functor,
                                     extract_requires=True))

        flo.run({})
        self.assertEquals(['ran1', 'ran2', 'ran3', 'ran4'], sorted(run_order))
        self.assertEquals('ran1', run_order[0])
        self.assertEquals('ran4', run_order[-1])
        self.assertEquals({'b': 2, 'c': 3}, f_args)
@@ -23,14 +23,14 @@ from taskflow import states
from taskflow import task
from taskflow import wrappers

from taskflow.patterns import linear_workflow as lw
from taskflow.patterns import linear_flow as lw


def null_functor(*args, **kwargs):
    return None


class LinearWorkflowTest(unittest.TestCase):
class LinearFlowTest(unittest.TestCase):
    def makeRevertingTask(self, token, blowup=False):

        def do_apply(token, context, *args, **kwargs):
@@ -60,8 +60,48 @@ class LinearWorkflowTest(unittest.TestCase):
                                 functools.partial(do_interrupt, token),
                                 null_functor)

    def testSadFlowStateChanges(self):
        wf = lw.Flow("the-test-action")
        flow_changes = []

        def flow_listener(context, wf, previous_state):
            flow_changes.append(previous_state)

        wf.listeners.append(flow_listener)
        wf.add(self.makeRevertingTask(1, True))

        self.assertEquals(states.PENDING, wf.state)
        self.assertRaises(Exception, wf.run, {})

        expected_states = [
            states.PENDING,
            states.STARTED,
            states.RUNNING,
            states.REVERTING,
        ]
        self.assertEquals(expected_states, flow_changes)
        self.assertEquals(states.FAILURE, wf.state)

    def testHappyFlowStateChanges(self):
        wf = lw.Flow("the-test-action")
        flow_changes = []

        def flow_listener(context, wf, previous_state):
            flow_changes.append(previous_state)

        wf.listeners.append(flow_listener)
        wf.add(self.makeRevertingTask(1))

        self.assertEquals(states.PENDING, wf.state)
        wf.run({})

        self.assertEquals([states.PENDING, states.STARTED, states.RUNNING],
                          flow_changes)

        self.assertEquals(states.SUCCESS, wf.state)

    def testHappyPath(self):
        wf = lw.Workflow("the-test-action")
        wf = lw.Flow("the-test-action")

        for i in range(0, 10):
            wf.add(self.makeRevertingTask(i))
@@ -74,7 +114,7 @@ class LinearWorkflowTest(unittest.TestCase):
            self.assertEquals('passed', v)

    def testRevertingPath(self):
        wf = lw.Workflow("the-test-action")
        wf = lw.Flow("the-test-action")
        wf.add(self.makeRevertingTask(1))
        wf.add(self.makeRevertingTask(2, True))

@@ -84,7 +124,7 @@ class LinearWorkflowTest(unittest.TestCase):
        self.assertEquals(1, len(run_context))

    def testInterruptPath(self):
        wf = lw.Workflow("the-int-action")
        wf = lw.Flow("the-int-action")

        result_storage = {}

@@ -124,7 +164,7 @@ class LinearWorkflowTest(unittest.TestCase):
        self.assertEquals(2, len(context))

    def testParentRevertingPath(self):
        happy_wf = lw.Workflow("the-happy-action")
        happy_wf = lw.Flow("the-happy-action")
        for i in range(0, 10):
            happy_wf.add(self.makeRevertingTask(i))
        context = {}
@@ -133,7 +173,7 @@ class LinearWorkflowTest(unittest.TestCase):
        for (_k, v) in context.items():
            self.assertEquals('passed', v)

        baddy_wf = lw.Workflow("the-bad-action", parents=[happy_wf])
        baddy_wf = lw.Flow("the-bad-action", parents=[happy_wf])
        baddy_wf.add(self.makeRevertingTask(i + 1))
        baddy_wf.add(self.makeRevertingTask(i + 2, True))
        self.assertRaises(Exception, baddy_wf.run, context)
@@ -32,7 +32,7 @@ from taskflow import task
from taskflow import wrappers as wrap

from taskflow.backends import memory
from taskflow.patterns import linear_workflow as lw
from taskflow.patterns import linear_flow as lw


def null_functor(*args, **kwargs):
@@ -94,8 +94,8 @@ class MemoryBackendTest(unittest.TestCase):
        for j in my_jobs:
            j.state = states.PENDING
        for j in my_jobs:
            # Create some dummy workflow for the job
            wf = lw.Workflow('dummy')
            # Create some dummy flow for the job
            wf = lw.Flow('dummy')
            for i in range(0, 5):
                t = wrap.FunctorTask(None,
                                     null_functor, null_functor)
@@ -142,7 +142,7 @@ class MemoryBackendTest(unittest.TestCase):
        self.assertEquals(states.CLAIMED, j.state)
        self.assertEquals('me', j.owner)

        wf = lw.Workflow("the-int-action")
        wf = lw.Flow("the-int-action")
        j.associate(wf)
        self.assertEquals(states.PENDING, wf.state)

@@ -168,7 +168,7 @@ class MemoryBackendTest(unittest.TestCase):
        wf.run(j.context)

        self.assertEquals(1, len(j.logbook))
        self.assertEquals(4, len(j.logbook.fetch_workflow("the-int-action")))
        self.assertEquals(4, len(j.logbook["the-int-action"]))
        self.assertEquals(1, len(call_log))

        wf.reset()
@@ -176,7 +176,7 @@ class MemoryBackendTest(unittest.TestCase):
        wf.run(j.context)

        self.assertEquals(1, len(j.logbook))
        self.assertEquals(6, len(j.logbook.fetch_workflow("the-int-action")))
        self.assertEquals(6, len(j.logbook["the-int-action"]))
        self.assertEquals(2, len(call_log))
        self.assertEquals(states.SUCCESS, wf.state)

@@ -190,7 +190,7 @@ class MemoryBackendTest(unittest.TestCase):
        self.assertEquals(states.CLAIMED, j.state)
        self.assertEquals('me', j.owner)

        wf = lw.Workflow('the-line-action')
        wf = lw.Flow('the-line-action')
        self.assertEquals(states.PENDING, wf.state)
        j.associate(wf)

@@ -207,7 +207,7 @@ class MemoryBackendTest(unittest.TestCase):
        wf.run(j.context)

        self.assertEquals(1, len(j.logbook))
        self.assertEquals(4, len(j.logbook.fetch_workflow("the-line-action")))
        self.assertEquals(4, len(j.logbook["the-line-action"]))
        self.assertEquals(2, len(call_log))
        self.assertEquals(states.SUCCESS, wf.state)
@@ -18,6 +18,26 @@

import contextlib
import threading
import time


def await(check_functor, timeout=None):
    if timeout is not None:
        end_time = time.time() + max(0, timeout)
    else:
        end_time = None
    # Use the same/similar scheme that the python condition class uses.
    delay = 0.0005
    while not check_functor():
        time.sleep(delay)
        if end_time is not None:
            remaining = end_time - time.time()
            if remaining <= 0:
                return False
            delay = min(delay * 2, remaining, 0.05)
        else:
            delay = min(delay * 2, 0.05)
    return True


class ReaderWriterLock(object):
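
The await() helper added above is what Job.await() now delegates to in the job.py hunk earlier in this commit: it repeatedly sleeps (starting at 0.5ms and doubling up to a 50ms cap, the same scheme as Python's condition variables) until the supplied zero-argument functor returns a truthy value or the optional timeout runs out. A small usage sketch under those assumptions (the threading.Event is purely illustrative; also note that await only works as a function name on the Python 2 interpreters this code targets, since it later became a reserved word in Python 3):

    # Illustrative only: polling an arbitrary condition with utils.await().
    import threading

    from taskflow import utils

    done = threading.Event()

    def check_done():
        # utils.await() keeps calling this until it returns True (or times out).
        return done.is_set()

    # Returns True as soon as check_done() succeeds, False if 2 seconds elapse first.
    finished = utils.await(check_done, timeout=2.0)
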
@@ -16,6 +16,8 @@
# License for the specific language governing permissions and limitations
# under the License.

import inspect

from taskflow import task


@@ -25,13 +27,29 @@ class FunctorTask(task.Task):
    situations where existing functions already are in place and you just want
    to wrap them up."""

    def __init__(self, name, apply_functor, revert_functor):
    def __init__(self, name, apply_functor, revert_functor,
                 provides_what=None, extract_requires=False):
        super(FunctorTask, self).__init__(name)
        if not self.name:
            self.name = "%s_%s" % (apply_functor.__name__,
                                   revert_functor.__name__)
        self._apply_functor = apply_functor
        self._revert_functor = revert_functor
        self._requires = set()
        self._provides = set()
        if provides_what:
            self._provides.update(provides_what)
        if extract_requires:
            for a in inspect.getargspec(apply_functor).args:
                if a in ('self', 'context',):
                    continue
                self._requires.add(a)

    def requires(self):
        return set(self._requires)

    def provides(self):
        return set(self._provides)

    def apply(self, context, *args, **kwargs):
        return self._apply_functor(context, *args, **kwargs)
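
The FunctorTask changes above are what let the new graph flow order tasks automatically: with extract_requires=True the wrapper scrapes requires() from the apply functor's argument names (skipping self and context), and provides_what seeds provides(); graph_flow.Flow.connect() then links each producer to its consumers using exactly those two sets. A short sketch modelled on the tests earlier in this commit (the functor names are made up; everything else follows the code shown above):

    # Illustrative only: extract_requires/provides_what feeding graph flow ordering.
    from taskflow import wrappers
    from taskflow.patterns import graph_flow


    def make_volume(context, *args, **kwargs):
        return {'volume_id': 42}


    def attach_volume(context, volume_id, *args, **kwargs):
        return {'attached': volume_id}


    def noop_revert(context, result, cause):
        return None


    flo = graph_flow.Flow('attach-sketch')
    flo.add(wrappers.FunctorTask(None, make_volume, noop_revert,
                                 provides_what=['volume_id'],
                                 extract_requires=True))
    # extract_requires inspects attach_volume's signature and records that it
    # requires 'volume_id', which make_volume provides, so connect() adds the
    # edge make_volume -> attach_volume and order() runs them in that order.
    flo.add(wrappers.FunctorTask(None, attach_volume, noop_revert,
                                 extract_requires=True))
    flo.run({})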