Fix misspellings

Change-Id: I6f332f01d197c6ba1b02de1145714718f8aea6fb
Stanislav Kudriashev
2013-12-10 13:13:30 +02:00
parent c49b07e231
commit 1a70f8cb44
16 changed files with 31 additions and 30 deletions

View File

@@ -109,7 +109,7 @@ class ParallelGraphAction(SequentialGraphAction):
nodes which can run (those which have there dependencies satisfied
or those with no dependencies) and submitting them to the executor
to be ran, and then after running this process will be repeated until
-no more nodes can be ran (or a failure has a occured and all nodes
+no more nodes can be ran (or a failure has a occurred and all nodes
were stopped from further running).
"""
# A deque is a thread safe push/pop/popleft/append implementation
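
The docstring fixed above describes the parallel engine's scheduling loop: submit every node whose dependencies are satisfied, wait, and repeat until nothing is left to run (or a failure stops further scheduling). Below is a simplified, hypothetical sketch of that idea, not taskflow's actual implementation; the names run_graph, deps and work are made up for illustration.

import collections
from concurrent import futures

def run_graph(deps, work, max_workers=2):
    # deps: {node: set of nodes it depends on}, work: {node: callable}
    done = set()
    remaining = collections.deque(deps)
    with futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        while remaining:
            # Find everything whose dependencies are already satisfied.
            runnable = [n for n in remaining if deps[n] <= done]
            if not runnable:
                raise RuntimeError("no runnable nodes (cycle or unmet dependency)")
            running = {executor.submit(work[n]): n for n in runnable}
            for fut in futures.as_completed(running):
                fut.result()  # re-raises any failure, stopping further rounds
                done.add(running[fut])
            remaining = collections.deque(n for n in remaining if n not in done)

run_graph({'a': set(), 'b': {'a'}, 'c': {'a'}},
          {n: (lambda n=n: print("ran", n)) for n in 'abc'})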

View File

@@ -104,7 +104,7 @@ def verify(spec, **kwargs):
# These two functions connect into the state transition notification emission
# points that the engine outputs, they can be used to log state transitions
-# that are occuring, or they can be used to suspend the engine (or perform
+# that are occurring, or they can be used to suspend the engine (or perform
# other useful activities).
def flow_watch(state, details):
print('Flow => %s' % state)
@@ -124,7 +124,7 @@ flow = lf.Flow("make-auto").add(
# These *_installed outputs allow for other tasks to depend on certain
# actions being performed (aka the components were installed), another
# way to do this is to link() the tasks manually instead of creating
-# an 'artifical' data dependency that accomplishes the same goal the
+# an 'artificial' data dependency that accomplishes the same goal the
# manual linking would result in.
task.FunctorTask(install_engine, provides='engine_installed'),
task.FunctorTask(install_doors, provides='doors_installed'),
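
For reference, a minimal runnable variant of the data-dependency idea above (assuming taskflow is installed; install_engine/install_doors are simplified stand-ins for the example's functions): because install_doors() names engine_installed as an argument, the engine feeds it the value produced by the task that provides 'engine_installed'.

import taskflow.engines
from taskflow.patterns import linear_flow as lf
from taskflow import task

def install_engine():
    return True

def install_doors(engine_installed):
    # Receives the 'engine_installed' output produced by the previous task.
    return True

flow = lf.Flow("make-auto").add(
    task.FunctorTask(install_engine, provides='engine_installed'),
    task.FunctorTask(install_doors, provides='doors_installed'),
)
print(taskflow.engines.run(flow))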

View File

@@ -80,7 +80,7 @@ flow = lf.Flow('root').add(
# Calculate 'z1 = x1+y1 = 5'
#
# Rebind here means that the execute() function x argument will be
-# satisified from a previous output named 'x1', and the y argument
+# satisfied from a previous output named 'x1', and the y argument
# of execute() will be populated from the previous output named 'y1'
#
# The output (result of adding) will be mapped into a variable named
@@ -95,6 +95,6 @@ flow = lf.Flow('root').add(
# The result here will be all results (from all tasks) which is stored in an
# in-memory storage location that backs this engine since it is not configured
-# with persistance storage.
+# with persistence storage.
result = taskflow.engines.run(flow, engine_conf='parallel')
print(result)
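
A small runnable sketch of that rebind mapping (assuming taskflow is installed; the Provider task and the x1/y1/z1 names mirror the example):

import taskflow.engines
from taskflow.patterns import linear_flow as lf
from taskflow import task

class Provider(task.Task):
    def execute(self):
        return (2, 3)            # mapped onto the names in 'provides' below

class Adder(task.Task):
    def execute(self, x, y):
        return x + y

flow = lf.Flow('root').add(
    Provider(provides=('x1', 'y1')),
    # rebind maps execute() argument names (x, y) onto names already in
    # storage (x1, y1); the sum is stored under the name 'z1'.
    Adder(provides='z1', rebind={'x': 'x1', 'y': 'y1'}),
)
print(taskflow.engines.run(flow))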

View File

@@ -39,7 +39,7 @@ from taskflow import task
# are bound with ('z', 'd') keys from the engines storage mechanism.
#
# A multiplier task uses a binding that another task also provides, but this
-# example explicitly shows that 'z' parameter is binded with 'a' key
+# example explicitly shows that 'z' parameter is bound with 'a' key
# This shows that if a task depends on a key named the same as a key provided
# from another task the name can be remapped to take the desired key from a
# different origin.
@@ -73,7 +73,8 @@ class Adder(task.Task):
return x + y
-# This task multiplies an input variable by a multipler and returns the result.
+# This task multiplies an input variable by a multiplier and returns the
+# result.
#
# Note that since this task does not have a revert() function (since
# multiplication is a stateless operation) and there are no side-effects that
@@ -112,6 +113,6 @@ flow = lf.Flow('root').add(
# The result here will be all results (from all tasks) which is stored in an
# in-memory storage location that backs this engine since it is not configured
-# with persistance storage.
+# with persistence storage.
results = taskflow.engines.run(flow)
print(results)

View File

@@ -70,7 +70,7 @@ class UrlCaller(object):
status_cb(float(i) / len(data))
-# Since engines save the output of tasks to a optional persistant storage
+# Since engines save the output of tasks to a optional persistent storage
# backend resources have to be dealt with in a slightly different manner since
# resources are transient and can not be persisted (or serialized). For tasks
# that require access to a set of resources it is a common pattern to provide
@@ -158,7 +158,7 @@ class DeclareSuccess(task.Task):
resources = ResourceFetcher()
flow = lf.Flow("initialize-me")
-# 1. First we extract the api request into a useable format.
+# 1. First we extract the api request into a usable format.
# 2. Then we go ahead and make a database entry for our request.
flow.add(ExtractInputRequest(resources), MakeDBEntry(resources))
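
The resource-passing pattern described above, in a stripped-down hypothetical form (ResourceFetcher and MakeDBEntry here are simplified stand-ins, not taskflow APIs): transient, non-serializable resources are attached to tasks at construction time instead of flowing through (persistable) storage.

from taskflow import task

class ResourceFetcher(object):
    """Lazily creates resources (connections, clients) that can't be persisted."""
    def __init__(self):
        self._db = None

    @property
    def db_handle(self):
        if self._db is None:
            self._db = object()   # stand-in for a real connection object
        return self._db

class MakeDBEntry(task.Task):
    def __init__(self, resources):
        super(MakeDBEntry, self).__init__()
        self._resources = resources   # kept on the task, never persisted

    def execute(self, request):
        db = self._resources.db_handle
        print("Saving %s via %s" % (request, db))

MakeDBEntry(ResourceFetcher()).execute({'user': 'bob'})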

View File

@@ -28,7 +28,7 @@ import tempfile
# script which doesn't 'crash' and it will resume all the given engines flows
# that did not complete and run them to completion (instead of crashing).
#
-# This shows how a set of tasks can be finished even after repeatingly being
+# This shows how a set of tasks can be finished even after repeatedly being
# crashed, *crash resistance* if you may call it, due to the engine concept as
# well as the persistence layer which keeps track of the state a flow
# transitions through and persists the intermediary inputs and outputs and

View File

@@ -45,9 +45,9 @@ from taskflow.utils import eventlet_utils as e_utils
from taskflow.utils import persistence_utils as p_utils
-# INTRO: This examples shows how a hierachy of flows can be used to create a vm
-# in a reliable & resumable manner using taskflow + a miniature version of what
-# nova does while booting a vm.
+# INTRO: This examples shows how a hierarchy of flows can be used to create a
+# vm in a reliable & resumable manner using taskflow + a miniature version of
+# what nova does while booting a vm.
@contextlib.contextmanager

View File

@@ -41,7 +41,7 @@ from taskflow.persistence import backends
from taskflow.utils import persistence_utils as p_utils
-# INTRO: This examples shows how a hierachy of flows can be used to create a
+# INTRO: This examples shows how a hierarchy of flows can be used to create a
# pseudo-volume in a reliable & resumable manner using taskflow + a miniature
# version of what cinder does while creating a volume (very miniature).

View File

@@ -33,7 +33,7 @@ from taskflow import task
# INTRO: In this example we create two tasks, each of which ~calls~ a given
# ~phone~ number (provided as a function input) in a linear fashion (one after
-# the other). For a workflow which is serial this shows a extremly simple way
+# the other). For a workflow which is serial this shows a extremely simple way
# of structuring your tasks (the code that does the work) into a linear
# sequence (the flow) and then passing the work off to an engine, with some
# initial data to be ran in a reliable manner.

View File

@@ -37,7 +37,7 @@ from taskflow import task
# a given ~phone~ number (provided as a function input) in a linear fashion
# (one after the other).
#
-# For a workflow which is serial this shows a extremly simple way
+# For a workflow which is serial this shows a extremely simple way
# of structuring your tasks (the code that does the work) into a linear
# sequence (the flow) and then passing the work off to an engine, with some
# initial data to be ran in a reliable manner.
@@ -78,7 +78,7 @@ def task_watch(state, details):
# as tasks. There was previous work done to just allow a function to be
# directly passed, but in python 3.0 there is no easy way to capture an
# instance method, so this wrapping approach was decided upon instead which
-# can attach to instance methods (if thats desired).
+# can attach to instance methods (if that's desired).
flow = lf.Flow("Call-them")
flow.add(task.FunctorTask(execute=call_jim))
flow.add(task.FunctorTask(execute=call_joe))
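
A minimal sketch of attaching instance methods that way (assuming taskflow is installed; the Caller class is hypothetical, and the explicit name= values keep the two wrapped methods from sharing a task name):

import taskflow.engines
from taskflow.patterns import linear_flow as lf
from taskflow import task

class Caller(object):
    def __init__(self, who):
        self._who = who

    def call(self):
        print("Calling... %s" % self._who)

flow = lf.Flow("Call-them")
flow.add(task.FunctorTask(execute=Caller("jim").call, name="call-jim"))
flow.add(task.FunctorTask(execute=Caller("joe").call, name="call-joe"))
taskflow.engines.run(flow)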

View File

@@ -73,7 +73,7 @@ class JobNotFound(TaskFlowException):
class MissingDependencies(InvalidStateException):
"""Raised when a entity has dependencies that can not be satisified."""
"""Raised when a entity has dependencies that can not be satisfied."""
message = ("%(who)s requires %(requirements)s but no other entity produces"
" said requirements")

View File

@@ -333,7 +333,7 @@ class Connection(base.Connection):
def _step_book():
self._run_with_process_lock("book", _step_flow)
-# Acquire all locks by going through this little hiearchy.
+# Acquire all locks by going through this little hierarchy.
self._run_with_process_lock("init", _step_book)
@lock_utils.locked
@@ -368,7 +368,7 @@ class Connection(base.Connection):
if e.errno != errno.ENOENT:
raise
-# Acquire all locks by going through this little hiearchy.
+# Acquire all locks by going through this little hierarchy.
self._run_with_process_lock("book", _destroy_book)
def _get_logbook(self, book_uuid):

View File

@@ -55,7 +55,7 @@ class LogBook(object):
def add(self, flow_detail):
"""Adds a new entry to the underlying logbook.
-Does not *guarantee* that the details will be immediatly saved.
+Does not *guarantee* that the details will be immediately saved.
"""
self._flowdetails.append(flow_detail)
@@ -139,11 +139,11 @@ class FlowDetail(object):
class TaskDetail(object):
"""This class contains an entry that contains the persistance of a task
"""This class contains an entry that contains the persistence of a task
after or before (or during) it is running including any results it may have
produced, any state that it may be in (failed for example), any exception
-that occured when running and any associated stacktrace that may have
-occuring during that exception being thrown and any other metadata that
+that occurred when running and any associated stacktrace that may have
+occurring during that exception being thrown and any other metadata that
should be stored along-side the details about this task.
The data contained within this class need *not* backed by the backend

View File

@@ -301,7 +301,7 @@ class Storage(object):
"""Add values into storage
This method should be used to put flow parameters (requirements that
-are not satisified by any task in the flow) into storage.
+are not satisfied by any task in the flow) into storage.
"""
injector_td = self._flowdetail.find_by_name(self.injector_name)
if injector_td is None:
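
In example form (a short sketch assuming taskflow is installed; Greeter and 'who' are made-up names), one common way to supply such flow parameters is the store argument to taskflow.engines.run(), whose values end up injected into storage as described above:

import taskflow.engines
from taskflow.patterns import linear_flow as lf
from taskflow import task

class Greeter(task.Task):
    def execute(self, who):          # 'who' is not produced by any task...
        return "hello %s" % who

flow = lf.Flow('greet').add(Greeter(provides='greeting'))
# ...so it has to arrive as an injected flow parameter.
print(taskflow.engines.run(flow, store={'who': 'world'}))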

View File

@@ -80,7 +80,7 @@ class MultiLock(object):
def is_locked(lock):
# NOTE(harlowja): the threading2 lock doesn't seem to have this
-# attribute, so thats why we are checking it existing first.
+# attribute, so that's why we are checking it existing first.
if hasattr(lock, 'locked'):
return lock.locked()
return False

View File

@@ -167,7 +167,7 @@ def as_bool(val):
def as_int(obj, quiet=False):
"""Converts an arbitary value into a integer."""
"""Converts an arbitrary value into a integer."""
# Try "2" -> 2
try:
return int(obj)
@@ -204,7 +204,7 @@ def ensure_tree(path):
class TransitionNotifier(object):
"""A utility helper class that can be used to subscribe to
-notifications of events occuring as well as allow a entity to post said
+notifications of events occurring as well as allow a entity to post said
notifications to subscribers.
"""
@@ -412,7 +412,7 @@ class Failure(object):
"""Check if any of exc_classes caused the failure
Arguments of this method can be exception types or type
-names (stings). If captured excption is instance of
+names (stings). If captured exception is instance of
exception of given type, the corresponding argument is
returned. Else, None is returned.
"""