Remove deprecated things for 2.0 release

Change-Id: Id9adbc50bd51adc77ce88f698ad0ea2ee63fc5e2
This commit is contained in:
Joshua Harlow
2015-07-06 15:31:29 -07:00
committed by Joshua Harlow
parent 3321b22ca8
commit afbfe77a7f
21 changed files with 63 additions and 738 deletions

View File

@@ -93,7 +93,7 @@ A basic example is:
>>> flo.add(CatTalk(), DogTalk(provides="dog"))
<taskflow.patterns.linear_flow.Flow object at 0x...>
>>> eng = engines.load(flo, store={'meow': 'meow', 'woof': 'woof'})
>>> eng.task_notifier.register(ANY, task_transition)
>>> eng.atom_notifier.register(ANY, task_transition)
>>> eng.run()
Task 'CatTalk' transition to state RUNNING
meow

View File

@@ -18,11 +18,6 @@ Banner
.. automodule:: taskflow.utils.banner
Deprecation
~~~~~~~~~~~
.. automodule:: taskflow.utils.deprecation
Eventlet
~~~~~~~~

View File

@@ -96,19 +96,6 @@ class ExecutorConductor(base.Conductor):
"""This attribute *can* be overridden by subclasses (for example if
an eventlet *green* event works better for the conductor user)."""
START_FINISH_EVENTS_EMITTED = tuple([
'compilation', 'preparation',
'validation', 'running',
])
"""Events will be emitted for the start and finish of each engine
activity defined above, the actual event name that can be registered
to subscribe to will be ``${event}_start`` and ``${event}_end`` where
the ``${event}`` in this pseudo-variable will be one of these events.
.. deprecated:: 1.23.0
Use :py:attr:`~EVENTS_EMITTED`
"""
EVENTS_EMITTED = tuple([
'compilation_start', 'compilation_end',
'preparation_start', 'preparation_end',
@@ -151,13 +138,6 @@ class ExecutorConductor(base.Conductor):
The method returns immediately regardless of whether the conductor has
been stopped.
.. deprecated:: 0.8
The ``timeout`` parameter is **deprecated** and is present for
backward compatibility **only**. In order to wait for the
conductor to gracefully shut down, :py:meth:`wait` should be used
instead.
"""
self._wait_timeout.interrupt()

View File

@@ -1,31 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Deprecated compatibility shim for the old single-threaded conductor.

Importing this module emits a removal warning (deprecated since 0.8,
scheduled for removal in 2.0); the actual implementation lives in
``taskflow.conductors.backends.impl_blocking``.
"""

from debtcollector import moves
from debtcollector import removals

from taskflow.conductors.backends import impl_blocking

# TODO(harlowja): remove this module soon...
# Emits a warning at import time pointing users at the conductor
# entrypoints instead of this module.
removals.removed_module(__name__,
                        replacement="the conductor entrypoints",
                        version="0.8", removal_version="2.0",
                        stacklevel=4)

# TODO(harlowja): remove this proxy/legacy class soon...
# Proxy class that warns on use and forwards to the blocking conductor.
SingleThreadedConductor = moves.moved_class(
    impl_blocking.BlockingConductor, 'SingleThreadedConductor',
    __name__, version="0.8", removal_version="2.0")

View File

@@ -17,7 +17,6 @@
import abc
from debtcollector import moves
import six
from taskflow.types import notifier
@@ -30,10 +29,6 @@ class Engine(object):
:ivar notifier: A notification object that will dispatch events that
occur related to the flow the engine contains.
:ivar task_notifier: A notification object that will dispatch events that
occur related to the tasks the engine contains.
occur related to the tasks the engine
contains (deprecated).
:ivar atom_notifier: A notification object that will dispatch events that
occur related to the atoms the engine contains.
"""
@@ -51,21 +46,6 @@ class Engine(object):
"""The flow notifier."""
return self._notifier
@property
@moves.moved_property('atom_notifier', version="0.6",
removal_version="2.0")
def task_notifier(self):
"""The task notifier.
.. deprecated:: 0.6
The property is **deprecated** and is present for
backward compatibility **only**. In order to access this
property going forward the :py:attr:`.atom_notifier` should
be used instead.
"""
return self._atom_notifier
@property
def atom_notifier(self):
"""The atom notifier."""
@@ -151,8 +131,3 @@ class Engine(object):
not currently be preempted) and move the engine into a suspend state
which can then later be resumed from.
"""
# TODO(harlowja): remove in 0.7 or later...
EngineBase = moves.moved_class(Engine, 'EngineBase', __name__,
version="0.6", removal_version="2.0")

View File

@@ -15,10 +15,7 @@
# under the License.
import contextlib
import itertools
import traceback
from debtcollector import renames
from oslo_utils import importutils
from oslo_utils import reflection
import six
@@ -38,69 +35,28 @@ ENGINES_NAMESPACE = 'taskflow.engines'
# The default entrypoint engine type looked for when it is not provided.
ENGINE_DEFAULT = 'default'
# TODO(harlowja): only used during the deprecation cycle, remove it once
# ``_extract_engine_compat`` is also gone...
_FILE_NAMES = [__file__]
if six.PY2:
# Due to a bug in py2.x the __file__ may point to the pyc file & since
# we are using the traceback module and that module only shows py files
# we have to do a slight adjustment to ensure we match correctly...
#
# This is addressed in https://www.python.org/dev/peps/pep-3147/#file
if __file__.endswith("pyc"):
_FILE_NAMES.append(__file__[0:-1])
_FILE_NAMES = tuple(_FILE_NAMES)
def _extract_engine(**kwargs):
def _extract_engine(engine, **kwargs):
"""Extracts the engine kind and any associated options."""
kind = engine
if not kind:
kind = ENGINE_DEFAULT
def _compat_extract(**kwargs):
options = {}
kind = kwargs.pop('engine', None)
engine_conf = kwargs.pop('engine_conf', None)
if engine_conf is not None:
if isinstance(engine_conf, six.string_types):
kind = engine_conf
else:
options.update(engine_conf)
kind = options.pop('engine', None)
if not kind:
kind = ENGINE_DEFAULT
# See if it's a URI and if so, extract any further options...
try:
uri = misc.parse_uri(kind)
except (TypeError, ValueError):
pass
else:
kind = uri.scheme
options = misc.merge_uri(uri, options.copy())
# Merge in any leftover **kwargs into the options, this makes it so
# that the provided **kwargs override any URI or engine_conf specific
# options.
options.update(kwargs)
return (kind, options)
engine_conf = kwargs.get('engine_conf', None)
if engine_conf is not None:
# Figure out where our code ends and the calling code begins (this is
# needed since this code is called from two functions in this module,
# which means the stack level will vary by one depending on that).
finder = itertools.takewhile(
lambda frame: frame[0] in _FILE_NAMES,
reversed(traceback.extract_stack(limit=3)))
stacklevel = sum(1 for _frame in finder)
decorator = renames.renamed_kwarg('engine_conf', 'engine',
version="0.6",
removal_version="2.0",
# Three is added on since the
# decorator adds three of its own
# stack levels that we need to
# hop out of...
stacklevel=stacklevel + 3)
return decorator(_compat_extract)(**kwargs)
# See if it's a URI and if so, extract any further options...
options = {}
try:
uri = misc.parse_uri(kind)
except (TypeError, ValueError):
pass
else:
return _compat_extract(**kwargs)
kind = uri.scheme
options = misc.merge_uri(uri, options.copy())
# Merge in any leftover **kwargs into the options, this makes it so
# that the provided **kwargs override any URI/engine specific
# options.
options.update(kwargs)
return (kind, options)
def _fetch_factory(factory_name):
@@ -128,8 +84,8 @@ def _fetch_validate_factory(flow_factory):
def load(flow, store=None, flow_detail=None, book=None,
engine_conf=None, backend=None,
namespace=ENGINES_NAMESPACE, engine=ENGINE_DEFAULT, **kwargs):
backend=None, namespace=ENGINES_NAMESPACE,
engine=ENGINE_DEFAULT, **kwargs):
"""Load a flow into an engine.
This function creates and prepares an engine to run the provided flow. All
@@ -146,33 +102,24 @@ def load(flow, store=None, flow_detail=None, book=None,
:py:func:`~taskflow.persistence.backends.fetch` to obtain a
viable backend.
.. deprecated:: 0.6
The ``engine_conf`` argument is **deprecated** and is present
for backward compatibility **only**. In order to provide this
argument going forward the ``engine`` string (or URI) argument
should be used instead.
:param flow: flow to load
:param store: dict -- data to put to storage to satisfy flow requirements
:param flow_detail: FlowDetail that holds the state of the flow (if one is
not provided then one will be created for you in the provided backend)
:param book: LogBook to create flow detail in if flow_detail is None
:param engine_conf: engine type or URI and options (**deprecated**)
:param backend: storage backend to use or configuration that defines it
:param namespace: driver namespace for stevedore (or empty for default)
:param engine: string engine type or URI string with scheme that contains
the engine type and any URI specific components that will
become part of the engine options.
:param kwargs: arbitrary keyword arguments passed as options (merged with
any extracted ``engine`` and ``engine_conf`` options),
typically used for any engine specific options that do not
fit as any of the existing arguments.
any extracted ``engine``), typically used for any engine
specific options that do not fit as any of the
existing arguments.
:returns: engine
"""
kind, options = _extract_engine(engine_conf=engine_conf,
engine=engine, **kwargs)
kind, options = _extract_engine(engine, **kwargs)
if isinstance(backend, dict):
backend = p_backends.fetch(backend)
@@ -197,7 +144,7 @@ def load(flow, store=None, flow_detail=None, book=None,
def run(flow, store=None, flow_detail=None, book=None,
engine_conf=None, backend=None, namespace=ENGINES_NAMESPACE,
backend=None, namespace=ENGINES_NAMESPACE,
engine=ENGINE_DEFAULT, **kwargs):
"""Run the flow.
@@ -206,19 +153,12 @@ def run(flow, store=None, flow_detail=None, book=None,
The arguments are interpreted as for :func:`load() <load>`.
.. deprecated:: 0.6
The ``engine_conf`` argument is **deprecated** and is present
for backward compatibility **only**. In order to provide this
argument going forward the ``engine`` string (or URI) argument
should be used instead.
:returns: dictionary of all named
results (see :py:meth:`~.taskflow.storage.Storage.fetch_all`)
"""
engine = load(flow, store=store, flow_detail=flow_detail, book=book,
engine_conf=engine_conf, backend=backend,
namespace=namespace, engine=engine, **kwargs)
backend=backend, namespace=namespace,
engine=engine, **kwargs)
engine.run()
return engine.storage.fetch_all()
@@ -262,7 +202,7 @@ def save_factory_details(flow_detail,
def load_from_factory(flow_factory, factory_args=None, factory_kwargs=None,
store=None, book=None, engine_conf=None, backend=None,
store=None, book=None, backend=None,
namespace=ENGINES_NAMESPACE, engine=ENGINE_DEFAULT,
**kwargs):
"""Loads a flow from a factory function into an engine.
@@ -278,13 +218,6 @@ def load_from_factory(flow_factory, factory_args=None, factory_kwargs=None,
Further arguments are interpreted as for :func:`load() <load>`.
.. deprecated:: 0.6
The ``engine_conf`` argument is **deprecated** and is present
for backward compatibility **only**. In order to provide this
argument going forward the ``engine`` string (or URI) argument
should be used instead.
:returns: engine
"""
@@ -301,7 +234,7 @@ def load_from_factory(flow_factory, factory_args=None, factory_kwargs=None,
flow_factory, factory_args, factory_kwargs,
backend=backend)
return load(flow=flow, store=store, flow_detail=flow_detail, book=book,
engine_conf=engine_conf, backend=backend, namespace=namespace,
backend=backend, namespace=namespace,
engine=engine, **kwargs)
@@ -332,7 +265,7 @@ def flow_from_detail(flow_detail):
return factory_fun(*args, **kwargs)
def load_from_detail(flow_detail, store=None, engine_conf=None, backend=None,
def load_from_detail(flow_detail, store=None, backend=None,
namespace=ENGINES_NAMESPACE, engine=ENGINE_DEFAULT,
**kwargs):
"""Reloads an engine previously saved.
@@ -345,16 +278,9 @@ def load_from_detail(flow_detail, store=None, engine_conf=None, backend=None,
Further arguments are interpreted as for :func:`load() <load>`.
.. deprecated:: 0.6
The ``engine_conf`` argument is **deprecated** and is present
for backward compatibility **only**. In order to provide this
argument going forward the ``engine`` string (or URI) argument
should be used instead.
:returns: engine
"""
flow = flow_from_detail(flow_detail)
return load(flow, flow_detail=flow_detail,
store=store, engine_conf=engine_conf, backend=backend,
store=store, backend=backend,
namespace=namespace, engine=engine, **kwargs)

View File

@@ -35,7 +35,7 @@ from taskflow.jobs import backends as job_backends
from taskflow import logging as taskflow_logging
from taskflow.patterns import linear_flow as lf
from taskflow.persistence import backends as persistence_backends
from taskflow.persistence import logbook
from taskflow.persistence import models
from taskflow import task
from taskflow.types import timing
@@ -176,9 +176,9 @@ def run_poster():
# unit of work we want to complete and the factory that
# can be called to create the tasks that the work unit needs
# to be done.
lb = logbook.LogBook("post-from-%s" % my_name)
fd = logbook.FlowDetail("song-from-%s" % my_name,
uuidutils.generate_uuid())
lb = models.LogBook("post-from-%s" % my_name)
fd = models.FlowDetail("song-from-%s" % my_name,
uuidutils.generate_uuid())
lb.add(fd)
with contextlib.closing(persist_backend.get_connection()) as conn:
conn.save_logbook(lb)

View File

@@ -42,7 +42,7 @@ from taskflow import task
from taskflow.types import notifier
class PokeFutureListener(base.ListenerBase):
class PokeFutureListener(base.Listener):
def __init__(self, engine, future, task_name):
super(PokeFutureListener, self).__init__(
engine,

View File

@@ -18,7 +18,6 @@ from __future__ import absolute_import
import abc
from debtcollector import moves
from oslo_utils import excutils
import six
@@ -164,11 +163,6 @@ class Listener(object):
self._engine, exc_info=True)
# TODO(harlowja): remove in 0.7 or later...
ListenerBase = moves.moved_class(Listener, 'ListenerBase', __name__,
version="0.6", removal_version="2.0")
@six.add_metaclass(abc.ABCMeta)
class DumpingListener(Listener):
"""Abstract base class for dumping listeners.
@@ -208,25 +202,3 @@ class DumpingListener(Listener):
self._dump("%s has moved task '%s' (%s) into state '%s'"
" from state '%s'", self._engine, details['task_name'],
details['task_uuid'], state, details['old_state'])
# TODO(harlowja): remove in 0.7 or later...
class LoggingBase(moves.moved_class(DumpingListener,
'LoggingBase', __name__,
version="0.6", removal_version="2.0")):
"""Legacy logging base.
.. deprecated:: 0.6
This class is **deprecated** and is present for backward
compatibility **only**, its replacement
:py:class:`.DumpingListener` should be used going forward.
"""
def _dump(self, message, *args, **kwargs):
self._log(message, *args, **kwargs)
@abc.abstractmethod
def _log(self, message, *args, **kwargs):
"""Logs the provided *templated* message to some output."""

View File

@@ -20,7 +20,6 @@ import itertools
import six
import time
from debtcollector import moves
from oslo_utils import timeutils
from taskflow.engines.action_engine import compiler as co
@@ -104,11 +103,6 @@ class DurationListener(base.Listener):
self._record_ending(timer, item_type, item_name, state)
TimingListener = moves.moved_class(DurationListener,
'TimingListener', __name__,
version="0.8", removal_version="2.0")
class PrintingDurationListener(DurationListener):
"""Listener that prints the duration as well as recording it."""
@@ -132,11 +126,6 @@ class PrintingDurationListener(DurationListener):
self._printer("'%s' %s started." % (item_name, item_type))
PrintingTimingListener = moves.moved_class(
PrintingDurationListener, 'PrintingTimingListener', __name__,
version="0.8", removal_version="2.0")
class EventTimeListener(base.Listener):
"""Listener that captures task, flow, and retry event timestamps.

View File

@@ -20,7 +20,6 @@ import copy
import itertools
import posixpath as pp
from debtcollector import removals
import fasteners
import six
@@ -193,35 +192,14 @@ class FakeFilesystem(object):
return [selector_func(node, child_node)
for child_node in node.bfs_iter()]
@removals.removed_kwarg('recursive', version="0.11", removal_version="2.0")
def ls(self, path, recursive=False):
"""Return list of all children of the given path.
NOTE(harlowja): if ``recursive`` is passed in as truthy then the
absolute path is **always** returned (not the relative path). If
``recursive`` is left as the default or falsey then the
relative path is **always** returned.
This is documented in bug `1458114`_ and the existing behavior is
being maintained, to get a recursive version that is absolute (or is
not absolute) it is recommended to use the :py:meth:`.ls_r` method
instead.
.. deprecated:: 0.11
In a future release the ``recursive`` keyword argument will
be removed (so preferring and moving to the :py:meth:`.ls_r` should
occur earlier rather than later).
.. _1458114: https://bugs.launchpad.net/taskflow/+bug/1458114
"""
def ls(self, path, absolute=False):
"""Return list of all children of the given path (not recursive)."""
node = self._fetch_node(path)
if recursive:
if absolute:
selector_func = self._metadata_path_selector
child_node_it = node.bfs_iter()
else:
selector_func = self._up_to_root_selector
child_node_it = iter(node)
child_node_it = iter(node)
return [selector_func(node, child_node)
for child_node in child_node_it]

View File

@@ -1,37 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Deprecated alias module for ``taskflow.persistence.models``.

Importing this module emits a removal warning (deprecated since 1.15,
scheduled for removal in 2.0); all names below are plain aliases of the
equivalents in the ``models`` module.
"""

from debtcollector import removals

from taskflow.persistence import models

# TODO(harlowja): remove me in a future version, since the models
# module is more appropriately named to what the objects in it are used for...
removals.removed_module(__name__, replacement="'%s'" % models.__name__,
                        version="1.15", removal_version='2.0',
                        stacklevel=4)

# Keep alias classes/functions... around until this module is removed.
LogBook = models.LogBook
FlowDetail = models.FlowDetail
AtomDetail = models.AtomDetail
TaskDetail = models.TaskDetail
RetryDetail = models.RetryDetail
atom_detail_type = models.atom_detail_type
atom_detail_class = models.atom_detail_class
ATOM_TYPES = models.ATOM_TYPES

View File

@@ -25,7 +25,7 @@ import testscenarios
from taskflow import exceptions as exc
from taskflow.persistence import backends
from taskflow.persistence.backends import impl_dir
from taskflow.persistence import logbook
from taskflow.persistence import models
from taskflow import test
from taskflow.tests.unit.persistence import base
@@ -81,7 +81,7 @@ class DirPersistenceTest(testscenarios.TestWithScenarios,
for i in range(0, int(1.5 * self.max_cache_size)):
lb_name = 'book-%s' % (i)
lb_id = uuidutils.generate_uuid()
lb = logbook.LogBook(name=lb_name, uuid=lb_id)
lb = models.LogBook(name=lb_name, uuid=lb_id)
self.assertRaises(exc.NotFound, conn.get_logbook, lb_id)
conn.save_logbook(lb)
books_ids_made.append(lb_id)

View File

@@ -71,26 +71,6 @@ class MemoryFilesystemTest(test.TestCase):
self.assertEqual('c', fs['/c'])
self.assertEqual('db', fs['/d/b'])
def test_old_ls_recursive(self):
fs = impl_memory.FakeFilesystem()
fs.ensure_path("/d")
fs.ensure_path("/c/d")
fs.ensure_path("/b/c/d")
fs.ensure_path("/a/b/c/d")
contents = fs.ls("/", recursive=True)
self.assertEqual([
'/a',
'/b',
'/c',
'/d',
'/a/b',
'/b/c',
'/c/d',
'/a/b/c',
'/b/c/d',
'/a/b/c/d',
], contents)
def test_ls_recursive(self):
fs = impl_memory.FakeFilesystem()
fs.ensure_path("/d")
@@ -140,6 +120,24 @@ class MemoryFilesystemTest(test.TestCase):
contents = fs.ls_r("/a/b", absolute=False)
self.assertEqual(['c', 'c/d'], contents)
def test_ls_targeted(self):
fs = impl_memory.FakeFilesystem()
fs.ensure_path("/d")
fs.ensure_path("/c/d")
fs.ensure_path("/b/c/d")
fs.ensure_path("/a/b/c/d")
contents = fs.ls("/a/b", absolute=False)
self.assertEqual(['c'], contents)
def test_ls_targeted_absolute(self):
fs = impl_memory.FakeFilesystem()
fs.ensure_path("/d")
fs.ensure_path("/c/d")
fs.ensure_path("/b/c/d")
fs.ensure_path("/a/b/c/d")
contents = fs.ls("/a/b", absolute=True)
self.assertEqual(['/a/b/c'], contents)
def test_ls_recursive_targeted_absolute(self):
fs = impl_memory.FakeFilesystem()
fs.ensure_path("/d")
@@ -149,15 +147,6 @@ class MemoryFilesystemTest(test.TestCase):
contents = fs.ls_r("/a/b", absolute=True)
self.assertEqual(['/a/b/c', '/a/b/c/d'], contents)
def test_old_ls_recursive_targeted_absolute(self):
fs = impl_memory.FakeFilesystem()
fs.ensure_path("/d")
fs.ensure_path("/c/d")
fs.ensure_path("/b/c/d")
fs.ensure_path("/a/b/c/d")
contents = fs.ls("/a/b", recursive=True)
self.assertEqual(['/a/b/c', '/a/b/c/d'], contents)
def test_ensure_path(self):
fs = impl_memory.FakeFilesystem()
pieces = ['a', 'b', 'c']

View File

@@ -1201,7 +1201,7 @@ class RetryParallelExecutionTest(utils.EngineTestBase):
utils.ConditionalTask('task2')
)
engine = self._make_engine(flow)
engine.task_notifier.register('*', waiting_task.callback)
engine.atom_notifier.register('*', waiting_task.callback)
engine.storage.inject({'y': 2})
with utils.CaptureListener(engine, capture_flow=False) as capturer:
engine.run()
@@ -1237,7 +1237,7 @@ class RetryParallelExecutionTest(utils.EngineTestBase):
utils.ConditionalTask('task3'))
)
engine = self._make_engine(flow)
engine.task_notifier.register('*', waiting_task.callback)
engine.atom_notifier.register('*', waiting_task.callback)
engine.storage.inject({'y': 2})
with utils.CaptureListener(engine, capture_flow=False) as capturer:
engine.run()

View File

@@ -1,37 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Deprecated alias module for the ``automaton`` library.

Importing this module emits a removal warning (deprecated since 1.16,
scheduled for removal in 2.0); all names below are plain aliases of the
equivalents in ``automaton``.
"""

import automaton
from automaton import exceptions as excp
from automaton import machines

from debtcollector import removals

# TODO(harlowja): remove me in a future version, since the 'automaton'
# library is the replacement for this whole module...
removals.removed_module(__name__,
                        replacement="the '%s' library" % automaton.__name__,
                        version="1.16", removal_version='2.0',
                        stacklevel=4)

# Keep alias classes/functions... around until this module is removed.
FSM = machines.FiniteMachine
FrozenMachine = excp.FrozenMachine
NotInitialized = excp.NotInitialized
InvalidState = excp.InvalidState
NotFound = excp.NotFound
Duplicate = excp.Duplicate

View File

@@ -1,36 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Deprecated alias module for the ``futurist`` library.

Importing this module emits a removal warning (deprecated since 1.15,
scheduled for removal in 2.0); all names below are plain aliases of the
equivalents in ``futurist``.
"""

from debtcollector import removals

import futurist

# TODO(harlowja): remove me in a future version, since the futurist
# is the replacement for this whole module...
removals.removed_module(__name__,
                        replacement="the '%s' library" % futurist.__name__,
                        version="1.15", removal_version='2.0',
                        stacklevel=4)

# Keep alias classes/functions... around until this module is removed.
Future = futurist.Future
ThreadPoolExecutor = futurist.ThreadPoolExecutor
GreenThreadPoolExecutor = futurist.GreenThreadPoolExecutor
ProcessPoolExecutor = futurist.ProcessPoolExecutor
GreenFuture = futurist.GreenFuture
SynchronousExecutor = futurist.SynchronousExecutor
ExecutorStatistics = futurist.ExecutorStatistics

View File

@@ -1,32 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Deprecated alias module for ``futurist.periodics``.

Importing this module emits a removal warning (deprecated since 1.15,
scheduled for removal in 2.0); the names below are plain aliases of the
equivalents in ``futurist.periodics``.
"""

from debtcollector import removals

import futurist
from futurist import periodics

# TODO(harlowja): remove me in a future version, since the futurist
# is the replacement for this whole module...
removals.removed_module(__name__,
                        replacement="the '%s' library" % futurist.__name__,
                        version="1.15", removal_version='2.0',
                        stacklevel=4)

# Keep alias classes/functions... around until this module is removed.
periodic = periodics.periodic
PeriodicWorker = periodics.PeriodicWorker

View File

@@ -1,147 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
import os
from debtcollector import removals
import six
# TODO(harlowja): remove me in a future version, since the futurist
# is the replacement for this whole module...
removals.removed_module(__name__,
replacement="the 'prettytable' library",
version="1.16", removal_version='2.0',
stacklevel=4)
class PleasantTable(object):
    """A tiny pretty printing table (like prettytable/tabulate but smaller).

    Creates simply formatted tables (with no special sauce)::

        >>> from taskflow.types import table
        >>> tbl = table.PleasantTable(['Name', 'City', 'State', 'Country'])
        >>> tbl.add_row(["Josh", "San Jose", "CA", "USA"])
        >>> print(tbl.pformat())
        +------+----------+-------+---------+
        Name | City | State | Country
        +------+----------+-------+---------+
        Josh | San Jose | CA | USA
        +------+----------+-------+---------+
    """
    # Constants used when pretty formatting the table.
    COLUMN_STARTING_CHAR = ' '
    COLUMN_ENDING_CHAR = ''
    COLUMN_SEPARATOR_CHAR = '|'
    HEADER_FOOTER_JOINING_CHAR = '+'
    HEADER_FOOTER_CHAR = '-'
    LINE_SEP = os.linesep

    @staticmethod
    def _center_text(text, max_len, fill=' '):
        # Center ``text`` within a field of ``max_len`` characters,
        # padding with ``fill`` on both sides.
        return '{0:{fill}{align}{size}}'.format(text, fill=fill,
                                                align="^", size=max_len)

    @classmethod
    def _size_selector(cls, possible_sizes):
        """Select the maximum size, utility function for adding borders.

        The number two is used so that the edges of a column have spaces
        around them (instead of being right next to a column separator).

        :param possible_sizes: possible sizes available
        :returns: maximum size
        :rtype: number
        """
        try:
            return max(x + 2 for x in possible_sizes)
        except ValueError:
            # max() raises ValueError on an empty iterable; an empty
            # column set simply gets a zero width.
            return 0

    def __init__(self, columns):
        # Column headers are stripped of surrounding whitespace up front so
        # width calculations only consider the visible text.
        if len(columns) == 0:
            raise ValueError("Column count must be greater than zero")
        self._columns = [column.strip() for column in columns]
        self._rows = []

    def add_row(self, row):
        """Append one row; it must have exactly one cell per column.

        Cells are stringified (via ``six.text_type``, imported at module
        level) on insertion so later width math works on text lengths.
        """
        if len(row) != len(self._columns):
            raise ValueError("Row must have %s columns instead of"
                             " %s columns" % (len(self._columns), len(row)))
        self._rows.append([six.text_type(column) for column in row])

    def pformat(self):
        """Return the table formatted as a single printable string."""
        # Figure out the maximum column sizes...
        column_count = len(self._columns)
        column_sizes = [0] * column_count
        headers = []
        for i, column in enumerate(self._columns):
            # Each column is as wide as its widest cell (header included),
            # plus the two-space padding added by _size_selector.
            possible_sizes_iter = itertools.chain(
                [len(column)], (len(row[i]) for row in self._rows))
            column_sizes[i] = self._size_selector(possible_sizes_iter)
            headers.append(self._center_text(column, column_sizes[i]))
        # Build the header and footer prefix/postfix.
        header_footer_buf = six.StringIO()
        header_footer_buf.write(self.HEADER_FOOTER_JOINING_CHAR)
        for i, header in enumerate(headers):
            header_footer_buf.write(self.HEADER_FOOTER_CHAR * len(header))
            if i + 1 != column_count:
                header_footer_buf.write(self.HEADER_FOOTER_JOINING_CHAR)
        header_footer_buf.write(self.HEADER_FOOTER_JOINING_CHAR)
        # Build the main header.
        content_buf = six.StringIO()
        content_buf.write(header_footer_buf.getvalue())
        content_buf.write(self.LINE_SEP)
        content_buf.write(self.COLUMN_STARTING_CHAR)
        for i, header in enumerate(headers):
            if i + 1 == column_count:
                # Last column: either close with the ending char or trim
                # the trailing padding (the ending char is empty by default).
                if self.COLUMN_ENDING_CHAR:
                    content_buf.write(headers[i])
                    content_buf.write(self.COLUMN_ENDING_CHAR)
                else:
                    content_buf.write(headers[i].rstrip())
            else:
                content_buf.write(headers[i])
                content_buf.write(self.COLUMN_SEPARATOR_CHAR)
        content_buf.write(self.LINE_SEP)
        content_buf.write(header_footer_buf.getvalue())
        # Build the main content.
        row_count = len(self._rows)
        if row_count:
            content_buf.write(self.LINE_SEP)
            for i, row in enumerate(self._rows):
                pieces = []
                for j, column in enumerate(row):
                    pieces.append(self._center_text(column, column_sizes[j]))
                    if j + 1 != column_count:
                        pieces.append(self.COLUMN_SEPARATOR_CHAR)
                blob = ''.join(pieces)
                if self.COLUMN_ENDING_CHAR:
                    content_buf.write(self.COLUMN_STARTING_CHAR)
                    content_buf.write(blob)
                    content_buf.write(self.COLUMN_ENDING_CHAR)
                else:
                    # No ending char: drop trailing padding, and skip an
                    # entirely-blank row rather than emitting whitespace.
                    blob = blob.rstrip()
                    if blob:
                        content_buf.write(self.COLUMN_STARTING_CHAR)
                        content_buf.write(blob)
                if i + 1 != row_count:
                    content_buf.write(self.LINE_SEP)
            # Close the table with a bottom border after the last row.
            content_buf.write(self.LINE_SEP)
            content_buf.write(header_footer_buf.getvalue())
        return content_buf.getvalue()

View File

@@ -1,147 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import warnings
from oslo_utils import reflection
def deprecation(message, stacklevel=None):
    """Warns about some type of deprecation that has been (or will be) made.

    This helper function makes it easier to interact with the warnings module
    by standardizing the arguments that the warning function receives so that
    it is easier to use.

    This should be used to emit warnings to users (users can easily turn these
    warnings off/on, see https://docs.python.org/2/library/warnings.html
    as they see fit so that the messages do not fill up the users logs with
    warnings that they do not wish to see in production) about functions,
    methods, attributes or other code that is deprecated and will be removed
    in a future release (this is done using these warnings to avoid breaking
    existing users of those functions, methods, code; which a library should
    avoid doing by always giving at *least* N + 1 release for users to address
    the deprecation warnings).

    :param message: warning text to emit
    :param stacklevel: optional ``stacklevel`` forwarded to
                       :func:`warnings.warn` so that the warning is
                       attributed to the desired frame in the call stack
                       (when not provided the warnings module default is
                       used, which points at the direct caller)
    """
    if stacklevel is None:
        warnings.warn(message, category=DeprecationWarning)
    else:
        warnings.warn(message,
                      category=DeprecationWarning, stacklevel=stacklevel)
# Helper accessors for the moved proxy (since it will not have easy access
# to its own getattr and setattr functions).
#
# MovedClassProxy (below) overrides __getattribute__/__setattr__/__delattr__
# to forward everything to the wrapped class, so these raw object accessors
# are the only safe way to read/write the proxy's *own* slots without
# recursing back into those overridden methods.
_setattr = object.__setattr__
_getattr = object.__getattribute__
class MovedClassProxy(object):
    """Acts as a proxy to a class that was moved to another location.

    Every interaction with the proxy that reaches the wrapped class
    (calling it, ``isinstance``/``issubclass`` checks) first emits a
    :py:class:`DeprecationWarning` (via the module level ``deprecation``
    helper) telling users where the class now lives, then delegates to the
    wrapped class.

    Partially based on:

    http://code.activestate.com/recipes/496741-object-proxying/ and other
    various examination of how to make a good enough proxy for our usage to
    move the various types we want to move during the deprecation process.

    And partially based on the wrapt object proxy (which we should just use
    when it becomes available @ http://review.openstack.org/#/c/94754/).
    """

    __slots__ = [
        '__wrapped__', '__message__', '__stacklevel__',
        # Ensure weakrefs can be made,
        # https://docs.python.org/2/reference/datamodel.html#slots
        '__weakref__',
    ]

    def __init__(self, wrapped, message, stacklevel):
        # We can't assign to these directly, since we are overriding getattr
        # and setattr and delattr so we have to do this hoop jump to ensure
        # that we don't invoke those methods (and cause infinite recursion).
        _setattr(self, '__wrapped__', wrapped)
        _setattr(self, '__message__', message)
        _setattr(self, '__stacklevel__', stacklevel)
        # NOTE: a previous revision attempted to copy the wrapped class's
        # ``__qualname__`` onto the proxy here inside a try/except; because
        # '__qualname__' is not listed in ``__slots__`` (and slotted
        # instances have no ``__dict__``) that assignment always raised
        # ``AttributeError`` and was silently swallowed, so the dead code
        # has been removed.

    def __instancecheck__(self, instance):
        # Warn, then defer isinstance() checks to the wrapped class.
        deprecation(_getattr(self, '__message__'),
                    stacklevel=_getattr(self, '__stacklevel__'))
        return isinstance(instance, _getattr(self, '__wrapped__'))

    def __subclasscheck__(self, instance):
        # Warn, then defer issubclass() checks to the wrapped class.
        deprecation(_getattr(self, '__message__'),
                    stacklevel=_getattr(self, '__stacklevel__'))
        return issubclass(instance, _getattr(self, '__wrapped__'))

    def __call__(self, *args, **kwargs):
        # Warn, then construct (or call) the wrapped class.
        deprecation(_getattr(self, '__message__'),
                    stacklevel=_getattr(self, '__stacklevel__'))
        return _getattr(self, '__wrapped__')(*args, **kwargs)

    def __getattribute__(self, name):
        # Transparently forward all attribute access to the wrapped class.
        return getattr(_getattr(self, '__wrapped__'), name)

    def __setattr__(self, name, value):
        # Transparently forward attribute writes to the wrapped class.
        setattr(_getattr(self, '__wrapped__'), name, value)

    def __delattr__(self, name):
        # Transparently forward attribute deletion to the wrapped class.
        delattr(_getattr(self, '__wrapped__'), name)

    def __repr__(self):
        wrapped = _getattr(self, '__wrapped__')
        return "<%s at 0x%x for %r at 0x%x>" % (
            type(self).__name__, id(self), wrapped, id(wrapped))
def _generate_message(prefix, postfix=None, message=None,
version=None, removal_version=None):
message_components = [prefix]
if version:
message_components.append(" in version '%s'" % version)
if removal_version:
if removal_version == "?":
message_components.append(" and will be removed in a future"
" version")
else:
message_components.append(" and will be removed in version '%s'"
% removal_version)
if postfix:
message_components.append(postfix)
if message:
message_components.append(": %s" % message)
return ''.join(message_components)
def moved_proxy_class(new_class, old_class_name, old_module_name,
                      message=None, version=None, removal_version=None,
                      stacklevel=3):
    """Deprecates a class that was moved to another location.

    This will emit warnings when the old locations class is initialized,
    telling where the new and improved location for the old class now is.

    :param new_class: the class object at its new location
    :param old_class_name: unqualified name the class had at the old location
    :param old_module_name: module path the class used to live in
    :param message: optional extra text appended to the warning
    :param version: version the move happened in
    :param removal_version: version the old alias will be removed in
    :param stacklevel: stacklevel used when the proxy emits its warning
    :returns: a :py:class:`MovedClassProxy` wrapping ``new_class``
    """
    qualified_old_name = "%s.%s" % (old_module_name, old_class_name)
    qualified_new_name = reflection.get_class_name(new_class)
    warning_text = _generate_message(
        "Class '%s' has moved to '%s'" % (qualified_old_name,
                                          qualified_new_name),
        message=message, version=version,
        removal_version=removal_version)
    return MovedClassProxy(new_class, warning_text, stacklevel=stacklevel)

View File

@@ -37,8 +37,6 @@ from oslo_utils import reflection
import six
from taskflow.types import failure
from taskflow.types import notifier
from taskflow.utils import deprecation
UNKNOWN_HOSTNAME = "<unknown>"
@@ -479,16 +477,6 @@ def ensure_tree(path):
raise
Failure = deprecation.moved_proxy_class(failure.Failure,
'Failure', __name__,
version="0.6", removal_version="2.0")
Notifier = deprecation.moved_proxy_class(notifier.Notifier,
'Notifier', __name__,
version="0.6", removal_version="2.0")
@contextlib.contextmanager
def capture_failure():
"""Captures the occurring exception and provides a failure object back.