switch to importlib.metadata package
Load entry points using 'importlib.metadata' instead of 'pkg_resources'.

Include a caching layer. The cache stores the parsed text data from all of the ini input files in a single JSON file with a name based on the hash of the path entries and their mtimes. This should produce a unique filename for each import path, regardless of the use of a virtualenv. The data is stored in a format that means no other files need to be examined or parsed in order to return EntryPoint objects.

Change-Id: I8b08f289d446f4775eac1e1a91997fa96f25f641
Depends-On: Ic6db7af34c87a636bfe55bacae03c42154f4b9c7
Signed-off-by: Doug Hellmann <doug@doughellmann.com>
commit d5297167e0 (parent 01c12eca14)
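The naming scheme described in the message can be sketched roughly as follows (illustrative only; the real implementation is the new stevedore/_cache.py shown further down):

import hashlib
import os
import sys


def cache_filename(path_entries):
    # Mix in the interpreter location so each virtualenv gets its own file,
    # then hash every path entry together with its mtime so any change to
    # the import path produces a different cache file name.
    h = hashlib.sha256()
    h.update(sys.executable.encode('utf-8'))
    for entry in path_entries:
        try:
            mtime = os.stat(entry).st_mtime
        except OSError:
            mtime = -1.0
        h.update(entry.encode('utf-8'))
        h.update(str(mtime).encode('utf-8'))
    return h.hexdigest()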
@@ -7,7 +7,7 @@ and extend your application by discovering and loading extensions
 ("*plugins*") at runtime. Many applications implement their own
 library for doing this, using ``__import__`` or
 :mod:`importlib`. stevedore avoids creating yet another extension
-mechanism by building on top of `setuptools entry points`_. The code
+mechanism by building on top of `entry points`_. The code
 for managing entry points tends to be repetitive, though, so stevedore
 provides manager classes for implementing common patterns for using
 dynamically loaded extensions.
@@ -21,7 +21,7 @@ dynamically loaded extensions.
    install/index


-.. _setuptools entry points: http://setuptools.readthedocs.io/en/latest/pkg_resources.html?#entry-points
+.. _entry points: https://docs.python.org/3/library/importlib.metadata.html#entry-points

 .. rubric:: Indices and tables

@@ -7,7 +7,7 @@ defines the API expected by the plugin code. Each entry point has a
 name, which does not have to be unique within a given namespace. The
 flexibility of this name management system makes it possible to use
 plugins in a variety of ways. The manager classes in stevedore wrap
-:mod:`pkg_resources` to apply different rules matching the patterns
+:mod:`importlib.metadata` to apply different rules matching the patterns
 described here.

 Drivers -- Single Name, Single Entry Point
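For context, plugin packages typically register entry points through setuptools; a hedged example (the package and namespace names here are made up, not part of this change):

# setup.py of a hypothetical plugin package
from setuptools import setup

setup(
    name='example-plugin',
    packages=['example_plugin'],
    entry_points={
        'example.namespace': [
            'simple = example_plugin.simple:Simple',
        ],
    },
)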
@@ -109,7 +109,7 @@ for stevedore is located in ``stevedore.egg-info/entry_points.txt``:
     t2 = stevedore.tests.test_extension:FauxExtension
     t1 = stevedore.tests.test_extension:FauxExtension

-:mod:`pkg_resources` uses the ``entry_points.txt`` file from all of
+:mod:`importlib.metadata` uses the ``entry_points.txt`` file from all of
 the installed packages on the import path to find plugins. You should
 not modify these files, except by changing the list of entry points in
 ``setup.py``.
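A minimal sketch of reading such registrations back with importlib.metadata on Python 3.8/3.9, where entry_points() returns a mapping of group name to EntryPoint tuples (the group name is illustrative):

import importlib.metadata

# 'stevedore.example.formatter' is only an example group name.
for ep in importlib.metadata.entry_points().get('stevedore.example.formatter', ()):
    print(ep.name, ep.value)  # e.g. "t1 stevedore.tests.test_extension:FauxExtension"
    plugin = ep.load()        # import and return the referenced object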
@@ -20,7 +20,9 @@ application.
 * `Using setuptools entry points`_
 * `Package Discovery and Resource Access using pkg_resources`_
 * `Using Entry Points to Write Plugins | Pylons`_
+* `importlib.metadata`_

 .. _Using setuptools entry points: http://reinout.vanrees.org/weblog/2010/01/06/zest-releaser-entry-points.html
 .. _Package Discovery and Resource Access using pkg_resources: http://pythonhosted.org/distribute/pkg_resources.html
 .. _Using Entry Points to Write Plugins | Pylons: http://docs.pylonsproject.org/projects/pylons-webframework/en/latest/advanced_pylons/entry_points_and_plugins.html
+.. _importlib.metadata: https://docs.python.org/3/library/importlib.metadata.html#entry-points
@@ -81,7 +81,7 @@ with :meth:`map` in this example takes two arguments, the

 The :class:`Extension` passed :func:`format_data` is a class defined
 by stevedore that wraps the plugin. It includes the name of the
-plugin, the :class:`EntryPoint` returned by :mod:`pkg_resources`, and
+plugin, the :class:`EntryPoint` returned by :mod:`importlib.metadata`, and
 the plugin itself (the named object referenced by the plugin
 definition). When ``invoke_on_load`` is true, the :class:`Extension`
 will also have an :attr:`obj` attribute containing the value returned
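A short sketch of such a callback, using only the attributes described above (the format() method on the plugin object is assumed for illustration):

def format_data(ext, data):
    # ext.name is the entry point name; ext.obj is the instance created
    # because the manager was constructed with invoke_on_load=True.
    return (ext.name, ext.obj.format(data))

# results = mgr.map(format_data, data)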
@@ -8,6 +8,7 @@ fixtures==3.0.0
 gitdb==0.6.4
 GitPython==1.0.1
 imagesize==0.7.1
+importlib_metadata==1.7.0
 Jinja2==2.10
 linecache2==1.0.0
 MarkupSafe==1.0
@@ -3,3 +3,4 @@
 # process, which may cause wedges in the gate later.

 pbr!=2.1.0,>=2.0.0 # Apache-2.0
+importlib_metadata>=1.7.0;python_version<'3.8' # Apache-2.0
stevedore/_cache.py (new file, 195 lines):
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Use a cache layer in front of entry point scanning."""

import errno
import glob
import hashlib
import itertools
import json
import logging
import os
import os.path
import struct
import sys

try:
    # For python 3.8 and later
    import importlib.metadata as importlib_metadata
except ImportError:
    # For everyone else
    import importlib_metadata


log = logging.getLogger('stevedore._cache')


def _get_cache_dir():
    """Locate a platform-appropriate cache directory to use.

    Does not ensure that the cache directory exists.
    """
    # Linux, Unix, AIX, etc.
    if os.name == 'posix' and sys.platform != 'darwin':
        # use ~/.cache if empty OR not set
        base_path = os.environ.get("XDG_CACHE_HOME", None) \
            or os.path.expanduser('~/.cache')
        return os.path.join(base_path, 'python-entrypoints')

    # Mac OS
    elif sys.platform == 'darwin':
        return os.path.expanduser('~/Library/Caches/Python Entry Points')

    # Windows (hopefully)
    else:
        base_path = os.environ.get('LOCALAPPDATA', None) \
            or os.path.expanduser('~\\AppData\\Local')
        return os.path.join(base_path, 'Python Entry Points')


def _get_mtime(name):
    try:
        s = os.stat(name)
        return s.st_mtime
    except OSError as err:
        if err.errno != errno.ENOENT:
            raise
    return -1.0


def _ftobytes(f):
    return struct.Struct('f').pack(f)


def _hash_settings_for_path(path):
    """Return a hash and the path settings that created it."""
    paths = []
    h = hashlib.sha256()

    # Tie the cache to the python interpreter, in case it is part of a
    # virtualenv.
    h.update(sys.executable.encode('utf-8'))
    h.update(sys.prefix.encode('utf-8'))

    for entry in path:
        mtime = _get_mtime(entry)
        h.update(entry.encode('utf-8'))
        h.update(_ftobytes(mtime))
        paths.append((entry, mtime))

        for ep_file in itertools.chain(
                glob.iglob(os.path.join(entry,
                                        '*.dist-info',
                                        'entry_points.txt')),
                glob.iglob(os.path.join(entry,
                                        '*.egg-info',
                                        'entry_points.txt'))
        ):
            mtime = _get_mtime(ep_file)
            h.update(ep_file.encode('utf-8'))
            h.update(_ftobytes(mtime))
            paths.append((ep_file, mtime))

    return (h.hexdigest(), paths)


def _build_cacheable_data(path):
    real_groups = importlib_metadata.entry_points()
    # Convert the namedtuple values to regular tuples
    groups = {}
    for name, group_data in real_groups.items():
        existing = set()
        members = []
        groups[name] = members
        for ep in group_data:
            # Filter out duplicates that can occur when testing a
            # package that provides entry points using tox, where the
            # package is installed in the virtualenv that tox builds
            # and is present in the path as '.'.
            item = ep[:]  # convert namedtuple to tuple
            if item in existing:
                continue
            existing.add(item)
            members.append(item)
    return {
        'groups': groups,
        'sys.executable': sys.executable,
        'sys.prefix': sys.prefix,
    }


class Cache:

    def __init__(self, cache_dir=None):
        if cache_dir is None:
            cache_dir = _get_cache_dir()
        self._dir = cache_dir
        # In-memory data keyed by the tuple of path entries.
        self._internal = {}

    def _get_data_for_path(self, path):
        if path is None:
            path = sys.path

        internal_key = tuple(path)
        if internal_key in self._internal:
            return self._internal[internal_key]

        digest, path_values = _hash_settings_for_path(path)
        filename = os.path.join(self._dir, digest)
        try:
            log.debug('reading %s', filename)
            with open(filename, 'r') as f:
                data = json.load(f)
        except (IOError, json.JSONDecodeError):
            data = _build_cacheable_data(path)
            data['path_values'] = path_values
            try:
                log.debug('writing to %s', filename)
                os.makedirs(self._dir, exist_ok=True)
                with open(filename, 'w') as f:
                    json.dump(data, f)
            except (IOError, OSError):
                # Could not create cache dir or write file.
                pass

        self._internal[internal_key] = data
        return data

    def get_group_all(self, group, path=None):
        result = []
        data = self._get_data_for_path(path)
        group_data = data.get('groups', {}).get(group, [])
        for vals in group_data:
            result.append(importlib_metadata.EntryPoint(*vals))
        return result

    def get_group_named(self, group, path=None):
        result = {}
        for ep in self.get_group_all(group, path=path):
            if ep.name not in result:
                result[ep.name] = ep
        return result

    def get_single(self, group, name, path=None):
        for ep_name, ep in self.get_group_named(group, path=path).items():
            if ep_name == name:
                return ep
        raise ValueError('No entrypoint {!r} in group {!r}'.format(
            name, group))


_c = Cache()
get_group_all = _c.get_group_all
get_group_named = _c.get_group_named
get_single = _c.get_single
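How the rest of stevedore consumes this module is visible in the extension.py hunk below; a condensed usage sketch (the group name is illustrative):

from stevedore import _cache

# Returns importlib.metadata-style EntryPoint objects rebuilt from the
# cached JSON data, without re-parsing any entry_points.txt files.
for ep in _cache.get_group_all('stevedore.test.faux'):
    print(ep.name, ep.value, ep.group)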
@@ -14,10 +14,9 @@
 """

 import operator
-import pkg_resources

 import logging

+from . import _cache
 from .exception import NoMatches

 LOG = logging.getLogger(__name__)
@@ -34,7 +33,7 @@ class Extension(object):
     :param name: The entry point name.
     :type name: str
     :param entry_point: The EntryPoint instance returned by
-        :mod:`pkg_resources`.
+        :mod:`importlib.metadata`.
     :type entry_point: EntryPoint
     :param plugin: The value returned by entry_point.load()
     :param obj: The object returned by ``plugin(*args, **kwds)`` if the
@@ -55,8 +54,7 @@ class Extension(object):
         :return: A string representation of the target of the entry point in
             'dotted.module:object' format.
         """
-        return '%s:%s' % (self.entry_point.module_name,
-                          self.entry_point.attrs[0])
+        return self.entry_point.value


 class ExtensionManager(object):
@@ -174,7 +172,7 @@ class ExtensionManager(object):

         """
         if self.namespace not in self.ENTRY_POINT_CACHE:
-            eps = list(pkg_resources.iter_entry_points(self.namespace))
+            eps = list(_cache.get_group_all(self.namespace))
             self.ENTRY_POINT_CACHE[self.namespace] = eps
         return self.ENTRY_POINT_CACHE[self.namespace]

@@ -222,7 +220,7 @@ class ExtensionManager(object):
                 ep.require()
             plugin = ep.resolve()
         else:
-            plugin = ep.load(require=verify_requirements)
+            plugin = ep.load()
         if invoke_on_load:
             obj = plugin(*invoke_args, **invoke_kwds)
         else:
@@ -34,29 +34,29 @@ def _simple_list(mgr):
         doc = _get_docstring(ext.plugin) or '\n'
         summary = doc.splitlines()[0].strip()
         yield('* %s -- %s' % (ext.name, summary),
-              ext.entry_point.module_name)
+              ext.entry_point.module)


 def _detailed_list(mgr, over='', under='-', titlecase=False):
     for name in sorted(mgr.names()):
         ext = mgr[name]
         if over:
-            yield (over * len(ext.name), ext.entry_point.module_name)
+            yield (over * len(ext.name), ext.entry_point.module)
         if titlecase:
-            yield (ext.name.title(), ext.entry_point.module_name)
+            yield (ext.name.title(), ext.entry_point.module)
         else:
-            yield (ext.name, ext.entry_point.module_name)
+            yield (ext.name, ext.entry_point.module)
         if under:
-            yield (under * len(ext.name), ext.entry_point.module_name)
+            yield (under * len(ext.name), ext.entry_point.module)
-        yield ('\n', ext.entry_point.module_name)
+        yield ('\n', ext.entry_point.module)
         doc = _get_docstring(ext.plugin)
         if doc:
-            yield (doc, ext.entry_point.module_name)
+            yield (doc, ext.entry_point.module)
         else:
             yield ('.. warning:: No documentation found in %s'
                    % ext.entry_point,
-                   ext.entry_point.module_name)
+                   ext.entry_point.module)
-        yield ('\n', ext.entry_point.module_name)
+        yield ('\n', ext.entry_point.module)


 class ListPluginsDirective(rst.Directive):
@@ -79,7 +79,7 @@ class ListPluginsDirective(rst.Directive):
         underline_style = self.options.get('underline-style', '=')

         def report_load_failure(mgr, ep, err):
-            LOG.warning(u'Failed to load %s: %s' % (ep.module_name, err))
+            LOG.warning(u'Failed to load %s: %s' % (ep.module, err))

         mgr = extension.ExtensionManager(
             namespace,
@@ -13,7 +13,12 @@
 """Tests for stevedore.extension
 """

-import pkg_resources
+try:
+    # For python 3.8 and later
+    import importlib.metadata as importlib_metadata
+except ImportError:
+    # For everyone else
+    import importlib_metadata

 from stevedore import driver
 from stevedore import exception
@@ -68,13 +73,15 @@ class TestCallback(utils.TestCase):
         extensions = [
             extension.Extension(
                 'backend',
-                pkg_resources.EntryPoint.parse('backend = pkg1:driver'),
+                importlib_metadata.EntryPoint(
+                    'backend', 'pkg1:driver', 'backend'),
                 'pkg backend',
                 None,
             ),
             extension.Extension(
                 'backend',
-                pkg_resources.EntryPoint.parse('backend = pkg2:driver'),
+                importlib_metadata.EntryPoint(
+                    'backend', 'pkg2:driver', 'backend'),
                 'pkg backend',
                 None,
             ),
@@ -96,13 +96,13 @@ class TestCallback(utils.TestCase):

     def test_use_cache(self):
         # If we insert something into the cache of entry points,
-        # the manager should not have to call into pkg_resources
+        # the manager should not have to call into the entry point cache
         # to find the plugins.
         cache = extension.ExtensionManager.ENTRY_POINT_CACHE
         cache['stevedore.test.faux'] = []
-        with mock.patch('pkg_resources.iter_entry_points',
+        with mock.patch('stevedore._cache.get_group_all',
                         side_effect=
-                        AssertionError('called iter_entry_points')):
+                        AssertionError('called get_group_all')):
             em = extension.ExtensionManager('stevedore.test.faux')
             names = em.names()
             self.assertEqual(names, [])
@@ -235,9 +235,9 @@ class TestLoadRequirementsOldSetuptools(utils.TestCase):
     def test_verify_requirements(self):
         self.em._load_one_plugin(self.mock_ep, False, (), {},
                                  verify_requirements=True)
-        self.mock_ep.load.assert_called_once_with(require=True)
+        self.mock_ep.load.assert_called_once_with()

     def test_no_verify_requirements(self):
         self.em._load_one_plugin(self.mock_ep, False, (), {},
                                  verify_requirements=False)
-        self.mock_ep.load.assert_called_once_with(require=False)
+        self.mock_ep.load.assert_called_once_with()
@@ -14,20 +14,25 @@

 from unittest import mock

+try:
+    # For python 3.8 and later
+    import importlib.metadata as importlib_metadata
+except ImportError:
+    # For everyone else
+    import importlib_metadata
+
 from stevedore import extension
 from stevedore import sphinxext
 from stevedore.tests import utils

-import pkg_resources
-

 def _make_ext(name, docstring):
     def inner():
         pass

     inner.__doc__ = docstring
-    m1 = mock.Mock(spec=pkg_resources.EntryPoint)
-    m1.module_name = '%s_module' % name
+    m1 = mock.Mock(spec=importlib_metadata.EntryPoint)
+    m1.module = '%s_module' % name
     s = mock.Mock(return_value='ENTRY_POINT(%s)' % name)
     m1.__str__ = s
     return extension.Extension(name, m1, inner, None)