Fix entrypoints being updated/created by update.py

Previously entrypoints were not being created in the
target project's setup.cfg, which meant that the target
project could not correctly use the taskflow-provided
entrypoints. To fix this, add a new helper function to
update.py that adjusts the entrypoint references in the
copied modules and creates (or updates) the setup.cfg
file where those entrypoints are defined.

Fixes bug: 1235988

Change-Id: Ie32324c88e3c3642e58f3532e868f171f973d15e
commit 9417307d09
parent 5acd843da4
Author:    Joshua Harlow
Committer: Joshua Harlow
Date:      2013-10-06 19:50:16 +00:00

 update.py | 339

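In concrete terms, the new create_entrypoints() helper shown below rewrites taskflow's own entrypoint namespaces into the target project's namespace and persists them with ConfigParser into that project's setup.cfg. The following is a rough sketch of that flow, not code from this commit; the project name "myproject" and the entry "some_engine = ..." are illustrative placeholders only.

# Illustrative sketch only (not part of this commit): mirrors the
# ConfigParser handling in create_entrypoints(); "myproject" and
# "some_engine" are placeholder names.
import ConfigParser

import six

cfg_filename = 'setup.cfg'  # the target project's setup.cfg
cfg = ConfigParser.RawConfigParser()
cfg.read([cfg_filename])  # silently tolerates a missing file
try:
    cfg.add_section('entry_points')
except ConfigParser.DuplicateSectionError:
    pass

# taskflow's own 'taskflow.engines' namespace becomes the copying
# project's 'myproject.taskflow.engines' namespace.
cfg.set('entry_points', 'myproject.taskflow.engines', '\n'.join([
    '',
    'some_engine = myproject.taskflow.engines.some_engine:SomeEngine',
]))

buf = six.StringIO()
cfg.write(buf)
# ConfigParser indents continuation lines with tabs; use spaces instead.
contents = buf.getvalue().replace('\t', ' ' * 4)
with open(cfg_filename, 'wb') as fh:
    fh.write(contents)

Running the real update.py against a project produces an [entry_points] section along these lines, pointing at the copied-in modules rather than at taskflow itself.
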
@@ -62,13 +62,21 @@ Obviously, the first way is the easiest!
from __future__ import print_function
import ConfigParser
import collections
import functools
import logging
import os
import os.path
import re
import shutil
import sys
import textwrap
import six
LOG = logging.getLogger(__name__)
from oslo.config import cfg
@@ -87,21 +95,96 @@ OPTS = [
default=None,
help='A config file or destination project directory',
positional=True),
cfg.BoolOpt('verbose', default=False,
short='v',
help='Verbosely show what this program is doing'),
]
ALLOWED_PRIMITIVES = (
'flow',
'task',
'decorators',
'storage',
'engines',
'exceptions',
'flow',
'persistence',
'storage',
'task',
)
IMPORT_FROM = re.compile(r"^\s*from\s+" + BASE_MOD + r"\s*(.*)$")
BASE_CONF = '%s.conf' % (BASE_MOD)
MACHINE_GENERATED = ('# DO NOT EDIT THIS FILE BY HAND -- YOUR CHANGES WILL BE '
'OVERWRITTEN', '')
BLACK_LISTED = ()
ENTRY_FOOTER = [
'',
"Please make sure you have these installed.",
'',
]
ENTRY_WARN = """
Please install stevedore [https://pypi.python.org/pypi/stevedore] to make
sure that entrypoints can be loaded successfully. A setup.cfg file which is
required for discovery of these entrypoints was %(updated_or_created)s at
'%(location)s' which requires either pbr [https://pypi.python.org/pypi/pbr/]
or distutils2 (which is provided by default in python 3.3+)
[https://pypi.python.org/pypi/Distutils2].
"""
# These module names require entrypoint adjustments to work correctly in the
# target project's namespace (they also require stevedore and a setup.cfg file
# that includes references to their module location).
REQUIRES_ENTRYPOINTS = {
'engines.helpers': {
'target_mod': 'engines.helpers',
'replace': 'ENGINES_NAMESPACE',
'replacement': '%s.taskflow.engines',
'entrypoint': 'taskflow.engines',
},
'persistence.backends': {
'target_mod': 'persistence.backends',
'replace': 'BACKEND_NAMESPACE',
'replacement': '%s.taskflow.persistence',
'entrypoint': 'taskflow.persistence',
},
}
REQUIRES_ENTRYPOINTS['engines'] = REQUIRES_ENTRYPOINTS['engines.helpers']
def _warn_entrypoint(cfg_file, there_existed):
base_dir = os.path.basename(os.path.dirname(cfg_file))
cfg_file = os.path.join(base_dir, os.path.basename(cfg_file))
replacements = {
'location': cfg_file,
}
if there_existed:
replacements['updated_or_created'] = 'updated'
else:
replacements['updated_or_created'] = 'created'
text = ENTRY_WARN.strip()
text = text % replacements
lines = ['']
lines.extend(textwrap.wrap(text, width=79))
lines.extend(ENTRY_FOOTER)
for line in lines:
LOG.warn(line)
def _configure_logging(cfg):
if cfg.verbose:
level = logging.DEBUG
else:
level = logging.INFO
logging.basicConfig(level=level, format='%(levelname)s: %(message)s')
def _take_entrypoint_line(line, mod_list):
line = line.strip()
if not line or line.find("=") == -1 or line.startswith("#"):
return True
_name, module = line.split("=", 1)
base_module = module.split(":")[0].strip()
if base_module.startswith("%s." % (BASE_MOD)):
base_module = _join_mod(*base_module.split(".")[1:])
if not base_module:
return False
return _is_prefix_of(base_module, mod_list)
def _parse_args(argv):
@@ -168,13 +251,13 @@ def _make_dirs(path):
if not os.path.isdir(d):
dirs_needed.append(d)
if dirs_needed:
print("Creating directories for '%s'" % (dir_name))
LOG.debug("Creating directories for '%s'", dir_name)
for d in dirs_needed:
print(" '%s'" % (d))
LOG.debug(" '%s'", d)
os.mkdir(d)
init_path = os.path.join(d, '__init__.py')
if not os.path.exists(init_path):
print(" '%s'" % (init_path))
LOG.debug(" '%s'", init_path)
_drop_init(init_path)
@@ -200,8 +283,8 @@ def _copy_file(path, dest, base, root_mods=None, common_already=None):
def _copy_it():
_make_dirs(dest)
print("Copying '%s'" % (path))
print(" '%s' -> '%s'" % (path, dest))
LOG.debug("Copying '%s'", path)
LOG.debug(" '%s' -> '%s'", path, dest)
shutil.copy2(path, dest)
def _form_mod(prefix, postfix):
@@ -235,14 +318,14 @@ def _copy_file(path, dest, base, root_mods=None, common_already=None):
line = "%s\n" % _reform_import(_form_mod(prefix, postfix),
postfix, alias, comment)
if original_line != line:
print(" '%s' -> '%s'; line %s"
% (original_line.strip(), line.strip(), i + 1))
LOG.debug(" '%s' -> '%s'; line %s",
original_line.strip(), line.strip(), i + 1)
f.write(line)
# Only bother making it if we already didn't make it...
if not os.path.exists(dest):
_copy_it()
print("Fixing up '%s'" % (dest))
LOG.debug("Fixing up '%s'", dest)
_import_replace(dest)
_bulk_replace(dest,
'possible_topdir, "%s",$' % (BASE_MOD),
@@ -285,12 +368,14 @@ def _copy_mod(mod, base, dest_dir, common_already=None, root_mods=None):
copy_pyfile(root_mods[mod])
exists, mod_file = _get_mod_path([mod], base=BASE_MOD)
if exists:
print("Creating module '%s'" % (_join_mod(base, BASE_MOD, mod)))
LOG.debug("Creating module '%s'", _join_mod(base, BASE_MOD, mod))
copy_pyfile(mod_file)
return mod_file
else:
if not root_existed:
raise IOError("Can not find module: %s" % (_join_mod(BASE_MOD,
mod)))
return root_mods[mod]
def _parse_import_line(line, linenum=-1, filename=None):
@@ -396,7 +481,8 @@ def _build_dependency_tree():
mod_name = _join_mod(*segments)
root_mods[mod_name] = os.path.join(dirpath, filename)
filepath = os.path.join(dirpath, filename)
file_paths.append((filepath, mod_name))
if mod_name:
file_paths.append((filepath, mod_name))
# Analyze the individual files' dependencies after we know exactly what the
# modules are so that we can find those modules if an individual file
# imports a module instead of a file.
@@ -415,13 +501,16 @@ def _dfs_dependency_tree(dep_tree, mod_name, mod_list=[]):
return mod_list
def _complete_engine_list(engines):
if not engines:
def _complete_engines(engine_types):
if not engine_types:
return []
engine_mods = []
for engine_type in engines:
engine_mods = [
'engines',
'engines.base',
]
for engine_type in engine_types:
engine_type = engine_type.strip()
if not engine_type:
if not engine_type or engine_type in engine_mods:
continue
engine_mods.append(_join_mod('engines', engine_type))
mod = _join_mod('engines', engine_type, 'engine')
@@ -433,23 +522,43 @@ def _complete_engine_list(engines):
return engine_mods
def _complete_flow_list(flows):
if not flows:
def _complete_flows(patterns):
if not patterns:
return []
flow_mods = []
for flow in flows:
flow = flow.strip()
if not flow:
pattern_mods = [
'patterns',
]
for p in patterns:
p = p.strip()
if not p or p in pattern_mods:
continue
mod = _join_mod('patterns', flow)
mod = _join_mod('patterns', p)
exists, mod_path = _get_mod_path([mod], base=BASE_MOD)
if not exists:
raise IOError("Flow %s file not found at: %s" % (flow, mod_path))
if flow in BLACK_LISTED:
raise IOError("Flow %s is currently disallowed until further"
" notice" % (flow))
flow_mods.append(mod)
return flow_mods
raise IOError("Flow pattern %s file not found at: %s"
% (p, mod_path))
pattern_mods.append(mod)
return pattern_mods
def _complete_persistence(backends):
if not backends:
return []
backend_mods = [
'persistence',
'persistence.logbook',
]
for b in backends:
b = b.strip()
if not b or b in backend_mods:
continue
mod = _join_mod("persistence", "backends", b)
exists, mod_path = _get_mod_path([mod], base=BASE_MOD)
if not exists:
raise IOError("Persistence backend %s file not found at: %s"
% (b, mod_path))
backend_mods.append(mod)
return backend_mods
def _is_prefix_of(prefix_text, haystack):
@@ -485,21 +594,21 @@ def _find_existing(mod, base, dest_dir):
def _uniq_itr(itr):
seen = []
seen = set()
for i in itr:
if i in seen:
continue
seen.append(i)
seen.add(i)
yield i
def _rm_tree(base):
dirpaths = []
for dirpath, _tmp, filenames in os.walk(base):
print(" '%s' (X)" % (dirpath))
LOG.debug(" '%s' (X)", dirpath)
for filename in filenames:
filepath = os.path.join(dirpath, filename)
print(" '%s' (X)" % (filepath))
LOG.debug(" '%s' (X)", filepath)
os.unlink(filepath)
dirpaths.append(dirpath)
for d in reversed(dirpaths):
@@ -508,6 +617,8 @@ def _rm_tree(base):
def main(argv):
conf = _parse_args(argv)
script_base = os.path.abspath(os.path.dirname(__file__))
_configure_logging(conf)
dest_dir = conf.dest_dir
if not dest_dir and conf.config_file:
@@ -527,19 +638,13 @@ def main(argv):
p = ''
p_type = p_type.strip()
p = p.strip()
if not p_type:
continue
if p not in primitive_types[p_type]:
primitive_types[p_type].append(p)
# TODO(harlowja): for now these are the only primitives we are allowing to
# be copied over. Later add more as needed.
prims = 0
for k in ALLOWED_PRIMITIVES:
prims += len(primitive_types.get(k, []))
if prims <= 0:
allowed = ", ".join(sorted(ALLOWED_PRIMITIVES))
print("A list of primitives to copy is required "
"(%s is allowed)" % (allowed), file=sys.stderr)
sys.exit(1)
unknown_prims = []
for k in primitive_types.keys():
if k not in ALLOWED_PRIMITIVES:
@@ -555,10 +660,15 @@ def main(argv):
print("A destination base module is required", file=sys.stderr)
sys.exit(1)
base_dir = os.path.join(dest_dir, conf.base)
def copy_mods(mod_list, root_mods):
common_already = {}
missing_common = set()
for mod in list(sorted(mod_list)):
# Take out the openstack.common modules that exist already in the
# containing project.
mod_list = list(mod_list)
for mod in list(mod_list):
# NOTE(harlowja): attempt to use the modules being copied to common
# folder as much as possible for modules that are needed for
# taskflow as this avoids duplicating openstack.common in the
@@ -570,39 +680,146 @@ def main(argv):
mod_list.remove(mod)
else:
missing_common.add(mod)
there_common_mod = _join_mod(conf.base, 'openstack', 'common')
LOG.info("Copying %s modules into '%s'", len(mod_list), base_dir)
for m in mod_list:
LOG.info(" - %s", m)
if common_already:
print("The following modules will be used from the containing"
" projects '%s'" % (there_common_mod))
LOG.info("The following modules will be used from the containing"
" projects 'openstack.common'")
for mod in sorted(common_already.keys()):
target_mod = common_already[mod]
print(" '%s' -> '%s'" % (mod, target_mod))
LOG.info(" '%s' -> '%s'", mod, target_mod)
if missing_common:
print("The following modules will *not* be used from the"
" containing projects '%s'" % (there_common_mod))
LOG.info("The following modules will *not* be used from the"
" containing projects 'openstack.common'")
for mod in sorted(missing_common):
print(" '%s'" % (mod))
for mod in _uniq_itr(sorted(mod_list)):
_copy_mod(mod, conf.base, dest_dir,
common_already=common_already,
root_mods=root_mods)
LOG.info(" - %s", mod)
copied = set()
for mod in mod_list:
copied.add(_copy_mod(mod, conf.base, dest_dir,
common_already=common_already,
root_mods=root_mods))
LOG.debug("Copied %s modules", len(copied))
for m in sorted(copied):
LOG.debug(" - %s", m)
def clean_old():
old_base = os.path.join(dest_dir, conf.base, BASE_MOD)
if os.path.isdir(old_base):
print("Removing old %s tree found at '%s'" % (BASE_MOD, old_base))
LOG.info("Removing old %s tree found at '%s'", BASE_MOD, old_base)
_rm_tree(old_base)
find_what = _complete_flow_list(primitive_types.pop('flow', []))
find_what.extend(_complete_engine_list(primitive_types.get('engines', [])))
def create_entrypoints(mod_list, root_mods):
needed_entrypoints = set()
for k in REQUIRES_ENTRYPOINTS.keys():
for m in mod_list:
if m.startswith(k):
needed_entrypoints.add(k)
if not needed_entrypoints:
return
# Alter the source code locations that have the entry point name.
LOG.info("Altering %s entrypoint referencing modules:",
len(needed_entrypoints))
for m in sorted(needed_entrypoints):
LOG.info(" - %s", m)
entrypoints_adjusted = set()
for k in sorted(needed_entrypoints):
entrypoint_details = REQUIRES_ENTRYPOINTS[k]
entrypoint_target = entrypoint_details['target_mod']
there_entrypoint = (entrypoint_details['replacement'] % conf.base)
if entrypoint_target in entrypoints_adjusted:
continue
base_mod_path = root_mods.get(entrypoint_target)
if not base_mod_path:
existing_mod = _find_existing(entrypoint_target,
BASE_MOD, base_dir)
if existing_mod:
base_mod_path = _mod_to_path(existing_mod) + ".py"
if not base_mod_path:
raise IOError("Could not find entrypoint target %s" %
entrypoint_target)
dest_path = os.path.join(base_dir, base_mod_path)
if not os.path.isfile(dest_path):
raise IOError("Could not find entrypoint file %s" %
dest_path)
LOG.debug("Adjusting '%s' in '%s'", entrypoint_details['replace'],
dest_path)
pattern = r"%s\s*=.*" % (entrypoint_details['replace'])
replacement = entrypoint_details['replace']
replacement += " = '%s'" % (there_entrypoint)
LOG.debug("Replacing '%s' -> '%s'", pattern, replacement)
_bulk_replace(dest_path, pattern, replacement)
entrypoints_adjusted.add(entrypoint_target)
if not entrypoints_adjusted:
return
# Adjust their entrypoint configuration file (if it exists).
cfg_filename = os.path.join(dest_dir, "setup.cfg")
my_cfg_filename = os.path.join(script_base, 'setup.cfg')
LOG.debug("Adjusting entrypoint configuration in '%s' with entrypoints"
" from '%s'", cfg_filename, my_cfg_filename)
# Clear out their old entry points for taskflow
there_cfg = ConfigParser.RawConfigParser()
there_cfg.read([cfg_filename])
there_exists = os.path.isfile(cfg_filename)
for k in entrypoints_adjusted:
entrypoint_details = REQUIRES_ENTRYPOINTS[k]
entrypoint = entrypoint_details['entrypoint']
there_entrypoint = (entrypoint_details['replacement'] % conf.base)
try:
there_cfg.remove_option('entry_points', there_entrypoint)
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
pass
# Copy and modify my entry points into their entrypoints.
my_cfg = ConfigParser.RawConfigParser()
my_cfg.read([my_cfg_filename])
for k in sorted(entrypoints_adjusted):
entrypoint_details = REQUIRES_ENTRYPOINTS[k]
entrypoint = entrypoint_details['entrypoint']
there_entrypoint = (entrypoint_details['replacement'] % conf.base)
my_entries = my_cfg.get('entry_points', entrypoint)
there_entries = []
for line in my_entries.splitlines():
# NOTE(harlowja): only take the entrypoints that are relevant
# for the desired module list, skip the ones that are not.
if _take_entrypoint_line(line, mod_list):
new_line = re.sub(entrypoint, there_entrypoint, line)
there_entries.append(new_line)
try:
there_cfg.add_section('entry_points')
except ConfigParser.DuplicateSectionError:
pass
entry_value = os.linesep.join(there_entries)
there_cfg.set('entry_points', there_entrypoint, entry_value)
LOG.debug("Added entrypoint '%s'", there_entrypoint)
for line in there_entries:
line = line.strip()
if line:
LOG.debug(">> %s", line)
# ConfigParser seems to use tabs, instead of spaces, why!
buf = six.StringIO()
there_cfg.write(buf)
contents = buf.getvalue()
if contents.find("\t") != -1:
contents = contents.replace("\t", " " * 4)
if contents.find(" \n") != -1:
contents = contents.replace(' \n', '\n')
with open(cfg_filename, "wb") as fh:
fh.write(contents)
_warn_entrypoint(cfg_filename, there_exists)
find_what = _complete_flows(primitive_types.pop('flow', []))
find_what.extend(_complete_engines(primitive_types.get('engines')))
find_what.extend(_complete_persistence(primitive_types.get('persistence')))
find_what.extend(primitive_types.keys())
find_what = [f for f in _uniq_itr(find_what)]
copy_what, root_mods = _complete_module_list(find_what)
copy_what = sorted([m for m in _uniq_itr(copy_what)])
if copy_what:
clean_old()
copy_mods([m for m in _uniq_itr(copy_what)], root_mods)
copy_mods(copy_what, root_mods)
create_entrypoints(copy_what, root_mods)
else:
print("Nothing to copy.")
print("Nothing to copy.", file=sys.stderr)
if __name__ == "__main__":
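
As the ENTRY_WARN text above notes, the copied entrypoints are only useful if stevedore can discover them. A minimal usage sketch, assuming stevedore is installed, the target project is named "myproject" and has been re-installed so its setup.cfg entrypoints are registered, and "some_engine" is a placeholder driver name:

# Hedged usage sketch: loads an engine through the rewritten namespace
# that update.py placed in the target project's setup.cfg.
from stevedore import driver

mgr = driver.DriverManager(
    namespace='myproject.taskflow.engines',
    name='some_engine',
    invoke_on_load=False,
)
engine_cls = mgr.driver  # the class registered under that entrypoint name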