Merge "Fix entrypoints being updated/created by update.py"
update.py (339 changed lines)
@@ -62,13 +62,21 @@ Obviously, the first way is the easiest!
 
 from __future__ import print_function
 
+import ConfigParser
+
 import collections
 import functools
+import logging
 import os
 import os.path
 import re
 import shutil
 import sys
+import textwrap
 
+import six
+
+LOG = logging.getLogger(__name__)
+
 from oslo.config import cfg
 
@@ -87,21 +95,96 @@ OPTS = [
                default=None,
                help='A config file or destination project directory',
                positional=True),
+    cfg.BoolOpt('verbose', default=False,
+                short='v',
+                help='Verbosely show what this program is doing'),
 ]
 ALLOWED_PRIMITIVES = (
-    'flow',
-    'task',
     'decorators',
-    'storage',
     'engines',
     'exceptions',
+    'flow',
+    'persistence',
+    'storage',
+    'task',
 )
 IMPORT_FROM = re.compile(r"^\s*from\s+" + BASE_MOD + r"\s*(.*)$")
 BASE_CONF = '%s.conf' % (BASE_MOD)
 MACHINE_GENERATED = ('# DO NOT EDIT THIS FILE BY HAND -- YOUR CHANGES WILL BE '
                      'OVERWRITTEN', '')
 
-BLACK_LISTED = ()
+ENTRY_FOOTER = [
+    '',
+    "Please make sure you have these installed.",
+    '',
+]
+ENTRY_WARN = """
+Please install stevedore [https://pypi.python.org/pypi/stevedore] to make
+sure that entrypoints can be loaded successfully. A setup.cfg file which is
+required for discovery of these entrypoints was %(updated_or_created)s at
+'%(location)s' which requires either pbr [https://pypi.python.org/pypi/pbr/]
+or distutils2 (which is provided by default in python 3.3+)
+[https://pypi.python.org/pypi/Distutils2].
+"""
+
+# These module names require entrypoint adjustments to work correctly in the
+# target projects namespace (they also require stevedore and a setup.cfg file
+# that includes references to there module location).
+REQUIRES_ENTRYPOINTS = {
+    'engines.helpers': {
+        'target_mod': 'engines.helpers',
+        'replace': 'ENGINES_NAMESPACE',
+        'replacement': '%s.taskflow.engines',
+        'entrypoint': 'taskflow.engines',
+    },
+    'persistence.backends': {
+        'target_mod': 'persistence.backends',
+        'replace': 'BACKEND_NAMESPACE',
+        'replacement': '%s.taskflow.persistence',
+        'entrypoint': 'taskflow.persistence',
+    },
+}
+REQUIRES_ENTRYPOINTS['engines'] = REQUIRES_ENTRYPOINTS['engines.helpers']
+
+
+def _warn_entrypoint(cfg_file, there_existed):
+    base_dir = os.path.basename(os.path.dirname(cfg_file))
+    cfg_file = os.path.join(base_dir, os.path.basename(cfg_file))
+    replacements = {
+        'location': cfg_file,
+    }
+    if there_existed:
+        replacements['updated_or_created'] = 'updated'
+    else:
+        replacements['updated_or_created'] = 'created'
+    text = ENTRY_WARN.strip()
+    text = text % replacements
+    lines = ['']
+    lines.extend(textwrap.wrap(text, width=79))
+    lines.extend(ENTRY_FOOTER)
+    for line in lines:
+        LOG.warn(line)
+
+
+def _configure_logging(cfg):
+    if cfg.verbose:
+        level = logging.DEBUG
+    else:
+        level = logging.INFO
+    logging.basicConfig(level=level, format='%(levelname)s: %(message)s')
+
+
+def _take_entrypoint_line(line, mod_list):
+    line = line.strip()
+    if not line or line.find("=") == -1 or line.startswith("#"):
+        return True
+    _name, module = line.split("=", 1)
+    base_module = module.split(":")[0].strip()
+    if base_module.startswith("%s." % (BASE_MOD)):
+        base_module = _join_mod(*base_module.split(".")[1:])
+    if not base_module:
+        return False
+    return _is_prefix_of(base_module, mod_list)
 
 
 def _parse_args(argv):
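
Note: the new _take_entrypoint_line() helper is what later lets create_entrypoints() copy only the setup.cfg entry_points lines whose taskflow-relative module is actually in the set of modules being copied (blank lines and comments are always kept). A rough standalone sketch of that behaviour, using simplified stand-ins for _join_mod() and _is_prefix_of() (both stand-ins are assumptions, not the real helpers), with made-up entrypoint names:

    BASE_MOD = 'taskflow'

    def _join_mod(*parts):
        # Stand-in: join module segments with dots, dropping empties.
        return '.'.join(p for p in parts if p)

    def _is_prefix_of(prefix_text, haystack):
        # Stand-in: does any copied module name start with this prefix?
        return any(m.startswith(prefix_text) for m in haystack)

    def _take_entrypoint_line(line, mod_list):
        line = line.strip()
        if not line or line.find("=") == -1 or line.startswith("#"):
            return True
        _name, module = line.split("=", 1)
        base_module = module.split(":")[0].strip()
        if base_module.startswith("%s." % (BASE_MOD)):
            base_module = _join_mod(*base_module.split(".")[1:])
        if not base_module:
            return False
        return _is_prefix_of(base_module, mod_list)

    # Only 'engines.helpers' is being copied in this example.
    mods = ['engines.helpers']
    print(_take_entrypoint_line("default = taskflow.engines.helpers:load", mods))            # True
    print(_take_entrypoint_line("worker = taskflow.engines.worker_based.engine:E", mods))    # False
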
@@ -168,13 +251,13 @@ def _make_dirs(path):
         if not os.path.isdir(d):
             dirs_needed.append(d)
     if dirs_needed:
-        print("Creating directories for '%s'" % (dir_name))
+        LOG.debug("Creating directories for '%s'", dir_name)
         for d in dirs_needed:
-            print(" '%s'" % (d))
+            LOG.debug(" '%s'", d)
             os.mkdir(d)
             init_path = os.path.join(d, '__init__.py')
             if not os.path.exists(init_path):
-                print(" '%s'" % (init_path))
+                LOG.debug(" '%s'", init_path)
                 _drop_init(init_path)
 
 
@@ -200,8 +283,8 @@ def _copy_file(path, dest, base, root_mods=None, common_already=None):
 
     def _copy_it():
         _make_dirs(dest)
-        print("Copying '%s'" % (path))
-        print(" '%s' -> '%s'" % (path, dest))
+        LOG.debug("Copying '%s'", path)
+        LOG.debug(" '%s' -> '%s'", path, dest)
         shutil.copy2(path, dest)
 
     def _form_mod(prefix, postfix):
@@ -235,14 +318,14 @@ def _copy_file(path, dest, base, root_mods=None, common_already=None):
                 line = "%s\n" % _reform_import(_form_mod(prefix, postfix),
                                                postfix, alias, comment)
                 if original_line != line:
-                    print(" '%s' -> '%s'; line %s"
-                          % (original_line.strip(), line.strip(), i + 1))
+                    LOG.debug(" '%s' -> '%s'; line %s",
+                              original_line.strip(), line.strip(), i + 1)
             f.write(line)
 
     # Only bother making it if we already didn't make it...
     if not os.path.exists(dest):
         _copy_it()
-    print("Fixing up '%s'" % (dest))
+    LOG.debug("Fixing up '%s'", dest)
     _import_replace(dest)
     _bulk_replace(dest,
                   'possible_topdir, "%s",$' % (BASE_MOD),
@@ -285,12 +368,14 @@ def _copy_mod(mod, base, dest_dir, common_already=None, root_mods=None):
         copy_pyfile(root_mods[mod])
     exists, mod_file = _get_mod_path([mod], base=BASE_MOD)
     if exists:
-        print("Creating module '%s'" % (_join_mod(base, BASE_MOD, mod)))
+        LOG.debug("Creating module '%s'", _join_mod(base, BASE_MOD, mod))
         copy_pyfile(mod_file)
+        return mod_file
     else:
         if not root_existed:
             raise IOError("Can not find module: %s" % (_join_mod(BASE_MOD,
                                                                  mod)))
+        return root_mods[mod]
 
 
 def _parse_import_line(line, linenum=-1, filename=None):
@@ -396,7 +481,8 @@ def _build_dependency_tree():
             mod_name = _join_mod(*segments)
             root_mods[mod_name] = os.path.join(dirpath, filename)
             filepath = os.path.join(dirpath, filename)
-            file_paths.append((filepath, mod_name))
+            if mod_name:
+                file_paths.append((filepath, mod_name))
     # Analyze the individual files dependencies after we know exactly what the
     # modules are so that we can find those modules if a individual file
     # imports a module instead of a file.
@@ -415,13 +501,16 @@ def _dfs_dependency_tree(dep_tree, mod_name, mod_list=[]):
     return mod_list
 
 
-def _complete_engine_list(engines):
-    if not engines:
+def _complete_engines(engine_types):
+    if not engine_types:
         return []
-    engine_mods = []
-    for engine_type in engines:
+    engine_mods = [
+        'engines',
+        'engines.base',
+    ]
+    for engine_type in engine_types:
         engine_type = engine_type.strip()
-        if not engine_type:
+        if not engine_type or engine_type in engine_mods:
             continue
         engine_mods.append(_join_mod('engines', engine_type))
         mod = _join_mod('engines', engine_type, 'engine')
@@ -433,23 +522,43 @@ def _complete_engine_list(engines):
     return engine_mods
 
 
-def _complete_flow_list(flows):
-    if not flows:
+def _complete_flows(patterns):
+    if not patterns:
         return []
-    flow_mods = []
-    for flow in flows:
-        flow = flow.strip()
-        if not flow:
+    pattern_mods = [
+        'patterns',
+    ]
+    for p in patterns:
+        p = p.strip()
+        if not p or p in pattern_mods:
             continue
-        mod = _join_mod('patterns', flow)
+        mod = _join_mod('patterns', p)
         exists, mod_path = _get_mod_path([mod], base=BASE_MOD)
         if not exists:
-            raise IOError("Flow %s file not found at: %s" % (flow, mod_path))
-        if flow in BLACK_LISTED:
-            raise IOError("Flow %s is currently disallowed until further"
-                          " notice" % (flow))
-        flow_mods.append(mod)
-    return flow_mods
+            raise IOError("Flow pattern %s file not found at: %s"
+                          % (p, mod_path))
+        pattern_mods.append(mod)
+    return pattern_mods
+
+
+def _complete_persistence(backends):
+    if not backends:
+        return []
+    backend_mods = [
+        'persistence',
+        'persistence.logbook',
+    ]
+    for b in backends:
+        b = b.strip()
+        if not b or b in backend_mods:
+            continue
+        mod = _join_mod("persistence", "backends", b)
+        exists, mod_path = _get_mod_path([mod], base=BASE_MOD)
+        if not exists:
+            raise IOError("Persistence backend %s file not found at: %s"
+                          % (b, mod_path))
+        backend_mods.append(mod)
+    return backend_mods
 
 
 def _is_prefix_of(prefix_text, haystack):
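
Note: _complete_engines(), _complete_flows() and _complete_persistence() now share one shape: pre-seed the result with the modules that are always required, then add one module per requested name, skipping blanks and names already in the seed list (the real helpers also verify each module file exists via _get_mod_path()). Roughly, using a hypothetical _join_mod() stand-in:

    def _join_mod(*parts):
        # Stand-in for the real dotted-name join helper.
        return '.'.join(p for p in parts if p)

    def complete_persistence(backends):
        backend_mods = ['persistence', 'persistence.logbook']
        for b in backends:
            b = b.strip()
            if not b or b in backend_mods:
                continue
            backend_mods.append(_join_mod("persistence", "backends", b))
        return backend_mods

    print(complete_persistence(['memory', '']))
    # ['persistence', 'persistence.logbook', 'persistence.backends.memory']
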
@@ -485,21 +594,21 @@ def _find_existing(mod, base, dest_dir):
 
 
 def _uniq_itr(itr):
-    seen = []
+    seen = set()
    for i in itr:
         if i in seen:
             continue
-        seen.append(i)
+        seen.add(i)
         yield i
 
 
 def _rm_tree(base):
     dirpaths = []
     for dirpath, _tmp, filenames in os.walk(base):
-        print(" '%s' (X)" % (dirpath))
+        LOG.debug(" '%s' (X)", dirpath)
         for filename in filenames:
             filepath = os.path.join(dirpath, filename)
-            print(" '%s' (X)" % (filepath))
+            LOG.debug(" '%s' (X)", filepath)
             os.unlink(filepath)
         dirpaths.append(dirpath)
     for d in reversed(dirpaths):
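
Note: switching seen from a list to a set makes the membership check roughly O(1) per item instead of O(n), while _uniq_itr() still yields items in first-seen order; the only requirement is that the items are hashable, which holds for the module-name strings this script passes in. For example:

    def _uniq_itr(itr):
        seen = set()
        for i in itr:
            if i in seen:
                continue
            seen.add(i)
            yield i

    print(list(_uniq_itr(['engines', 'task', 'engines', 'flow', 'task'])))
    # ['engines', 'task', 'flow']
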
@@ -508,6 +617,8 @@ def _rm_tree(base):
 
 def main(argv):
     conf = _parse_args(argv)
+    script_base = os.path.abspath(os.path.dirname(__file__))
+    _configure_logging(conf)
 
     dest_dir = conf.dest_dir
     if not dest_dir and conf.config_file:
@@ -527,19 +638,13 @@ def main(argv):
             p = ''
         p_type = p_type.strip()
         p = p.strip()
+        if not p_type:
+            continue
         if p not in primitive_types[p_type]:
             primitive_types[p_type].append(p)
 
     # TODO(harlowja): for now these are the only primitives we are allowing to
     # be copied over. Later add more as needed.
-    prims = 0
-    for k in ALLOWED_PRIMITIVES:
-        prims += len(primitive_types.get(k, []))
-    if prims <= 0:
-        allowed = ", ".join(sorted(ALLOWED_PRIMITIVES))
-        print("A list of primitives to copy is required "
-              "(%s is allowed)" % (allowed), file=sys.stderr)
-        sys.exit(1)
     unknown_prims = []
     for k in primitive_types.keys():
         if k not in ALLOWED_PRIMITIVES:
@@ -555,10 +660,15 @@ def main(argv):
         print("A destination base module is required", file=sys.stderr)
         sys.exit(1)
 
+    base_dir = os.path.join(dest_dir, conf.base)
+
     def copy_mods(mod_list, root_mods):
         common_already = {}
         missing_common = set()
-        for mod in list(sorted(mod_list)):
+        # Take out the openstack.common modules that exist already in the
+        # containing project.
+        mod_list = list(mod_list)
+        for mod in list(mod_list):
             # NOTE(harlowja): attempt to use the modules being copied to common
             # folder as much as possible for modules that are needed for
             # taskflow as this avoids duplicating openstack.common in the
@@ -570,39 +680,146 @@ def main(argv):
                 mod_list.remove(mod)
             else:
                 missing_common.add(mod)
-        there_common_mod = _join_mod(conf.base, 'openstack', 'common')
+        LOG.info("Copying %s modules into '%s'", len(mod_list), base_dir)
+        for m in mod_list:
+            LOG.info(" - %s", m)
         if common_already:
-            print("The following modules will be used from the containing"
-                  " projects '%s'" % (there_common_mod))
+            LOG.info("The following modules will be used from the containing"
+                     " projects 'openstack.common'")
             for mod in sorted(common_already.keys()):
                 target_mod = common_already[mod]
-                print(" '%s' -> '%s'" % (mod, target_mod))
+                LOG.info(" '%s' -> '%s'", mod, target_mod)
         if missing_common:
-            print("The following modules will *not* be used from the"
-                  " containing projects '%s'" % (there_common_mod))
+            LOG.info("The following modules will *not* be used from the"
+                     " containing projects 'openstack.common'")
             for mod in sorted(missing_common):
-                print(" '%s'" % (mod))
-        for mod in _uniq_itr(sorted(mod_list)):
-            _copy_mod(mod, conf.base, dest_dir,
-                      common_already=common_already,
-                      root_mods=root_mods)
+                LOG.info(" - %s", mod)
+        copied = set()
+        for mod in mod_list:
+            copied.add(_copy_mod(mod, conf.base, dest_dir,
+                                 common_already=common_already,
+                                 root_mods=root_mods))
+        LOG.debug("Copied %s modules", len(copied))
+        for m in sorted(copied):
+            LOG.debug(" - %s", m)
 
     def clean_old():
         old_base = os.path.join(dest_dir, conf.base, BASE_MOD)
         if os.path.isdir(old_base):
-            print("Removing old %s tree found at '%s'" % (BASE_MOD, old_base))
+            LOG.info("Removing old %s tree found at '%s'", BASE_MOD, old_base)
             _rm_tree(old_base)
 
-    find_what = _complete_flow_list(primitive_types.pop('flow', []))
-    find_what.extend(_complete_engine_list(primitive_types.get('engines', [])))
+    def create_entrypoints(mod_list, root_mods):
+        needed_entrypoints = set()
+        for k in REQUIRES_ENTRYPOINTS.keys():
+            for m in mod_list:
+                if m.startswith(k):
+                    needed_entrypoints.add(k)
+        if not needed_entrypoints:
+            return
+        # Alter the source code locations that have the entry point name.
+        LOG.info("Altering %s entrypoint referencing modules:",
+                 len(needed_entrypoints))
+        for m in sorted(needed_entrypoints):
+            LOG.info(" - %s", m)
+        entrypoints_adjusted = set()
+        for k in sorted(needed_entrypoints):
+            entrypoint_details = REQUIRES_ENTRYPOINTS[k]
+            entrypoint_target = entrypoint_details['target_mod']
+            there_entrypoint = (entrypoint_details['replacement'] % conf.base)
+            if entrypoint_target in entrypoints_adjusted:
+                continue
+            base_mod_path = root_mods.get(entrypoint_target)
+            if not base_mod_path:
+                existing_mod = _find_existing(entrypoint_target,
+                                              BASE_MOD, base_dir)
+                if existing_mod:
+                    base_mod_path = _mod_to_path(existing_mod) + ".py"
+            if not base_mod_path:
+                raise IOError("Could not find entrypoint target %s" %
+                              entrypoint_target)
+            dest_path = os.path.join(base_dir, base_mod_path)
+            if not os.path.isfile(dest_path):
+                raise IOError("Could not find entrypoint file %s" %
+                              dest_path)
+            LOG.debug("Adjusting '%s' in '%s'", entrypoint_details['replace'],
+                      dest_path)
+            pattern = r"%s\s*=.*" % (entrypoint_details['replace'])
+            replacement = entrypoint_details['replace']
+            replacement += " = '%s'" % (there_entrypoint)
+            LOG.debug("Replacing '%s' -> '%s'", pattern, replacement)
+            _bulk_replace(dest_path, pattern, replacement)
+            entrypoints_adjusted.add(entrypoint_target)
+        if not entrypoints_adjusted:
+            return
+        # Adjust there entrypoint configuration file (if it exists).
+        cfg_filename = os.path.join(dest_dir, "setup.cfg")
+        my_cfg_filename = os.path.join(script_base, 'setup.cfg')
+        LOG.debug("Adjusting entrypoint configuration in '%s' with entrypoints"
+                  " from '%s'", cfg_filename, my_cfg_filename)
+        # Clear out there old entry points for taskflow
+        there_cfg = ConfigParser.RawConfigParser()
+        there_cfg.read([cfg_filename])
+        there_exists = os.path.isfile(cfg_filename)
+        for k in entrypoints_adjusted:
+            entrypoint_details = REQUIRES_ENTRYPOINTS[k]
+            entrypoint = entrypoint_details['entrypoint']
+            there_entrypoint = (entrypoint_details['replacement'] % conf.base)
+            try:
+                there_cfg.remove_option('entry_points', there_entrypoint)
+            except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+                pass
+        # Copy and modify my entry points into there entrypoints.
+        my_cfg = ConfigParser.RawConfigParser()
+        my_cfg.read([my_cfg_filename])
+        for k in sorted(entrypoints_adjusted):
+            entrypoint_details = REQUIRES_ENTRYPOINTS[k]
+            entrypoint = entrypoint_details['entrypoint']
+            there_entrypoint = (entrypoint_details['replacement'] % conf.base)
+            my_entries = my_cfg.get('entry_points', entrypoint)
+            there_entries = []
+            for line in my_entries.splitlines():
+                # NOTE(harlowja): only take the entrypoints that are relevant
+                # for the desired module list, skip the ones that are not.
+                if _take_entrypoint_line(line, mod_list):
+                    new_line = re.sub(entrypoint, there_entrypoint, line)
+                    there_entries.append(new_line)
+            try:
+                there_cfg.add_section('entry_points')
+            except ConfigParser.DuplicateSectionError:
+                pass
+            entry_value = os.linesep.join(there_entries)
+            there_cfg.set('entry_points', there_entrypoint, entry_value)
+            LOG.debug("Added entrypoint '%s'", there_entrypoint)
+            for line in there_entries:
+                line = line.strip()
+                if line:
+                    LOG.debug(">> %s", line)
+        # ConfigParser seems to use tabs, instead of spaces, why!
+        buf = six.StringIO()
+        there_cfg.write(buf)
+        contents = buf.getvalue()
+        if contents.find("\t") != -1:
+            contents = contents.replace("\t", " " * 4)
+        if contents.find(" \n") != -1:
+            contents = contents.replace(' \n', '\n')
+        with open(cfg_filename, "wb") as fh:
+            fh.write(contents)
+        _warn_entrypoint(cfg_filename, there_exists)
+
+    find_what = _complete_flows(primitive_types.pop('flow', []))
+    find_what.extend(_complete_engines(primitive_types.get('engines')))
+    find_what.extend(_complete_persistence(primitive_types.get('persistence')))
     find_what.extend(primitive_types.keys())
     find_what = [f for f in _uniq_itr(find_what)]
     copy_what, root_mods = _complete_module_list(find_what)
+    copy_what = sorted([m for m in _uniq_itr(copy_what)])
     if copy_what:
         clean_old()
-        copy_mods([m for m in _uniq_itr(copy_what)], root_mods)
+        copy_mods(copy_what, root_mods)
+        create_entrypoints(copy_what, root_mods)
     else:
-        print("Nothing to copy.")
+        print("Nothing to copy.", file=sys.stderr)
 
 
 if __name__ == "__main__":
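
Note: the setup.cfg half of create_entrypoints() is a plain ConfigParser round-trip: read the target project's setup.cfg, drop any stale option for the renamed namespace, make sure an entry_points section exists, set the filtered entries, then fix up the serialized text because RawConfigParser writes multi-line values with tab continuations. A trimmed sketch of that round-trip (the 'nova' base name, the file paths and the entry value below are made up for illustration):

    import os

    import six
    from six.moves import configparser  # ConfigParser on py2, configparser on py3

    there_cfg = configparser.RawConfigParser()
    there_cfg.read(['setup.cfg'])  # reading a missing file is not an error

    try:
        there_cfg.remove_option('entry_points', 'nova.taskflow.engines')
    except (configparser.NoSectionError, configparser.NoOptionError):
        pass
    try:
        there_cfg.add_section('entry_points')
    except configparser.DuplicateSectionError:
        pass

    # An illustrative value; the real script copies taskflow's own entries and
    # rewrites 'taskflow.engines' -> '<base>.taskflow.engines'.
    entries = ['', 'default = nova.taskflow.engines.helpers:load']
    there_cfg.set('entry_points', 'nova.taskflow.engines', os.linesep.join(entries))

    # Normalize the tab-indented continuation lines that RawConfigParser emits.
    buf = six.StringIO()
    there_cfg.write(buf)
    contents = buf.getvalue().replace('\t', ' ' * 4).replace(' \n', '\n')
    with open('setup.cfg', 'w') as fh:
        fh.write(contents)
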