Allow engines to be copied + blacklist broken flows

Allow engines to be copied to integrated projects via
update.py and, for the time being, blacklist the threaded and
graph flows from being copied (to avoid breakage) until
the bugs associated with those flows are fixed.

Also a bug was found that would not allow copying over
the utils module (since a utils.py file no longer
exists) so ensure that when we encounter a module (and
not a .py file) we can copy over the correct file that
belongs to that module (in the case of utils this would
be the __init__.py of the utils folder).

Also fixes a bug where, due to our usage of defaultdict,
we would copy over more modules than desired: merely
'reading' a key in a defaultdict actually creates that key
using the default value, which caused these extra module
inclusions.

Change-Id: I2aa732fc0baf269ee87aa72bfe8495e51ad21faf
This commit is contained in:
Joshua Harlow
2013-09-07 01:36:47 -07:00
parent 37295b71a9
commit 5a8fd8d455

View File

@@ -88,12 +88,22 @@ OPTS = [
help='A config file or destination project directory',
positional=True),
]
ALLOWED_PRIMITIVES = ('flow', 'task', 'decorators')
ALLOWED_PRIMITIVES = (
'flow',
'task',
'decorators',
'storage',
'engines',
'exceptions',
)
IMPORT_FROM = re.compile(r"^\s*from\s+" + BASE_MOD + r"\s*(.*)$")
BASE_CONF = '%s.conf' % (BASE_MOD)
MACHINE_GENERATED = ('# DO NOT EDIT THIS FILE BY HAND -- YOUR CHANGES WILL BE '
'OVERWRITTEN', '')
# FIXME(harlowja): remove these after bugs #1221448 and #1221505 are fixed
BLACK_LISTED = ('threaded_flow', 'graph_flow')
def _parse_args(argv):
conf = cfg.ConfigOpts()
@@ -340,7 +350,7 @@ def _parse_import_line(line, linenum=-1, filename=None):
return (comment, prefix, postfix, alias)
def _find_import_modules(srcfile):
def _find_import_modules(srcfile, root_mods):
with open(srcfile, 'rb') as f:
lines = f.readlines()
for (i, line) in enumerate(lines):
@@ -349,16 +359,19 @@ def _find_import_modules(srcfile):
continue
(comment, prefix, postfix, alias) = segments
importing = _join_mod(prefix, postfix)
if importing in root_mods.keys():
yield importing
continue
# Attempt to locate where the module is by popping import
# segments until we find one that actually exists.
import_segments = _split_mod(importing)
prefix_segments = _split_mod(prefix)
while len(import_segments):
# Attempt to locate where the module is by popping import
# segments until we find one that actually exists.
exists, _mod_path = _get_mod_path(import_segments, base=BASE_MOD)
if exists:
break
else:
import_segments.pop()
prefix_segments = _split_mod(prefix)
if not import_segments or len(import_segments) < len(prefix_segments):
raise IOError("Unable to find import '%s'; line %s from file"
" '%s'" % (importing, i + 1, srcfile))
@@ -368,6 +381,7 @@ def _find_import_modules(srcfile):
def _build_dependency_tree():
dep_tree = {}
root_mods = {}
file_paths = []
for dirpath, _tmp, filenames in os.walk(BASE_MOD):
for filename in [x for x in filenames if x.endswith('.py')]:
if dirpath == BASE_MOD:
@@ -384,9 +398,14 @@ def _build_dependency_tree():
root_mods[mod_name] = os.path.join(dirpath, filename)
else:
filepath = os.path.join(dirpath, filename)
dep_list = dep_tree.setdefault(mod_name, [])
dep_list.extend([x for x in _find_import_modules(filepath)
if x != mod_name and x not in dep_list])
file_paths.append((filepath, mod_name))
# Analyze the individual files dependencies after we know exactly what the
# modules are so that we can find those modules if an individual file
# imports a module instead of a file.
for filepath, mod_name in file_paths:
dep_list = dep_tree.setdefault(mod_name, [])
dep_list.extend([x for x in _find_import_modules(filepath, root_mods)
if x != mod_name and x not in dep_list])
return (dep_tree, root_mods)
@@ -398,21 +417,40 @@ def _dfs_dependency_tree(dep_tree, mod_name, mod_list=[]):
return mod_list
def _complete_flow_list(flows):
def _complete_engine_list(engines):
if not engines:
return []
engine_mods = []
for engine_type in engines:
engine_type = engine_type.strip()
if not engine_type:
continue
engine_mods.append(_join_mod('engines', engine_type))
mod = _join_mod('engines', engine_type, 'engine')
exists, mod_path = _get_mod_path([mod], base=BASE_MOD)
if not exists:
raise IOError("Engine %s file not found at: %s" % (engine_type,
mod_path))
engine_mods.append(mod)
return engine_mods
def check_fetch_mod(flow):
def _complete_flow_list(flows):
if not flows:
return []
flow_mods = []
for flow in flows:
flow = flow.strip()
if not flow:
continue
mod = _join_mod('patterns', flow)
exists, mod_path = _get_mod_path([mod], base=BASE_MOD)
if not exists:
raise IOError("Flow %s file not found at: %s" % (flow, mod_path))
return mod
flow_mods = []
for f in flows:
f = f.strip()
if not f:
continue
flow_mods.append(check_fetch_mod(f))
if flow in BLACK_LISTED:
raise IOError("Flow %s is currently disallowed until further"
" notice" % (flow))
flow_mods.append(mod)
return flow_mods
@@ -498,7 +536,7 @@ def main(argv):
# be copied over. Later add more as needed.
prims = 0
for k in ALLOWED_PRIMITIVES:
prims += len(primitive_types[k])
prims += len(primitive_types.get(k, []))
if prims <= 0:
allowed = ", ".join(sorted(ALLOWED_PRIMITIVES))
print("A list of primitives to copy is required "
@@ -558,6 +596,7 @@ def main(argv):
_rm_tree(old_base)
find_what = _complete_flow_list(primitive_types.pop('flow', []))
find_what.extend(_complete_engine_list(primitive_types.get('engines', [])))
find_what.extend(primitive_types.keys())
find_what = [f for f in _uniq_itr(find_what)]
copy_what, root_mods = _complete_module_list(find_what)