start refactoring validation code

We need to expand the validation code to include branches. Rather than
continuing to grow the logic in one very large function, start splitting
it up.

Change-Id: Ie8b3fe7c0ce257bb9920221d45ce6e963de0944e
Signed-off-by: Doug Hellmann <doug@doughellmann.com>
Doug Hellmann
2016-11-23 14:51:04 -05:00
parent 972dcee54c
commit b0c2149d48
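
The shape of the change: main() becomes a thin driver that loads each deliverable file and hands the parsed data to focused validate_*() helpers, each of which records problems by appending to shared warnings and errors lists. A minimal standalone sketch of that pattern follows (simplified, hypothetical signatures and checks for illustration only; the committed code is in the diff below, and the sketch assumes PyYAML and a deliverables/ directory layout):

import glob

import yaml  # assumes PyYAML is installed


def validate_metadata(deliverable_info, filename, warnings, errors):
    """Check deliverable-level metadata, recording problems instead of raising."""
    if not deliverable_info.get('team'):
        errors.append('%s: no team specified' % filename)


def validate_releases(deliverable_info, filename, warnings, errors):
    """Check the releases list for the deliverable."""
    if not deliverable_info.get('releases'):
        warnings.append('%s: no releases listed yet' % filename)


def main():
    warnings = []
    errors = []
    for filename in glob.glob('deliverables/*/*.yaml'):
        with open(filename, 'r') as f:
            deliverable_info = yaml.safe_load(f.read())
        # Every validator shares the same accumulators, so new checks
        # (like the coming branch validation) become new functions
        # without growing the driver loop.
        validate_metadata(deliverable_info, filename, warnings, errors)
        validate_releases(deliverable_info, filename, warnings, errors)
    print('%s warnings, %s errors' % (len(warnings), len(errors)))
    return 1 if errors else 0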


@@ -64,57 +64,9 @@ def is_a_hash(val):
     return re.search('^[a-f0-9]{40}$', val, re.I) is not None


-def main():
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        '--no-cleanup',
-        dest='cleanup',
-        default=True,
-        action='store_false',
-        help='do not remove temporary files',
-    )
-    parser.add_argument(
-        'input',
-        nargs='*',
-        help=('YAML files to validate, defaults to '
-              'files changed in the latest commit'),
-    )
-    args = parser.parse_args()
-    filenames = args.input or gitutils.find_modified_deliverable_files()
-    if not filenames:
-        print('no modified deliverable files, validating all releases from %s'
-              % defaults.RELEASE)
-        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')
-    zuul_layout = project_config.get_zuul_layout_data()
-    team_data = governance.get_team_data()
-    errors = []
-    warnings = []
-    workdir = tempfile.mkdtemp(prefix='releases-')
-    print('creating temporary files in %s' % workdir)
-    def cleanup_workdir():
-        if args.cleanup:
-            try:
-                shutil.rmtree(workdir)
-            except:
-                pass
-        else:
-            print('not cleaning up %s' % workdir)
-    atexit.register(cleanup_workdir)
-    for filename in filenames:
-        print('\nChecking %s' % filename)
-        if not os.path.isfile(filename):
-            print("File was deleted, skipping.")
-            continue
-        with open(filename, 'r') as f:
-            deliverable_info = yaml.load(f.read())
+def validate_metadata(deliverable_info, filename, team_data, warnings, errors):
+    """Look at the general metadata in the deliverable file.
+    """
     # Look for the launchpad project
     try:
         lp_name = deliverable_info['launchpad']
@@ -138,9 +90,6 @@ def main():
         warnings.append('Team %r in %s not in governance data' %
                         (deliverable_info['team'], filename))
-    # Look for the release-type
-    release_type = deliverable_info.get('release-type', 'std')
     # Make sure the release notes page exists, if it is specified.
     if 'release-notes' in deliverable_info:
         notes_link = deliverable_info['release-notes']
@@ -160,25 +109,6 @@ def main():
     else:
         print('no release-notes specified')
-    series_name = os.path.basename(
-        os.path.dirname(filename)
-    )
-    # Determine the release model. Don't require independent
-    # projects to redundantly specify that they are independent by
-    # including the value in their deliverablefile, but everyone
-    # else must provide a valid value.
-    is_independent = (series_name == '_independent')
-    if is_independent:
-        release_model = 'independent'
-    else:
-        release_model = deliverable_info.get('release-model', 'UNSPECIFIED')
-    if release_model not in _VALID_MODELS:
-        errors.append(
-            'Unknown release model %r for %s, must be one of %r' %
-            (release_model, filename, sorted(list(_VALID_MODELS)))
-        )
     # Determine the deliverable type. Require an explicit value.
     deliverable_type = deliverable_info.get('type')
     if not deliverable_type:
@@ -192,13 +122,39 @@ def main():
             (deliverable_type, filename, sorted(list(_VALID_TYPES)))
         )


+def validate_releases(deliverable_info, zuul_layout,
+                      series_name, filename,
+                      workdir,
+                      warnings, errors):
+    """Apply validation rules to the 'releases' list for the deliverable.
+    """
+    # Determine the release model. Don't require independent
+    # projects to redundantly specify that they are independent by
+    # including the value in their deliverablefile, but everyone
+    # else must provide a valid value.
+    is_independent = (series_name == '_independent')
+    if is_independent:
+        release_model = 'independent'
+    else:
+        release_model = deliverable_info.get('release-model',
+                                             'UNSPECIFIED')
+    if release_model not in _VALID_MODELS:
+        errors.append(
+            'Unknown release model %r for %s, must be one of %r' %
+            (release_model, filename, sorted(list(_VALID_MODELS)))
+        )
     # Remember which entries are new so we can verify that they
     # appear at the end of the file.
     new_releases = {}
+    release_type = deliverable_info.get('release-type', 'std')
+    link_mode = deliverable_info.get('artifact-link-mode', 'tarball')
     prev_version = None
     prev_projects = set()
-    link_mode = deliverable_info.get('artifact-link-mode', 'tarball')
     for release in deliverable_info['releases']:
         for project in release['projects']:
@@ -350,9 +306,15 @@ def main():
                 print(msg)
                 errors.append(msg)


+def validate_new_releases(deliverable_info, filename, series_name,
+                          team_data,
+                          warnings, errors):
+    """Apply validation rules that only apply to the current series.
+    """
     # Some rules only apply to the most current release.
     if series_name != defaults.RELEASE:
-        continue
+        return

     # Rules for only the current release cycle.
     final_release = deliverable_info['releases'][-1]
@@ -364,6 +326,7 @@ def main():
             deliverable_name=deliverable_name,
         )
     )
+    link_mode = deliverable_info.get('artifact-link-mode', 'tarball')
     if link_mode != 'none' and not expected_repos:
         msg = ('unable to find deliverable %s in the governance list' %
                deliverable_name)
@@ -389,6 +352,87 @@ def main():
         print(msg)
         warnings.append(msg)


+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '--no-cleanup',
+        dest='cleanup',
+        default=True,
+        action='store_false',
+        help='do not remove temporary files',
+    )
+    parser.add_argument(
+        'input',
+        nargs='*',
+        help=('YAML files to validate, defaults to '
+              'files changed in the latest commit'),
+    )
+    args = parser.parse_args()
+
+    filenames = args.input or gitutils.find_modified_deliverable_files()
+    if not filenames:
+        print('no modified deliverable files, validating all releases from %s'
+              % defaults.RELEASE)
+        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')
+
+    zuul_layout = project_config.get_zuul_layout_data()
+
+    team_data = governance.get_team_data()
+
+    errors = []
+    warnings = []
+
+    workdir = tempfile.mkdtemp(prefix='releases-')
+    print('creating temporary files in %s' % workdir)
+
+    def cleanup_workdir():
+        if args.cleanup:
+            try:
+                shutil.rmtree(workdir)
+            except:
+                pass
+        else:
+            print('not cleaning up %s' % workdir)
+    atexit.register(cleanup_workdir)
+
+    for filename in filenames:
+        print('\nChecking %s' % filename)
+        if not os.path.isfile(filename):
+            print("File was deleted, skipping.")
+            continue
+        with open(filename, 'r') as f:
+            deliverable_info = yaml.load(f.read())
+
+        series_name = os.path.basename(
+            os.path.dirname(filename)
+        )
+
+        validate_metadata(
+            deliverable_info,
+            filename,
+            team_data,
+            warnings,
+            errors,
+        )
+        validate_releases(
+            deliverable_info,
+            zuul_layout,
+            series_name,
+            filename,
+            workdir,
+            warnings,
+            errors,
+        )
+        validate_new_releases(
+            deliverable_info,
+            filename,
+            series_name,
+            team_data,
+            warnings,
+            errors,
+        )

     if warnings:
         print('\n\n%s warnings found' % len(warnings))
         for w in warnings: