Report config errors when removing cross-repo jobs
Zuul can operate with a broken configuration (i.e., a config which references objects which do not exist). To avoid leaving misleading error messages on proposed config changes, we suppress reports of errors which did not originate in the same repo-branch of the change. However, this means that a change which removes a job defined in one repo which is still used in another repo will not report an error (because the usage of the job is in a repo other than the one to which the change is being proposed). To correct this, we attempt to uniquely identify errors which are already present in the layout (whether that layout is one generated for a parent change, or the actual running layout), and avoid reporting those errors to the user. In other words, we attempt only to report new errors generated by the proposed layout. This isn't perfect, as it relies on a bevy of heuristics to make a unique key for each error, but that should generally work for this purpose. However, as a special case, we still report any errors for the current project (as in that case, we're very likely to end up with duplicate keys as someone attempts to correct an existing error). This adds a new test for the desired behavior. This also removes the length restriction on the number of errors we store, as we need to keep all of them in order to be able to compare their keys. However, we still only report one error to the user, and we limit the number of errors that we log. The LoadingErrors class is slightly more formally structured now to enable more explicit arguments. Change-Id: I885a2d3ab6d60402425b879e639c1e597717622a
This commit is contained in:
parent
d09e46e91a
commit
050c640b0b
|
@ -0,0 +1,2 @@
|
|||
- hosts: all
|
||||
tasks: []
|
|
@ -15,6 +15,16 @@
|
|||
name: base
|
||||
parent: null
|
||||
|
||||
- job:
|
||||
name: central-test
|
||||
run: playbooks/job.yaml
|
||||
|
||||
- project:
|
||||
name: common-config
|
||||
check:
|
||||
jobs:
|
||||
- noop
|
||||
|
||||
- project:
|
||||
name: org/project2
|
||||
check:
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
- project:
|
||||
check:
|
||||
jobs: []
|
||||
jobs:
|
||||
- central-test
|
||||
|
|
|
@ -2460,7 +2460,7 @@ class TestBrokenConfig(ZuulTestCase):
|
|||
"An error should have been stored")
|
||||
self.assertIn(
|
||||
"Zuul encountered a syntax error",
|
||||
str(tenant.layout.loading_errors[0][1]))
|
||||
str(tenant.layout.loading_errors[0].error))
|
||||
|
||||
def test_dynamic_ignore(self):
|
||||
# Verify dynamic config behaviors inside a tenant broken config
|
||||
|
@ -2594,6 +2594,54 @@ class TestBrokenConfig(ZuulTestCase):
|
|||
self.assertHistory([
|
||||
dict(name='project-test2', result='SUCCESS', changes='1,1')])
|
||||
|
||||
def test_dynamic_fail_cross_repo(self):
    """Verify that removing a job used by another repo reports an error.

    Inside a tenant with a pre-existing broken configuration, a
    change to common-config that deletes the 'central-test' job
    (still used by org/project2) must fail config validation even
    though the job's usage lives in a different repo than the change.
    """
    tenant = self.sched.abide.tenants.get('tenant-one')
    # There is a pre-existing configuration error in this tenant.
    # NOTE: assertEquals is a deprecated unittest alias; use
    # assertEqual consistently (the rest of this test already does).
    self.assertEqual(
        len(tenant.layout.loading_errors), 1,
        "An error should have been stored")

    # Inside a broken tenant configuration environment, remove a
    # job used in another repo and verify that an error is
    # reported despite the error being in a repo other than the
    # change.
    in_repo_conf = textwrap.dedent(
        """
        - pipeline:
            name: check
            manager: independent
            trigger:
              gerrit:
                - event: patchset-created
            success:
              gerrit:
                Verified: 1
            failure:
              gerrit:
                Verified: -1
        - job:
            name: base
            parent: null

        - project:
            name: common-config
            check:
              jobs:
                - noop
        """)

    file_dict = {'zuul.yaml': in_repo_conf}
    A = self.fake_gerrit.addFakeChange('common-config', 'master', 'A',
                                       files=file_dict)
    self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
    self.waitUntilSettled()
    self.assertEqual(A.reported, 1,
                     "A should report failure")
    self.assertEqual(A.patchsets[0]['approvals'][0]['value'], "-1")
    # The missing-job error originates in org/project2, not in the
    # changed repo, but must still be surfaced to the user.
    self.assertIn('Job central-test not defined', A.messages[0],
                  "A should have failed the check pipeline")
|
||||
|
||||
|
||||
class TestProjectKeys(ZuulTestCase):
|
||||
# Test that we can generate project keys
|
||||
|
|
|
@ -233,7 +233,7 @@ def configuration_exceptions(stanza, conf, accumulator):
|
|||
content=indent(start_mark.snippet.rstrip()),
|
||||
start_mark=str(start_mark))
|
||||
|
||||
accumulator.append((context, m))
|
||||
accumulator.addError(context, start_mark, m)
|
||||
|
||||
|
||||
@contextmanager
|
||||
|
@ -269,7 +269,7 @@ def reference_exceptions(stanza, obj, accumulator):
|
|||
content=indent(start_mark.snippet.rstrip()),
|
||||
start_mark=str(start_mark))
|
||||
|
||||
accumulator.append((context, m))
|
||||
accumulator.addError(context, start_mark, m)
|
||||
|
||||
|
||||
class ZuulMark(object):
|
||||
|
@ -1277,7 +1277,7 @@ class TenantParser(object):
|
|||
self._resolveShadowProjects(tenant, tpc)
|
||||
|
||||
# We prepare a stack to store config loading issues
|
||||
loading_errors = model.LoadingErrors(length=model.MAX_ERROR_LENGTH)
|
||||
loading_errors = model.LoadingErrors()
|
||||
|
||||
# Start by fetching any YAML needed by this tenant which isn't
|
||||
# already cached. Full reconfigurations start with an empty
|
||||
|
@ -1572,7 +1572,7 @@ class TenantParser(object):
|
|||
r = safe_load_yaml(data, source_context)
|
||||
config.extend(r)
|
||||
except ConfigurationSyntaxError as e:
|
||||
loading_errors.append((source_context, e))
|
||||
loading_errors.addError(source_context, None, e)
|
||||
return config
|
||||
|
||||
def filterConfigProjectYAML(self, data):
|
||||
|
@ -1601,8 +1601,9 @@ class TenantParser(object):
|
|||
try:
|
||||
pcontext.pragma_parser.fromYaml(config_pragma)
|
||||
except ConfigurationSyntaxError as e:
|
||||
loading_errors.append(
|
||||
(config_pragma['_source_context'], e))
|
||||
loading_errors.addError(
|
||||
config_pragma['_source_context'],
|
||||
config_pragma['_start_mark'], e)
|
||||
|
||||
for config_pipeline in unparsed_config.pipelines:
|
||||
classes = self._getLoadClasses(tenant, config_pipeline)
|
||||
|
@ -1893,8 +1894,8 @@ class ConfigLoader(object):
|
|||
"configuration loading" % (
|
||||
len(tenant.layout.loading_errors), tenant.name))
|
||||
# Log accumulated errors
|
||||
for err in tenant.layout.loading_errors.errors:
|
||||
self.log.warning(str(err[1]))
|
||||
for err in tenant.layout.loading_errors.errors[:10]:
|
||||
self.log.warning(err.error)
|
||||
return abide
|
||||
|
||||
def reloadTenant(self, project_key_dir, abide, tenant):
|
||||
|
@ -1915,8 +1916,8 @@ class ConfigLoader(object):
|
|||
"configuration re-loading" % (
|
||||
len(new_tenant.layout.loading_errors), tenant.name))
|
||||
# Log accumulated errors
|
||||
for err in new_tenant.layout.loading_errors.errors:
|
||||
self.log.warning(str(err[1]))
|
||||
for err in new_tenant.layout.loading_errors.errors[:10]:
|
||||
self.log.warning(err.error)
|
||||
return new_abide
|
||||
|
||||
def _loadDynamicProjectData(self, config, project,
|
||||
|
@ -1983,7 +1984,7 @@ class ConfigLoader(object):
|
|||
def createDynamicLayout(self, tenant, files,
|
||||
include_config_projects=False,
|
||||
scheduler=None, connections=None):
|
||||
loading_errors = model.LoadingErrors(length=model.MAX_ERROR_LENGTH)
|
||||
loading_errors = model.LoadingErrors()
|
||||
if include_config_projects:
|
||||
config = model.ParsedConfig()
|
||||
for project in tenant.config_projects:
|
||||
|
|
|
@ -419,6 +419,15 @@ class PipelineManager(object):
|
|||
self.sched.connections, self.sched, None)
|
||||
|
||||
self.log.debug("Loading dynamic layout")
|
||||
|
||||
parent_layout = None
|
||||
parent = item.item_ahead
|
||||
while not parent_layout and parent:
|
||||
parent_layout = parent.layout
|
||||
parent = parent.item_ahead
|
||||
if not parent_layout:
|
||||
parent_layout = item.pipeline.tenant.layout
|
||||
|
||||
(trusted_updates, untrusted_updates) = item.includesConfigUpdates()
|
||||
build_set = item.current_build_set
|
||||
trusted_layout_verified = False
|
||||
|
@ -472,11 +481,13 @@ class PipelineManager(object):
|
|||
# the current item.change and only report
|
||||
# if one is found.
|
||||
for err in layout.loading_errors.errors:
|
||||
context = err[0]
|
||||
if context.project.name == item.change.project.name:
|
||||
if context.branch == item.change.branch:
|
||||
item.setConfigError(str(err[1]))
|
||||
return None
|
||||
econtext = err.key.context
|
||||
if ((err.key not in
|
||||
parent_layout.loading_errors.error_keys) or
|
||||
(econtext.project == item.change.project.name and
|
||||
econtext.branch == item.change.branch)):
|
||||
item.setConfigError(err.error)
|
||||
return None
|
||||
self.log.info(
|
||||
"Configuration syntax error not related to "
|
||||
"change context. Error won't be reported.")
|
||||
|
|
|
@ -81,23 +81,70 @@ NODE_STATES = set([STATE_BUILDING,
|
|||
STATE_HOLD,
|
||||
STATE_DELETING])
|
||||
|
||||
MAX_ERROR_LENGTH = 10
|
||||
|
||||
class ConfigurationErrorKey(object):
    """A class which attempts to uniquely identify configuration errors
    based on their file location.  It's not perfect, but it's usually
    sufficient to determine whether we should show an error to a user.
    """

    def __init__(self, context, mark, error_text):
        # context: the source context (project/branch/path) the error
        # came from, or None when unknown.
        # mark: the mark pointing at the offending config text, or None.
        # error_text: the stringified error message.
        self.context = context
        self.mark = mark
        self.error_text = error_text
        elements = []
        if context:
            elements.extend([
                context.project.canonical_name,
                context.branch,
                context.path,
            ])
        else:
            # Keep the element count fixed so keys with and without
            # context hash over the same structure.
            elements.extend([None, None, None])
        if mark:
            elements.extend([
                mark.line,
                mark.snippet,
            ])
        else:
            elements.extend([None, None])
        elements.append(error_text)
        # Precompute the hash so keys are cheap to store in sets.
        self._hash = hash('|'.join([str(x) for x in elements]))

    def __hash__(self):
        return self._hash

    def __ne__(self, other):
        return not self.__eq__(other)

    def __eq__(self, other):
        if not isinstance(other, ConfigurationErrorKey):
            return False
        if (self.context != other.context or
                self.error_text != other.error_text):
            return False
        # Either mark may be None (the constructor explicitly allows
        # it), so guard before dereferencing mark attributes; the
        # previous implementation raised AttributeError here.
        if self.mark is None or other.mark is None:
            return self.mark is other.mark
        return (self.mark.line == other.mark.line and
                self.mark.snippet == other.mark.snippet)


class ConfigurationError(object):
    """A configuration error"""

    def __init__(self, context, mark, error):
        # Stringify the error immediately so the original exception
        # object does not need to be kept alive.
        self.error = str(error)
        self.key = ConfigurationErrorKey(context, mark, self.error)


class LoadingErrors(object):
    """A configuration error accumulator attached to a layout object
    """
    def __init__(self):
        self.errors = []
        # Set of ConfigurationErrorKey objects used to decide whether
        # an error was already present in a previous layout.
        self.error_keys = set()

    def addError(self, context, mark, error):
        """Record an error along with its deduplication key."""
        e = ConfigurationError(context, mark, error)
        self.errors.append(e)
        self.error_keys.add(e.key)

    def __getitem__(self, index):
        return self.errors[index]

    def __len__(self):
        # Callers count errors with len(layout.loading_errors).
        return len(self.errors)
|
||||
|
@ -2882,8 +2929,7 @@ class Layout(object):
|
|||
self.nodesets = {}
|
||||
self.secrets = {}
|
||||
self.semaphores = {}
|
||||
self.loading_errors = LoadingErrors(
|
||||
length=MAX_ERROR_LENGTH)
|
||||
self.loading_errors = LoadingErrors()
|
||||
|
||||
def getJob(self, name):
|
||||
if name in self.jobs:
|
||||
|
|
|
@ -364,6 +364,6 @@ class RPCListener(object):
|
|||
output = []
|
||||
for err in tenant.layout.loading_errors.errors:
|
||||
output.append({
|
||||
'source_context': err[0].toDict(),
|
||||
'error': err[1]})
|
||||
'source_context': err.key.context.toDict(),
|
||||
'error': err.error})
|
||||
job.sendWorkComplete(json.dumps(output))
|
||||
|
|
Loading…
Reference in New Issue