Refactor configuration error handling
This refactors the config error handling to be based on nested context managers that add increasingly specific information about error locations. In other words, when we start processing a Job object, we will:

    with ParseContext.errorContext(info about job):
        do some stuff
        with ParseContext.errorContext(info about job attr):
            do some stuff regarding the job attribute
        with ParseContext.errorContext(next job attr):
            do stuff with a different attr

We store a stack of error contexts on the parse context, and at any point we can access the accumulator for the most recent one via ParseContext.accumulator in order to add a warning or error. If we have an attribute line number we use it; otherwise we fall back to the object-level information.

We also collapse the exception handlers into a single context manager which catches exceptions and adds them to the accumulator. This lets us decide when to catch an exception and skip to the next phase of processing separately from where we narrow our focus to a new object or attribute. These two actions often happen together, but not always.

This should simplify the configloader code and make it easier to keep its error handling consistent.

Change-Id: I180f9b271acd4b62039003fa8b38900f9863bad8
Parent: a29b7e75af
Commit: 0ab44e153c
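To illustrate the pattern described in the commit message, here is a minimal, self-contained sketch of the intended usage. It is not Zuul's actual ParseContext implementation: the Accumulator class and the catchErrors() helper are assumed stand-ins for the error-context stack, the ParseContext.accumulator property, the confAttr() attribute helper, and the collapsed exception handler that the message and the diff below refer to.

    # Illustrative sketch only -- a simplified stand-in for the ParseContext
    # behaviour described in the commit message, not Zuul's real implementation.
    # The Accumulator class and catchErrors() are assumptions for this example.
    from contextlib import contextmanager


    class Accumulator:
        def __init__(self, errors, location):
            self._errors = errors      # shared list owned by the ParseContext
            self.location = location   # most specific location known here

        def addError(self, err):
            self._errors.append((self.location, err))


    class ParseContext:
        def __init__(self):
            self.errors = []
            self._stack = [Accumulator(self.errors, '<config>')]

        @property
        def accumulator(self):
            # Accumulator for the most recently entered error context.
            return self._stack[-1]

        @contextmanager
        def errorContext(self, location):
            # Narrow the focus to an object or attribute; no exception handling.
            self._stack.append(Accumulator(self.errors, location))
            try:
                yield self.accumulator
            finally:
                self._stack.pop()

        @contextmanager
        def confAttr(self, conf, key, default=None):
            # Narrow the focus to one attribute of a config dict and yield its
            # value, mirroring the confAttr() calls in the diff below.
            with self.errorContext('attribute: %s' % key):
                yield conf.get(key, default)

        @contextmanager
        def catchErrors(self):
            # The single collapsed exception handler: decide here when to skip
            # to the next phase, independently of where the focus was narrowed.
            try:
                yield
            except Exception as e:
                self.accumulator.addError(e)


    pcontext = ParseContext()
    job = {'name': 'garbage-job', 'timeout': 'not-a-number'}
    with pcontext.errorContext("job '%s'" % job['name']):
        with pcontext.confAttr(job, 'timeout') as value, pcontext.catchErrors():
            timeout = int(value)    # raises ValueError; recorded, not propagated
        with pcontext.confAttr(job, 'run') as value, pcontext.catchErrors():
            run = value             # processing continues with the next attribute
    print(pcontext.errors)          # -> [('attribute: timeout', ValueError(...))]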
@@ -233,10 +233,10 @@ class TestGerritWeb(ZuulTestCase):
 """
 - job:
 name: garbage-job
 %s
 description: %s
 garbage: True
 """
-) % ('\n' * 16384)
+) % ('x' * 16384)

 file_dict = {'.zuul.yaml': in_repo_conf}
 A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
@@ -225,7 +225,7 @@ class TestJob(BaseTestCase):
 'run': 'playbooks/python27.yaml'}}
 ]
 }
-}, None)
+})
 self.layout.addProjectConfig(project_config)

 change = model.Change(self.project)
@@ -290,7 +290,7 @@ class TestJob(BaseTestCase):
 'python27',
 ]
 }
-}, None)
+})
 self.layout.addProjectConfig(project_config)

 change = model.Change(self.project)
@@ -363,7 +363,7 @@ class TestJob(BaseTestCase):
 'job'
 ]
 }
-}, None)
+})

 self.layout.addProjectConfig(project_config)
@@ -442,7 +442,7 @@ class TestJob(BaseTestCase):
 'gate': {
 'jobs': ['python27'],
 }
-}, None)
+})
 self.layout.addProjectConfig(project_config)

 change = model.Change(self.project)
@@ -5423,15 +5423,13 @@ class TestPCREDeprecationGerrit(ZuulTestCase):
 idx = 0
 self.assertEqual(errors[idx].severity, SEVERITY_WARNING)
 self.assertEqual(errors[idx].name, 'Regex Deprecation')
-self.assertIn('pipeline stanza', errors[idx].error)
-self.assertIn('name: gate', errors[idx].error)
+self.assertIn('"gate" pipeline stanza', errors[idx].error)

 # Pipeline gerrit require approval
 idx = 1
 self.assertEqual(errors[idx].severity, SEVERITY_WARNING)
 self.assertEqual(errors[idx].name, 'Regex Deprecation')
-self.assertIn('pipeline stanza', errors[idx].error)
-self.assertIn('name: post', errors[idx].error)
+self.assertIn('"post" pipeline stanza', errors[idx].error)


 class TestPCREDeprecationGit(ZuulTestCase):
(File diff suppressed because it is too large)
@@ -129,7 +129,7 @@ class TriggerInterface(object, metaclass=abc.ABCMeta):
 """

 @abc.abstractmethod
-def getTrigger(self, connection, config=None, loading_errors=None):
+def getTrigger(self, connection, config=None):
 """Create and return a new trigger object.

 This method is required by the interface.
@@ -142,8 +142,6 @@ class TriggerInterface(object, metaclass=abc.ABCMeta):
 or None.
 :arg dict config: The configuration information supplied along
 with the trigger in the layout.
-:arg dict loading_errors: A LoadingErrors instance to accumulate
-errors or warnings.

 :returns: A new trigger object.
 :rtype: :py:class:`~zuul.trigger.BaseTrigger`
@@ -258,7 +258,7 @@ class GerritEventFilter(EventFilter):
 refs=[], event_approvals={}, comments=[], emails=[],
 usernames=[], required_approvals=[], reject_approvals=[],
 uuid=None, scheme=None, ignore_deletes=True,
-require=None, reject=None, error_accumulator=None):
+require=None, reject=None, parse_context=None):

 EventFilter.__init__(self, connection_name, trigger)
@@ -270,13 +270,13 @@ class GerritEventFilter(EventFilter):

 if require:
 self.require_filter = GerritRefFilter.requiresFromConfig(
-connection_name, require, error_accumulator)
+connection_name, require, parse_context)
 else:
 self.require_filter = None

 if reject:
 self.reject_filter = GerritRefFilter.rejectFromConfig(
-connection_name, reject, error_accumulator)
+connection_name, reject, parse_context)
 else:
 self.reject_filter = None
@@ -438,7 +438,7 @@ class GerritEventFilter(EventFilter):

 class GerritRefFilter(RefFilter):
 def __init__(self, connection_name,
-error_accumulator,
+parse_context,
 open=None, reject_open=None,
 current_patchset=None, reject_current_patchset=None,
 wip=None, reject_wip=None,
@@ -448,10 +448,10 @@ class GerritRefFilter(RefFilter):

 self._required_approvals = copy.deepcopy(required_approvals)
 self.required_approvals = self._tidy_approvals(
-self._required_approvals, error_accumulator)
+self._required_approvals, parse_context)
 self._reject_approvals = copy.deepcopy(reject_approvals)
 self.reject_approvals = self._tidy_approvals(
-self._reject_approvals, error_accumulator)
+self._reject_approvals, parse_context)
 self.statuses = statuses
 self.reject_statuses = reject_statuses
@@ -469,10 +469,10 @@ class GerritRefFilter(RefFilter):
 self.current_patchset = current_patchset

 @classmethod
-def requiresFromConfig(cls, connection_name, config, error_accumulator):
+def requiresFromConfig(cls, connection_name, config, parse_context):
 return cls(
 connection_name=connection_name,
-error_accumulator=error_accumulator,
+parse_context=parse_context,
 open=config.get('open'),
 current_patchset=config.get('current-patchset'),
 wip=config.get('wip'),
@@ -481,10 +481,10 @@ class GerritRefFilter(RefFilter):
 )

 @classmethod
-def rejectFromConfig(cls, connection_name, config, error_accumulator):
+def rejectFromConfig(cls, connection_name, config, parse_context):
 return cls(
 connection_name=connection_name,
-error_accumulator=error_accumulator,
+parse_context=parse_context,
 reject_open=config.get('open'),
 reject_current_patchset=config.get('current-patchset'),
 reject_wip=config.get('wip'),
@@ -565,17 +565,20 @@ class GerritRefFilter(RefFilter):

 return True

-def _tidy_approvals(self, approvals, error_accumulator):
+def _tidy_approvals(self, approvals, parse_context):
 for a in approvals:
-for k, v in a.items():
-if k == 'username':
-a['username'] = make_regex(v, error_accumulator)
-elif k == 'email':
-a['email'] = make_regex(v, error_accumulator)
-elif k == 'newer-than':
-a[k] = time_to_seconds(v)
-elif k == 'older-than':
-a[k] = time_to_seconds(v)
+if 'username' in a:
+with parse_context.confAttr(a, 'username') as v:
+a['username'] = make_regex(v, parse_context)
+if 'email' in a:
+with parse_context.confAttr(a, 'email') as v:
+a['email'] = make_regex(v, parse_context)
+if 'newer-than' in a:
+with parse_context.confAttr(a, 'newer-than') as v:
+a['newer-than'] = time_to_seconds(v)
+if 'older-than' in a:
+with parse_context.confAttr(a, 'older-than') as v:
+a['older-than'] = time_to_seconds(v)
 return approvals

 def _match_approval_required_approval(self, rapproval, approval):
@@ -225,16 +225,16 @@ class GerritSource(BaseSource):
 def _getGitwebUrl(self, project, sha=None):
 return self.connection._getGitwebUrl(project, sha)

-def getRequireFilters(self, config, error_accumulator):
+def getRequireFilters(self, config, parse_context):
 f = GerritRefFilter.requiresFromConfig(
 self.connection.connection_name,
-config, error_accumulator)
+config, parse_context)
 return [f]

-def getRejectFilters(self, config, error_accumulator):
+def getRejectFilters(self, config, parse_context):
 f = GerritRefFilter.rejectFromConfig(
 self.connection.connection_name,
-config, error_accumulator)
+config, parse_context)
 return [f]

 def getRefForChange(self, change):
@@ -23,7 +23,6 @@ from zuul.driver.util import (
 to_list,
 make_regex,
 ZUUL_REGEX,
-get_conf_attr,
 )
 from zuul.configloader import DeprecationWarning
@@ -45,7 +44,8 @@ class GerritTrigger(BaseTrigger):
 log = logging.getLogger("zuul.GerritTrigger")

 def getEventFilters(self, connection_name, trigger_conf,
-error_accumulator):
+parse_context):
+pcontext = parse_context
 efilters = []
 for trigger in to_list(trigger_conf):
 approvals = {}
@@ -53,34 +53,37 @@ class GerritTrigger(BaseTrigger):
 for key, val in approval_dict.items():
 approvals[key] = val
 # Backwards compat for *_filter versions of these args
-comments = to_list(trigger.get('comment'))
-if not comments:
-comments = to_list(trigger.get('comment_filter'))
-emails = to_list(trigger.get('email'))
-if not emails:
-emails = to_list(trigger.get('email_filter'))
-usernames = to_list(trigger.get('username'))
-if not usernames:
-usernames = to_list(trigger.get('username_filter'))
-ignore_deletes = trigger.get('ignore-deletes', True)
-if 'require-approval' in trigger:
-acc, _ = get_conf_attr(trigger, 'require-approval',
-error_accumulator)
-acc.addError(GerritRequireApprovalDeprecation())
-if 'reject-approval' in trigger:
-acc, _ = get_conf_attr(trigger, 'reject-approval',
-error_accumulator)
-acc.addError(GerritRejectApprovalDeprecation())
+attrname = 'comment' if 'comment' in trigger else 'comment_filter'
+with pcontext.confAttr(trigger, attrname) as attr:
+comments = [make_regex(x, pcontext)
+for x in to_list(attr)]
+attrname = 'email' if 'email' in trigger else 'email_filter'
+with pcontext.confAttr(trigger, attrname) as attr:
+emails = [make_regex(x, pcontext)
+for x in to_list(attr)]
+attrname = ('username' if 'username' in trigger
+else 'username_filter')
+with pcontext.confAttr(trigger, attrname) as attr:
+usernames = [make_regex(x, pcontext)
+for x in to_list(attr)]

-types = [make_regex(x, error_accumulator)
-for x in to_list(trigger['event'])]
-branches = [make_regex(x, error_accumulator)
-for x in to_list(trigger.get('branch'))]
-refs = [make_regex(x, error_accumulator)
-for x in to_list(trigger.get('ref'))]
-comments = [make_regex(x, error_accumulator) for x in comments]
-emails = [make_regex(x, error_accumulator) for x in emails]
-usernames = [make_regex(x, error_accumulator) for x in usernames]
+with pcontext.confAttr(trigger, 'event') as attr:
+types = [make_regex(x, pcontext) for x in to_list(attr)]
+with pcontext.confAttr(trigger, 'branch') as attr:
+branches = [make_regex(x, pcontext) for x in to_list(attr)]
+with pcontext.confAttr(trigger, 'ref') as attr:
+refs = [make_regex(x, pcontext) for x in to_list(attr)]
+
+ignore_deletes = trigger.get('ignore-deletes', True)
+
+if 'require-approval' in trigger:
+with pcontext.confAttr(trigger, 'require-approval'):
+pcontext.accumulator.addError(
+GerritRequireApprovalDeprecation())
+if 'reject-approval' in trigger:
+with pcontext.confAttr(trigger, 'reject-approval'):
+pcontext.accumulator.addError(
+GerritRejectApprovalDeprecation())

 f = GerritEventFilter(
 connection_name=connection_name,
@@ -103,7 +106,7 @@ class GerritTrigger(BaseTrigger):
 ignore_deletes=ignore_deletes,
 require=trigger.get('require'),
 reject=trigger.get('reject'),
-error_accumulator=error_accumulator,
+parse_context=parse_context,
 )
 efilters.append(f)
@@ -80,10 +80,10 @@ class GitSource(BaseSource):
 def getProjectOpenChanges(self, project):
 raise NotImplementedError()

-def getRequireFilters(self, config, error_accumulator):
+def getRequireFilters(self, config, parse_context):
 return []

-def getRejectFilters(self, config, error_accumulator):
+def getRejectFilters(self, config, parse_context):
 return []

 def getRefForChange(self, change):
@@ -25,12 +25,13 @@ class GitTrigger(BaseTrigger):
 log = logging.getLogger("zuul.GitTrigger")

 def getEventFilters(self, connection_name, trigger_conf,
-error_accumulator):
+parse_context):
 efilters = []
+pcontext = parse_context
 for trigger in to_list(trigger_conf):

-refs = [make_regex(x, error_accumulator)
-for x in to_list(trigger.get('ref'))]
+with pcontext.confAttr(trigger, 'ref') as attr:
+refs = [make_regex(x, pcontext)
+for x in to_list(attr)]

 f = GitEventFilter(
 connection_name=connection_name,
@@ -186,13 +186,13 @@ class GithubSource(BaseSource):
 def _ghTimestampToDate(self, timestamp):
 return time.strptime(timestamp, '%Y-%m-%dT%H:%M:%SZ')

-def getRequireFilters(self, config, error_accumulator):
+def getRequireFilters(self, config, parse_context):
 f = GithubRefFilter.requiresFromConfig(
 self.connection.connection_name,
 config)
 return [f]

-def getRejectFilters(self, config, error_accumulator):
+def getRejectFilters(self, config, parse_context):
 f = GithubRefFilter.rejectFromConfig(
 self.connection.connection_name,
 config)
@@ -37,18 +37,23 @@ class GithubTrigger(BaseTrigger):
 super().__init__(driver, connection, config=config)

 def getEventFilters(self, connection_name, trigger_config,
-error_accumulator):
+parse_context):
 efilters = []
+pcontext = parse_context
 for trigger in to_list(trigger_config):

-types = [make_regex(x, error_accumulator)
-for x in to_list(trigger['event'])]
-branches = [make_regex(x, error_accumulator)
-for x in to_list(trigger.get('branch'))]
-refs = [make_regex(x, error_accumulator)
-for x in to_list(trigger.get('ref'))]
-comments = [make_regex(x, error_accumulator)
-for x in to_list(trigger.get('comment'))]
+with pcontext.confAttr(trigger, 'event') as attr:
+types = [make_regex(x, pcontext)
+for x in to_list(attr)]
+with pcontext.confAttr(trigger, 'branch') as attr:
+branches = [make_regex(x, pcontext)
+for x in to_list(attr)]
+with pcontext.confAttr(trigger, 'ref') as attr:
+refs = [make_regex(x, pcontext)
+for x in to_list(attr)]
+with pcontext.confAttr(trigger, 'comment') as attr:
+comments = [make_regex(x, pcontext)
+for x in to_list(attr)]

 f = GithubEventFilter(
 connection_name=connection_name,
@@ -140,7 +140,7 @@ class GitlabSource(BaseSource):
 """Get the git-web url for a project."""
 raise NotImplementedError()

-def getRequireFilters(self, config, error_accumulator):
+def getRequireFilters(self, config, parse_context):
 f = GitlabRefFilter(
 connection_name=self.connection.connection_name,
 open=config.get('open'),
@@ -150,7 +150,7 @@ class GitlabSource(BaseSource):
 )
 return [f]

-def getRejectFilters(self, config, error_accumulator):
+def getRejectFilters(self, config, parse_context):
 raise NotImplementedError()

 def getRefForChange(self, change):
@@ -25,15 +25,19 @@ class GitlabTrigger(BaseTrigger):
 log = logging.getLogger("zuul.trigger.GitlabTrigger")

 def getEventFilters(self, connection_name, trigger_config,
-error_accumulator):
+parse_context):
 efilters = []
+pcontext = parse_context
 for trigger in to_list(trigger_config):
-types = [make_regex(x, error_accumulator)
-for x in to_list(trigger['event'])]
-refs = [make_regex(x, error_accumulator)
-for x in to_list(trigger.get('ref'))]
-comments = [make_regex(x, error_accumulator)
-for x in to_list(trigger.get('comment'))]
+with pcontext.confAttr(trigger, 'event') as attr:
+types = [make_regex(x, pcontext)
+for x in to_list(attr)]
+with pcontext.confAttr(trigger, 'ref') as attr:
+refs = [make_regex(x, pcontext)
+for x in to_list(attr)]
+with pcontext.confAttr(trigger, 'comment') as attr:
+comments = [make_regex(x, pcontext)
+for x in to_list(attr)]

 f = GitlabEventFilter(
 connection_name=connection_name,
@@ -144,7 +144,7 @@ class PagureSource(BaseSource):
 """Get the git-web url for a project."""
 raise NotImplementedError()

-def getRequireFilters(self, config, error_accumulator):
+def getRequireFilters(self, config, parse_context):
 f = PagureRefFilter(
 connection_name=self.connection.connection_name,
 score=config.get('score'),
@@ -155,7 +155,7 @@ class PagureSource(BaseSource):
 )
 return [f]

-def getRejectFilters(self, config, error_accumulator):
+def getRejectFilters(self, config, parse_context):
 raise NotImplementedError()

 def getRefForChange(self, change):
@@ -25,15 +25,19 @@ class PagureTrigger(BaseTrigger):
 log = logging.getLogger("zuul.trigger.PagureTrigger")

 def getEventFilters(self, connection_name, trigger_config,
-error_accumulator):
+parse_context):
 efilters = []
+pcontext = parse_context
 for trigger in to_list(trigger_config):
-types = [make_regex(x, error_accumulator)
-for x in to_list(trigger['event'])]
-refs = [make_regex(x, error_accumulator)
-for x in to_list(trigger.get('ref'))]
-comments = [make_regex(x, error_accumulator)
-for x in to_list(trigger.get('comment'))]
+with pcontext.confAttr(trigger, 'event') as attr:
+types = [make_regex(x, pcontext)
+for x in to_list(attr)]
+with pcontext.confAttr(trigger, 'ref') as attr:
+refs = [make_regex(x, pcontext)
+for x in to_list(attr)]
+with pcontext.confAttr(trigger, 'comment') as attr:
+comments = [make_regex(x, pcontext)
+for x in to_list(attr)]

 f = PagureEventFilter(
 connection_name=connection_name,
@@ -25,10 +25,10 @@ class TimerTrigger(BaseTrigger):
 name = 'timer'

 def getEventFilters(self, connection_name, trigger_conf,
-error_accumulator):
+parse_context):
 efilters = []
 for trigger in to_list(trigger_conf):
-types = [make_regex('timer', error_accumulator)]
+types = [make_regex('timer')]
 f = TimerEventFilter(connection_name=connection_name,
 trigger=self,
 types=types,
@@ -17,7 +17,7 @@

 import voluptuous as vs

-from zuul.configloader import ZUUL_REGEX, make_regex, get_conf_attr # noqa
+from zuul.configloader import ZUUL_REGEX, make_regex # noqa


 def time_to_seconds(s):
@@ -30,13 +30,16 @@ class ZuulTrigger(BaseTrigger):
 self._handle_project_change_merged_events = False

 def getEventFilters(self, connection_name, trigger_conf,
-error_accumulator):
+parse_context):
 efilters = []
+pcontext = parse_context
 for trigger in to_list(trigger_conf):
-types = [make_regex(x, error_accumulator)
-for x in to_list(trigger['event'])]
-pipelines = [make_regex(x, error_accumulator)
-for x in to_list(trigger.get('pipeline'))]
+with pcontext.confAttr(trigger, 'event') as attr:
+types = [make_regex(x, pcontext)
+for x in to_list(attr)]
+with pcontext.confAttr(trigger, 'pipeline') as attr:
+pipelines = [make_regex(x, pcontext)
+for x in to_list(attr)]
 f = ZuulEventFilter(
 connection_name=connection_name,
 trigger=self,
@@ -3097,7 +3097,9 @@ class Job(ConfigObject):
 def __getattr__(self, name):
 v = self.__dict__.get(name)
 if v is None:
-return self.attributes[name]
+if name in self.attributes:
+return self.attributes[name]
+raise AttributeError
 return v

 def _get(self, name):
@@ -27,7 +27,7 @@ class BaseTrigger(object, metaclass=abc.ABCMeta):

 @abc.abstractmethod
 def getEventFilters(self, connection_name, trigger_conf,
-error_accumulator):
+parse_context):
 """Return a list of EventFilter's for the scheduler to match against.
 """