diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py
index ec1e32a939..fe7c7cc4fa 100755
--- a/tests/test_scheduler.py
+++ b/tests/test_scheduler.py
@@ -2235,6 +2235,9 @@ class TestScheduler(ZuulTestCase):
         self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
         self.waitUntilSettled()
 
+        self.worker.release('project-merge')
+        self.waitUntilSettled()
+
         port = self.webapp.server.socket.getsockname()[1]
 
         req = urllib2.Request("http://localhost:%s/status.json" % port)
@@ -2255,7 +2258,7 @@ class TestScheduler(ZuulTestCase):
         self.waitUntilSettled()
 
         data = json.loads(data)
-        status_jobs = set()
+        status_jobs = []
         for p in data['pipelines']:
             for q in p['change_queues']:
                 if p['name'] in ['gate', 'conflict']:
@@ -2267,10 +2270,24 @@ class TestScheduler(ZuulTestCase):
                         self.assertTrue(change['active'])
                         self.assertEqual(change['id'], '1,1')
                         for job in change['jobs']:
-                            status_jobs.add(job['name'])
-        self.assertIn('project-merge', status_jobs)
-        self.assertIn('project-test1', status_jobs)
-        self.assertIn('project-test2', status_jobs)
+                            status_jobs.append(job)
+        self.assertEqual('project-merge', status_jobs[0]['name'])
+        self.assertEqual('https://server/job/project-merge/0/',
+                         status_jobs[0]['url'])
+        self.assertEqual('http://logs.example.com/1/1/gate/project-merge/0',
+                         status_jobs[0]['report_url'])
+
+        self.assertEqual('project-test1', status_jobs[1]['name'])
+        self.assertEqual('https://server/job/project-test1/1/',
+                         status_jobs[1]['url'])
+        self.assertEqual('http://logs.example.com/1/1/gate/project-test1/1',
+                         status_jobs[1]['report_url'])
+
+        self.assertEqual('project-test2', status_jobs[2]['name'])
+        self.assertEqual('https://server/job/project-test2/2/',
+                         status_jobs[2]['url'])
+        self.assertEqual('http://logs.example.com/1/1/gate/project-test2/2',
+                         status_jobs[2]['report_url'])
 
     def test_merging_queues(self):
         "Test that transitively-connected change queues are merged"
diff --git a/zuul/model.py b/zuul/model.py
index d2cf13bc7f..5bea5d03bb 100644
--- a/zuul/model.py
+++ b/zuul/model.py
@@ -266,7 +266,7 @@ class Pipeline(object):
             items.extend(shared_queue.queue)
         return items
 
-    def formatStatusJSON(self):
+    def formatStatusJSON(self, url_pattern=None):
         j_pipeline = dict(name=self.name,
                           description=self.description)
         j_queues = []
@@ -283,7 +283,7 @@ class Pipeline(object):
                     if j_changes:
                         j_queue['heads'].append(j_changes)
                     j_changes = []
-                j_changes.append(e.formatJSON())
+                j_changes.append(e.formatJSON(url_pattern))
                 if (len(j_changes) > 1 and
                     (j_changes[-2]['remaining_time'] is not None) and
                     (j_changes[-1]['remaining_time'] is not None)):
@@ -724,7 +724,34 @@ class QueueItem(object):
     def setReportedResult(self, result):
         self.current_build_set.result = result
 
-    def formatJSON(self):
+    def formatJobResult(self, job, url_pattern=None):
+        build = self.current_build_set.getBuild(job.name)
+        result = build.result
+        pattern = url_pattern
+        if result == 'SUCCESS':
+            if job.success_message:
+                result = job.success_message
+            if job.success_pattern:
+                pattern = job.success_pattern
+        elif result == 'FAILURE':
+            if job.failure_message:
+                result = job.failure_message
+            if job.failure_pattern:
+                pattern = job.failure_pattern
+        url = None
+        if pattern:
+            try:
+                url = pattern.format(change=self.change,
+                                     pipeline=self.pipeline,
+                                     job=job,
+                                     build=build)
+            except Exception:
+                pass  # FIXME: log this or something?
+        if not url:
+            url = build.url or job.name
+        return (result, url)
+
+    def formatJSON(self, url_pattern=None):
         changeish = self.change
         ret = {}
         ret['active'] = self.active
@@ -761,11 +788,13 @@ class QueueItem(object):
             elapsed = None
             remaining = None
             result = None
-            url = None
+            build_url = None
+            report_url = None
             worker = None
             if build:
                 result = build.result
-                url = build.url
+                build_url = build.url
+                (unused, report_url) = self.formatJobResult(job, url_pattern)
                 if build.start_time:
                     if build.end_time:
                         elapsed = int((build.end_time -
@@ -793,7 +822,8 @@ class QueueItem(object):
                 'name': job.name,
                 'elapsed_time': elapsed,
                 'remaining_time': remaining,
-                'url': url,
+                'url': build_url,
+                'report_url': report_url,
                 'result': result,
                 'voting': job.voting,
                 'uuid': build.uuid if build else None,
diff --git a/zuul/reporter/__init__.py b/zuul/reporter/__init__.py
index fd7917400e..0569fbe748 100644
--- a/zuul/reporter/__init__.py
+++ b/zuul/reporter/__init__.py
@@ -113,25 +113,7 @@ class BaseReporter(object):
 
         for job in pipeline.getJobs(item):
             build = item.current_build_set.getBuild(job.name)
-            result = build.result
-            pattern = url_pattern
-            if result == 'SUCCESS':
-                if job.success_message:
-                    result = job.success_message
-                if job.success_pattern:
-                    pattern = job.success_pattern
-            elif result == 'FAILURE':
-                if job.failure_message:
-                    result = job.failure_message
-                if job.failure_pattern:
-                    pattern = job.failure_pattern
-            if pattern:
-                url = pattern.format(change=item.change,
-                                     pipeline=pipeline,
-                                     job=job,
-                                     build=build)
-            else:
-                url = build.url or job.name
+            (result, url) = item.formatJobResult(job, url_pattern)
             if not job.voting:
                 voting = ' (non-voting)'
             else:
diff --git a/zuul/scheduler.py b/zuul/scheduler.py
index 48bb5e318c..aea9a67e96 100644
--- a/zuul/scheduler.py
+++ b/zuul/scheduler.py
@@ -1097,6 +1097,11 @@ class Scheduler(threading.Thread):
             pipeline.manager.onMergeCompleted(event)
 
     def formatStatusJSON(self):
+        if self.config.has_option('zuul', 'url_pattern'):
+            url_pattern = self.config.get('zuul', 'url_pattern')
+        else:
+            url_pattern = None
+
         data = {}
 
         data['zuul_version'] = self.zuul_version
@@ -1122,7 +1127,7 @@ class Scheduler(threading.Thread):
         pipelines = []
         data['pipelines'] = pipelines
        for pipeline in self.layout.pipelines.values():
-            pipelines.append(pipeline.formatStatusJSON())
+            pipelines.append(pipeline.formatStatusJSON(url_pattern))
         return json.dumps(data)
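
For reference, below is a minimal standalone sketch of the URL expansion the new QueueItem.formatJobResult() performs: the scheduler reads url_pattern from the [zuul] config section, threads it through formatStatusJSON(), and the item expands it via str.format() with change, pipeline, job, and build, falling back to build.url or job.name when no pattern is configured or formatting fails. The pattern string and the Fake* stand-in objects are illustrative assumptions, not part of this change; they merely reproduce the report_url values asserted in test_json_status.

    # Hedged sketch of the report-URL expansion in formatJobResult().
    # The Fake* namedtuples and the pattern below are illustrative only.
    from collections import namedtuple

    FakeChange = namedtuple('FakeChange', ['number', 'patchset'])
    FakePipeline = namedtuple('FakePipeline', ['name'])
    FakeJob = namedtuple('FakeJob', ['name'])
    FakeBuild = namedtuple('FakeBuild', ['number', 'url'])

    # Shape of the [zuul] url_pattern option read by Scheduler.formatStatusJSON()
    url_pattern = ('http://logs.example.com/{change.number}/{change.patchset}'
                   '/{pipeline.name}/{job.name}/{build.number}')


    def format_report_url(pattern, change, pipeline, job, build):
        """Mirror the pattern/fallback logic of QueueItem.formatJobResult()."""
        url = None
        if pattern:
            try:
                url = pattern.format(change=change, pipeline=pipeline,
                                     job=job, build=build)
            except Exception:
                pass  # a bad pattern falls through to the fallback below
        if not url:
            url = build.url or job.name
        return url


    print(format_report_url(url_pattern,
                            FakeChange(number='1', patchset='1'),
                            FakePipeline(name='gate'),
                            FakeJob(name='project-merge'),
                            FakeBuild(number='0', url=None)))
    # -> http://logs.example.com/1/1/gate/project-merge/0

With this split, status.json exposes both the live build URL ('url') and the final report location ('report_url'), so the status page can link to each separately.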