Remove webapp

The webapp has been superseded by zuul-web, so remove it
completely.

Change-Id: I8125a0d7f3aef8fa7982c75d4650776b6906a612
Tobias Henkel 2018-01-23 12:34:15 +01:00
parent 940da00e9b
commit e0bad8dc05
8 changed files with 94 additions and 485 deletions
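
The status endpoints formerly served by the webapp are provided by zuul-web instead. As a rough illustration (not part of this change), the same tenant status JSON can be fetched from zuul-web; the host, port, and tenant name here are assumptions, while the /tenant-one/status.json path mirrors the one exercised in the test changes below.

import json
import urllib.request

# zuul-web endpoint; adjust host/port/tenant for your deployment (assumed values).
url = "http://localhost:9000/tenant-one/status.json"
with urllib.request.urlopen(url) as f:
    status = json.loads(f.read().decode("utf8"))

# Print the pipeline names found in the status payload.
for pipeline in status["pipelines"]:
    print(pipeline["name"])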

View File

@@ -243,23 +243,6 @@ The following sections of ``zuul.conf`` are used by the scheduler:
.. TODO: is this effectively required?
.. attr:: webapp
.. attr:: listen_address
:default: all addresses
IP address or domain name on which to listen.
.. attr:: port
:default: 8001
Port on which the webapp is listening.
.. attr:: status_expiry
:default: 1
Zuul will cache the status.json file for this many seconds.
.. attr:: scheduler
.. attr:: command_socket

View File

@@ -41,10 +41,6 @@ static_cache_expiry=0
;sql_connection_name=mydatabase
status_url=https://zuul.example.com/status
[webapp]
listen_address=0.0.0.0
port=8001
[connection gerrit]
driver=gerrit
server=review.example.com
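
For deployments that relied on the removed [webapp] section, zuul-web carries its own configuration. A minimal sketch, assuming zuul-web's [web] section with listen_address and port options (the values shown are illustrative):

[web]
; Address and port zuul-web listens on (assumed example values).
listen_address=127.0.0.1
port=9000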

View File

@@ -57,7 +57,6 @@ import zuul.driver.gerrit.gerritsource as gerritsource
import zuul.driver.gerrit.gerritconnection as gerritconnection
import zuul.driver.github.githubconnection as githubconnection
import zuul.scheduler
import zuul.webapp
import zuul.executor.server
import zuul.executor.client
import zuul.lib.connections
@@ -2009,9 +2008,6 @@ class ZuulTestCase(BaseTestCase):
self.sched = zuul.scheduler.Scheduler(self.config)
self.sched._stats_interval = 1
self.webapp = zuul.webapp.WebApp(
self.sched, port=0, listen_address='127.0.0.1')
self.event_queues = [
self.sched.result_event_queue,
self.sched.trigger_event_queue,
@@ -2050,7 +2046,6 @@
self.sched.setZooKeeper(self.zk)
self.sched.start()
self.webapp.start()
self.executor_client.gearman.waitForServer()
# Cleanups are run in reverse order
self.addCleanup(self.assertCleanShutdown)
@@ -2315,8 +2310,6 @@
self.sched.join()
self.statsd.stop()
self.statsd.join()
self.webapp.stop()
self.webapp.join()
self.rpcclient.shutdown()
self.gearman_server.shutdown()
self.fake_nodepool.stop()

View File

@@ -19,14 +19,12 @@ import json
import textwrap
import os
import re
import shutil
import time
from unittest import skip
import git
import testtools
import urllib
import zuul.change_matcher
from zuul.driver.gerrit import gerritreporter
@@ -2533,110 +2531,6 @@
self.assertEqual(self.history[4].pipeline, 'check')
self.assertEqual(self.history[5].pipeline, 'check')
def test_json_status(self):
"Test that we can retrieve JSON status info"
self.executor_server.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
A.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
self.waitUntilSettled()
self.executor_server.release('project-merge')
self.waitUntilSettled()
port = self.webapp.server.socket.getsockname()[1]
req = urllib.request.Request(
"http://localhost:%s/tenant-one/status" % port)
f = urllib.request.urlopen(req)
headers = f.info()
self.assertIn('Content-Length', headers)
self.assertIn('Content-Type', headers)
self.assertIsNotNone(re.match('^application/json(; charset=UTF-8)?$',
headers['Content-Type']))
self.assertIn('Access-Control-Allow-Origin', headers)
self.assertIn('Cache-Control', headers)
self.assertIn('Last-Modified', headers)
self.assertIn('Expires', headers)
data = f.read().decode('utf8')
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
data = json.loads(data)
status_jobs = []
for p in data['pipelines']:
for q in p['change_queues']:
if p['name'] in ['gate', 'conflict']:
self.assertEqual(q['window'], 20)
else:
self.assertEqual(q['window'], 0)
for head in q['heads']:
for change in head:
self.assertTrue(change['active'])
self.assertEqual(change['id'], '1,1')
for job in change['jobs']:
status_jobs.append(job)
self.assertEqual('project-merge', status_jobs[0]['name'])
# TODO(mordred) pull uuids from self.builds
self.assertEqual(
'stream.html?uuid={uuid}&logfile=console.log'.format(
uuid=status_jobs[0]['uuid']),
status_jobs[0]['url'])
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
hostname=self.executor_server.hostname,
uuid=status_jobs[0]['uuid']),
status_jobs[0]['finger_url'])
# TOOD(mordred) configure a success-url on the base job
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
hostname=self.executor_server.hostname,
uuid=status_jobs[0]['uuid']),
status_jobs[0]['report_url'])
self.assertEqual('project-test1', status_jobs[1]['name'])
self.assertEqual(
'stream.html?uuid={uuid}&logfile=console.log'.format(
uuid=status_jobs[1]['uuid']),
status_jobs[1]['url'])
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
hostname=self.executor_server.hostname,
uuid=status_jobs[1]['uuid']),
status_jobs[1]['finger_url'])
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
hostname=self.executor_server.hostname,
uuid=status_jobs[1]['uuid']),
status_jobs[1]['report_url'])
self.assertEqual('project-test2', status_jobs[2]['name'])
self.assertEqual(
'stream.html?uuid={uuid}&logfile=console.log'.format(
uuid=status_jobs[2]['uuid']),
status_jobs[2]['url'])
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
hostname=self.executor_server.hostname,
uuid=status_jobs[2]['uuid']),
status_jobs[2]['finger_url'])
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
hostname=self.executor_server.hostname,
uuid=status_jobs[2]['uuid']),
status_jobs[2]['report_url'])
# check job dependencies
self.assertIsNotNone(status_jobs[0]['dependencies'])
self.assertIsNotNone(status_jobs[1]['dependencies'])
self.assertIsNotNone(status_jobs[2]['dependencies'])
self.assertEqual(len(status_jobs[0]['dependencies']), 0)
self.assertEqual(len(status_jobs[1]['dependencies']), 1)
self.assertEqual(len(status_jobs[2]['dependencies']), 1)
self.assertIn('project-merge', status_jobs[1]['dependencies'])
self.assertIn('project-merge', status_jobs[2]['dependencies'])
def test_reconfigure_merge(self):
"""Test that two reconfigure events are merged"""
@@ -3212,13 +3106,6 @@
self.assertEqual(len(self.builds), 2)
port = self.webapp.server.socket.getsockname()[1]
req = urllib.request.Request(
"http://localhost:%s/tenant-one/status" % port)
f = urllib.request.urlopen(req)
data = f.read().decode('utf8')
self.executor_server.hold_jobs_in_build = False
# Stop queuing timer triggered jobs so that the assertions
# below don't race against more jobs being queued.
@@ -3240,16 +3127,6 @@
ref='refs/heads/stable'),
], ordered=False)
data = json.loads(data)
status_jobs = set()
for p in data['pipelines']:
for q in p['change_queues']:
for head in q['heads']:
for change in head:
for job in change['jobs']:
status_jobs.add(job['name'])
self.assertIn('project-bitrot', status_jobs)
def test_idle(self):
"Test that frequent periodic jobs work"
# This test can not use simple_layout because it must start

View File

@@ -78,14 +78,105 @@ class TestWeb(ZuulTestCase):
super(TestWeb, self).tearDown()
def test_web_status(self):
"Test that we can filter to only certain changes in the webapp."
"Test that we can retrieve JSON status info"
self.executor_server.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
A.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
self.waitUntilSettled()
self.executor_server.release('project-merge')
self.waitUntilSettled()
req = urllib.request.Request(
"http://localhost:%s/tenant-one/status.json" % self.port)
f = urllib.request.urlopen(req)
data = json.loads(f.read().decode('utf8'))
headers = f.info()
self.assertIn('Content-Length', headers)
self.assertIn('Content-Type', headers)
self.assertEqual(
'application/json; charset=utf-8', headers['Content-Type'])
self.assertIn('Access-Control-Allow-Origin', headers)
self.assertIn('Cache-Control', headers)
self.assertIn('Last-Modified', headers)
data = f.read().decode('utf8')
self.assertIn('pipelines', data)
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
data = json.loads(data)
status_jobs = []
for p in data['pipelines']:
for q in p['change_queues']:
if p['name'] in ['gate', 'conflict']:
self.assertEqual(q['window'], 20)
else:
self.assertEqual(q['window'], 0)
for head in q['heads']:
for change in head:
self.assertTrue(change['active'])
self.assertIn(change['id'], ('1,1', '2,1', '3,1'))
for job in change['jobs']:
status_jobs.append(job)
self.assertEqual('project-merge', status_jobs[0]['name'])
# TODO(mordred) pull uuids from self.builds
self.assertEqual(
'stream.html?uuid={uuid}&logfile=console.log'.format(
uuid=status_jobs[0]['uuid']),
status_jobs[0]['url'])
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
hostname=self.executor_server.hostname,
uuid=status_jobs[0]['uuid']),
status_jobs[0]['finger_url'])
# TOOD(mordred) configure a success-url on the base job
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
hostname=self.executor_server.hostname,
uuid=status_jobs[0]['uuid']),
status_jobs[0]['report_url'])
self.assertEqual('project-test1', status_jobs[1]['name'])
self.assertEqual(
'stream.html?uuid={uuid}&logfile=console.log'.format(
uuid=status_jobs[1]['uuid']),
status_jobs[1]['url'])
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
hostname=self.executor_server.hostname,
uuid=status_jobs[1]['uuid']),
status_jobs[1]['finger_url'])
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
hostname=self.executor_server.hostname,
uuid=status_jobs[1]['uuid']),
status_jobs[1]['report_url'])
self.assertEqual('project-test2', status_jobs[2]['name'])
self.assertEqual(
'stream.html?uuid={uuid}&logfile=console.log'.format(
uuid=status_jobs[2]['uuid']),
status_jobs[2]['url'])
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
hostname=self.executor_server.hostname,
uuid=status_jobs[2]['uuid']),
status_jobs[2]['finger_url'])
self.assertEqual(
'finger://{hostname}/{uuid}'.format(
hostname=self.executor_server.hostname,
uuid=status_jobs[2]['uuid']),
status_jobs[2]['report_url'])
# check job dependencies
self.assertIsNotNone(status_jobs[0]['dependencies'])
self.assertIsNotNone(status_jobs[1]['dependencies'])
self.assertIsNotNone(status_jobs[2]['dependencies'])
self.assertEqual(len(status_jobs[0]['dependencies']), 0)
self.assertEqual(len(status_jobs[1]['dependencies']), 1)
self.assertEqual(len(status_jobs[2]['dependencies']), 1)
self.assertIn('project-merge', status_jobs[1]['dependencies'])
self.assertIn('project-merge', status_jobs[2]['dependencies'])
def test_web_bad_url(self):
# do we 404 correctly

View File

@@ -1,119 +0,0 @@
#!/usr/bin/env python
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# Copyright 2014 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import json
import urllib
import webob
from tests.base import ZuulTestCase, FIXTURE_DIR
class TestWebapp(ZuulTestCase):
tenant_config_file = 'config/single-tenant/main.yaml'
def setUp(self):
super(TestWebapp, self).setUp()
self.executor_server.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
A.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(A.addApproval('Approved', 1))
B = self.fake_gerrit.addFakeChange('org/project1', 'master', 'B')
B.addApproval('Code-Review', 2)
self.fake_gerrit.addEvent(B.addApproval('Approved', 1))
self.waitUntilSettled()
self.port = self.webapp.server.socket.getsockname()[1]
def tearDown(self):
self.executor_server.hold_jobs_in_build = False
self.executor_server.release()
self.waitUntilSettled()
super(TestWebapp, self).tearDown()
def test_webapp_status(self):
"Test that we can filter to only certain changes in the webapp."
req = urllib.request.Request(
"http://localhost:%s/tenant-one/status" % self.port)
f = urllib.request.urlopen(req)
data = json.loads(f.read().decode('utf8'))
self.assertIn('pipelines', data)
def test_webapp_status_compat(self):
# testing compat with status.json
req = urllib.request.Request(
"http://localhost:%s/tenant-one/status.json" % self.port)
f = urllib.request.urlopen(req)
data = json.loads(f.read().decode('utf8'))
self.assertIn('pipelines', data)
def test_webapp_bad_url(self):
# do we 404 correctly
req = urllib.request.Request(
"http://localhost:%s/status/foo" % self.port)
self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, req)
def test_webapp_find_change(self):
# can we filter by change id
req = urllib.request.Request(
"http://localhost:%s/tenant-one/status/change/1,1" % self.port)
f = urllib.request.urlopen(req)
data = json.loads(f.read().decode('utf8'))
self.assertEqual(1, len(data), data)
self.assertEqual("org/project", data[0]['project'])
req = urllib.request.Request(
"http://localhost:%s/tenant-one/status/change/2,1" % self.port)
f = urllib.request.urlopen(req)
data = json.loads(f.read().decode('utf8'))
self.assertEqual(1, len(data), data)
self.assertEqual("org/project1", data[0]['project'], data)
def test_webapp_keys(self):
with open(os.path.join(FIXTURE_DIR, 'public.pem'), 'rb') as f:
public_pem = f.read()
req = urllib.request.Request(
"http://localhost:%s/tenant-one/keys/gerrit/org/project.pub" %
self.port)
f = urllib.request.urlopen(req)
self.assertEqual(f.read(), public_pem)
def test_webapp_custom_handler(self):
def custom_handler(path, tenant_name, request):
return webob.Response(body='ok')
self.webapp.register_path('/custom', custom_handler)
req = urllib.request.Request(
"http://localhost:%s/custom" % self.port)
f = urllib.request.urlopen(req)
self.assertEqual(b'ok', f.read())
self.webapp.unregister_path('/custom')
self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, req)
def test_webapp_404_on_unknown_tenant(self):
req = urllib.request.Request(
"http://localhost:{}/non-tenant/status.json".format(self.port))
e = self.assertRaises(
urllib.error.HTTPError, urllib.request.urlopen, req)
self.assertEqual(404, e.code)

View File

@@ -118,7 +118,6 @@ class Scheduler(zuul.cmd.ZuulDaemonApp):
import zuul.executor.client
import zuul.merger.client
import zuul.nodepool
import zuul.webapp
import zuul.zk
if (self.config.has_option('gearman_server', 'start') and
@@ -142,15 +141,6 @@ class Scheduler(zuul.cmd.ZuulDaemonApp):
zookeeper.connect(zookeeper_hosts, timeout=zookeeper_timeout)
cache_expiry = get_default(self.config, 'webapp', 'status_expiry', 1)
listen_address = get_default(self.config, 'webapp', 'listen_address',
'0.0.0.0')
port = get_default(self.config, 'webapp', 'port', 8001)
webapp = zuul.webapp.WebApp(
self.sched, port=port, cache_expiry=cache_expiry,
listen_address=listen_address)
self.configure_connections()
self.sched.setExecutor(gearman)
self.sched.setMerger(merger)
@@ -168,8 +158,6 @@ class Scheduler(zuul.cmd.ZuulDaemonApp):
# TODO(jeblair): If we had all threads marked as daemon,
# we might be able to have a nicer way of exiting here.
sys.exit(1)
self.log.info('Starting Webapp')
webapp.start()
signal.signal(signal.SIGHUP, self.reconfigure_handler)

View File

@@ -1,200 +0,0 @@
# Copyright 2012 Hewlett-Packard Development Company, L.P.
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
import logging
import re
import threading
import time
from paste import httpserver
import webob
from webob import dec
from zuul.lib import encryption
"""Zuul main web app.
Zuul supports HTTP requests directly against it for determining the
change status. These responses are provided as json data structures.
The supported urls are:
- /status: return a complex data structure that represents the entire
queue / pipeline structure of the system
- /status.json (backwards compatibility): same as /status
- /status/change/X,Y: return status just for gerrit change X,Y
- /keys/SOURCE/PROJECT.pub: return the public key for PROJECT
When returning status for a single gerrit change you will get an
array of changes, they will not include the queue structure.
"""
class WebApp(threading.Thread):
log = logging.getLogger("zuul.WebApp")
change_path_regexp = '/status/change/(.*)$'
def __init__(self, scheduler, port=8001, cache_expiry=1,
listen_address='0.0.0.0'):
threading.Thread.__init__(self)
self.scheduler = scheduler
self.listen_address = listen_address
self.port = port
self.cache_expiry = cache_expiry
self.cache_time = 0
self.cache = {}
self.daemon = True
self.routes = {}
self._init_default_routes()
self.server = httpserver.serve(
dec.wsgify(self.app), host=self.listen_address, port=self.port,
start_loop=False)
def _init_default_routes(self):
self.register_path('/(status\.json|status)$', self.status)
self.register_path(self.change_path_regexp, self.change)
def run(self):
self.server.serve_forever()
def stop(self):
self.server.server_close()
def _changes_by_func(self, func, tenant_name):
"""Filter changes by a user provided function.
In order to support arbitrary collection of subsets of changes
we provide a low level filtering mechanism that takes a
function which applies to changes. The output of this function
is a flattened list of those collected changes.
"""
status = []
jsonstruct = json.loads(self.cache[tenant_name])
for pipeline in jsonstruct['pipelines']:
for change_queue in pipeline['change_queues']:
for head in change_queue['heads']:
for change in head:
if func(change):
status.append(copy.deepcopy(change))
return json.dumps(status)
def _status_for_change(self, rev, tenant_name):
"""Return the statuses for a particular change id X,Y."""
def func(change):
return change['id'] == rev
return self._changes_by_func(func, tenant_name)
def register_path(self, path, handler):
path_re = re.compile(path)
self.routes[path] = (path_re, handler)
def unregister_path(self, path):
if self.routes.get(path):
del self.routes[path]
def _handle_keys(self, request, path):
m = re.match('/keys/(.*?)/(.*?).pub', path)
if not m:
raise webob.exc.HTTPBadRequest()
source_name = m.group(1)
project_name = m.group(2)
source = self.scheduler.connections.getSource(source_name)
if not source:
raise webob.exc.HTTPNotFound(
detail="Cannot locate a source named %s" % source_name)
project = source.getProject(project_name)
if not project or not hasattr(project, 'public_key'):
raise webob.exc.HTTPNotFound(
detail="Cannot locate a project named %s" % project_name)
pem_public_key = encryption.serialize_rsa_public_key(
project.public_key)
response = webob.Response(body=pem_public_key,
content_type='text/plain')
return response.conditional_response_app
def app(self, request):
# Try registered paths without a tenant_name first
path = request.path
for path_re, handler in self.routes.values():
if path_re.match(path):
return handler(path, '', request)
# Now try with a tenant_name stripped
x, tenant_name, path = request.path.split('/', 2)
path = '/' + path
# Handle keys
if path.startswith('/keys'):
try:
return self._handle_keys(request, path)
except Exception as e:
self.log.exception("Issue with _handle_keys")
raise
for path_re, handler in self.routes.values():
if path_re.match(path):
return handler(path, tenant_name, request)
else:
raise webob.exc.HTTPNotFound()
def status(self, path, tenant_name, request):
def func():
return webob.Response(body=self.cache[tenant_name],
content_type='application/json',
charset='utf8')
if tenant_name not in self.scheduler.abide.tenants:
raise webob.exc.HTTPNotFound()
return self._response_with_status_cache(func, tenant_name)
def change(self, path, tenant_name, request):
def func():
m = re.match(self.change_path_regexp, path)
change_id = m.group(1)
status = self._status_for_change(change_id, tenant_name)
if status:
return webob.Response(body=status,
content_type='application/json',
charset='utf8')
else:
raise webob.exc.HTTPNotFound()
return self._response_with_status_cache(func, tenant_name)
def _refresh_status_cache(self, tenant_name):
if (tenant_name not in self.cache or
(time.time() - self.cache_time) > self.cache_expiry):
try:
self.cache[tenant_name] = self.scheduler.formatStatusJSON(
tenant_name)
# Call time.time() again because formatting above may take
# longer than the cache timeout.
self.cache_time = time.time()
except Exception:
self.log.exception("Exception formatting status:")
raise
def _response_with_status_cache(self, func, tenant_name):
self._refresh_status_cache(tenant_name)
response = func()
response.headers['Access-Control-Allow-Origin'] = '*'
response.cache_control.public = True
response.cache_control.max_age = self.cache_expiry
response.last_modified = self.cache_time
response.expires = self.cache_time + self.cache_expiry
return response.conditional_response_app
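
For reference, the per-change filtering the removed WebApp performed for /status/change/X,Y can be reproduced client-side against the tenant status JSON served by zuul-web. A minimal sketch, assuming a payload with the pipelines / change_queues / heads structure that _changes_by_func above (and the updated tests) walk:

import copy

def changes_for_id(status, change_id):
    # Flatten the status structure and keep only entries for one change,
    # mirroring what WebApp._changes_by_func / _status_for_change did above.
    matches = []
    for pipeline in status['pipelines']:
        for change_queue in pipeline['change_queues']:
            for head in change_queue['heads']:
                for change in head:
                    if change['id'] == change_id:
                        matches.append(copy.deepcopy(change))
    return matches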