#!/usr/bin/env python
# Copyright (C) 2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Manage jobs in Jenkins server
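
# A minimal usage sketch (an illustration only; assumes a populated
# JJBConfig-like object whose ``jenkins`` and ``builder`` dicts carry the
# options consumed by Builder below):
#
#     builder = Builder(jjb_config)
#     builder.update_jobs(xml_jobs)        # upload only changed job XML
#     builder.delete_old_managed(keep=[])  # drop obsolete managed jobs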

import errno
import hashlib
import io
import logging
import operator
import os
from pprint import pformat
import re
import tempfile
import time
import xml.etree.ElementTree as XML

import yaml

import jenkins

from jenkins_jobs.constants import MAGIC_MANAGE_STRING
from jenkins_jobs.parallel import concurrent
from jenkins_jobs import utils

__all__ = [
    "Jenkins"
]

logger = logging.getLogger(__name__)

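# Sentinel used by Jenkins.__init__ below to tell "no timeout supplied"
# apart from an explicitly passed timeout value.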
_DEFAULT_TIMEOUT = object()


class CacheStorage(object):
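    """Cache of the md5 sums of job configurations previously pushed to a
    Jenkins master, persisted as one YAML file per Jenkins URL under the
    user's cache directory, so unchanged jobs can be skipped on later runs.
    """
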
    # ensure each instance of the class has a reference to the required
    # modules so that they are available to be used when the destructor
    # is being called, since python will not guarantee that it won't have
    # removed global module references during teardown.
    _logger = logger
    _os = os
    _tempfile = tempfile
    _yaml = yaml

    def __init__(self, jenkins_url, flush=False):
        cache_dir = self.get_cache_dir()
        # One cache per remote Jenkins URL:
        host_vary = re.sub(r'[^A-Za-z0-9\-\~]', '_', jenkins_url)
        self.cachefilename = os.path.join(
            cache_dir, 'cache-host-jobs-' + host_vary + '.yml')
        if flush or not os.path.isfile(self.cachefilename):
            self.data = {}
        else:
            with io.open(self.cachefilename, 'r', encoding='utf-8') as yfile:
                self.data = yaml.load(yfile)
        logger.debug("Using cache: '{0}'".format(self.cachefilename))

    @staticmethod
    def get_cache_dir():
        home = os.path.expanduser('~')
        if home == '~':
            raise OSError('Could not locate home folder')
        xdg_cache_home = os.environ.get('XDG_CACHE_HOME') or \
            os.path.join(home, '.cache')
        path = os.path.join(xdg_cache_home, 'jenkins_jobs')
        if not os.path.isdir(path):
            try:
                os.makedirs(path)
            except OSError as ose:
                # it could happen that two jjb instances are running at the
                # same time and that the other instance created the directory
                # after we made the check, in which case there is no error
                if ose.errno != errno.EEXIST:
                    raise ose
        return path

    def set(self, job, md5):
        self.data[job] = md5

    def clear(self):
        self.data.clear()

    def is_cached(self, job):
        if job in self.data:
            return True
        return False

    def has_changed(self, job, md5):
        if job in self.data and self.data[job] == md5:
            return False
        return True

    def save(self):
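        """Atomically write the cache out to disk as YAML.

        Written so it is safe to call from ``__del__``, hence the use of
        the module references stored on the class.
        """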
        # use self references to required modules in case called via __del__
        # write to tempfile under same directory and then replace to avoid
        # issues around corruption, such as the process being killed
        tfile = self._tempfile.NamedTemporaryFile(dir=self.get_cache_dir(),
                                                  delete=False)
        tfile.write(self._yaml.dump(self.data).encode('utf-8'))
        # force contents to be synced on disk before overwriting cachefile
        tfile.flush()
        self._os.fsync(tfile.fileno())
        tfile.close()
        try:
            self._os.rename(tfile.name, self.cachefilename)
        except OSError:
            # On Windows, if dst already exists, OSError will be raised
            # even if it is a file. Remove the file first in that case and
            # try again.
            self._os.remove(self.cachefilename)
            self._os.rename(tfile.name, self.cachefilename)

        self._logger.debug("Cache written out to '%s'" % self.cachefilename)

    def __del__(self):
        # check we initialized sufficiently in case called
        # due to an exception occurring in the __init__
        if getattr(self, 'data', None) is not None:
            try:
                self.save()
            except Exception as e:
                self._logger.error("Failed to write to cache file '%s' on "
                                   "exit: %s" % (self.cachefilename, e))


class Jenkins(object):
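    """Thin wrapper around the python-jenkins client that lazily caches the
    server's job list, so repeated existence checks avoid extra requests.
    """
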
    def __init__(self, url, user, password, timeout=_DEFAULT_TIMEOUT):
        if timeout != _DEFAULT_TIMEOUT:
            self.jenkins = jenkins.Jenkins(url, user, password, timeout)
        else:
            self.jenkins = jenkins.Jenkins(url, user, password)
        self._jobs = None
        self._job_list = None

    @property
    def jobs(self):
        if self._jobs is None:
            # populate jobs
            self._jobs = self.jenkins.get_jobs()
        return self._jobs

    @property
    def job_list(self):
        if self._job_list is None:
            self._job_list = set(job['name'] for job in self.jobs)
        return self._job_list

    def update_job(self, job_name, xml):
        if self.is_job(job_name):
            logger.info("Reconfiguring jenkins job {0}".format(job_name))
            self.jenkins.reconfig_job(job_name, xml)
        else:
            logger.info("Creating jenkins job {0}".format(job_name))
            self.jenkins.create_job(job_name, xml)

    def is_job(self, job_name):
        # first check the cached job list
        if job_name in self.job_list:
            return True

        # if not in the cache, fall back to asking jenkins directly
        return self.jenkins.job_exists(job_name)

    def get_job_md5(self, job_name):
        xml = self.jenkins.get_job_config(job_name)
        return hashlib.md5(xml.encode('utf-8')).hexdigest()

    def delete_job(self, job_name):
        if self.is_job(job_name):
            logger.info("Deleting jenkins job {0}".format(job_name))
            self.jenkins.delete_job(job_name)

    def delete_all_jobs(self):
        # executing a groovy script to delete all jobs is much faster than
        # using the doDelete REST endpoint to delete one job at a time
        script = ('for(job in jenkins.model.Jenkins.theInstance.getAllItems())'
                  ' { job.delete(); }')
        self.jenkins.run_script(script)

    def get_plugins_info(self):
        """Return a list of plugin_info dicts, one for each plugin on the
        Jenkins instance.
        """
        try:
            plugins_list = self.jenkins.get_plugins_info()
        except jenkins.JenkinsException as e:
            if re.search("Connection refused", str(e)):
                logger.warning(
                    "Unable to retrieve Jenkins Plugin Info from {0},"
                    " using default empty plugins info list.".format(
                        self.jenkins.server))
                plugins_list = [{'shortName': '',
                                 'version': '',
                                 'longName': ''}]
            else:
                raise e
        logger.debug("Jenkins Plugin Info {0}".format(pformat(plugins_list)))

        return plugins_list

    def get_jobs(self, cache=True):
        if not cache:
            self._jobs = None
            self._job_list = None
        return self.jobs

    def is_managed(self, job_name):
        xml = self.jenkins.get_job_config(job_name)
        try:
            out = XML.fromstring(xml)
            description = out.find(".//description").text
            return description.endswith(MAGIC_MANAGE_STRING)
        except (TypeError, AttributeError):
            pass
        return False


class Builder(object):
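    """Ties a Jenkins connection to the local CacheStorage: uploads or
    writes out generated job XML, skipping jobs whose md5 is unchanged,
    and deletes jobs that carry the jenkins-job-builder managed marker.
    """
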
    def __init__(self, jjb_config):
        self.jenkins = Jenkins(jjb_config.jenkins['url'],
                               jjb_config.jenkins['user'],
                               jjb_config.jenkins['password'],
                               jjb_config.jenkins['timeout'])
        self.cache = CacheStorage(jjb_config.jenkins['url'],
                                  flush=jjb_config.builder['flush_cache'])
        self._plugins_list = jjb_config.builder['plugins_info']

        self.jjb_config = jjb_config

    @property
    def plugins_list(self):
        if self._plugins_list is None:
            self._plugins_list = self.jenkins.get_plugins_info()
        return self._plugins_list

    def delete_old_managed(self, keep=None):
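        """Delete managed jobs on the server whose names are not in ``keep``.

        Returns the number of jobs deleted.
        """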
        # tolerate the default of None: "name not in None" would raise
        # TypeError below
        if keep is None:
            keep = []
        jobs = self.jenkins.get_jobs()
        deleted_jobs = 0
        for job in jobs:
            if job['name'] not in keep:
                if self.jenkins.is_managed(job['name']):
                    logger.info("Removing obsolete jenkins job {0}"
                                .format(job['name']))
                    self.delete_job([job['name']])
                    deleted_jobs += 1
                else:
                    logger.info("Not deleting unmanaged jenkins job %s",
                                job['name'])
            else:
                logger.debug("Keeping job %s", job['name'])
        return deleted_jobs

    def delete_job(self, jobs):
        if jobs is not None:
            logger.info("Removing jenkins job(s): %s" % ", ".join(jobs))
        for job in jobs:
            self.jenkins.delete_job(job)
            if self.cache.is_cached(job):
                self.cache.set(job, '')
        self.cache.save()

    def delete_all_jobs(self):
        jobs = self.jenkins.get_jobs()
        logger.info("Number of jobs to delete: %d", len(jobs))
        self.jenkins.delete_all_jobs()
        # Need to clear the JJB cache after deletion
        self.cache.clear()

    def changed(self, job):
        md5 = job.md5()

        changed = (self.jjb_config.builder['ignore_cache'] or
                   self.cache.has_changed(job.name, md5))
        if not changed:
            logger.debug("'{0}' has not changed".format(job.name))
        return changed

    def update_jobs(self, xml_jobs, output=None, n_workers=None):
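        """Upload the given job XML to Jenkins, or write it to ``output``.

        When ``output`` is set (a directory path or a file-like object) the
        XML is written there and nothing is uploaded; otherwise only jobs
        whose md5 differs from the cache are sent to the server. Returns a
        tuple of the processed jobs and their count.
        """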
        orig = time.time()

        logger.info("Number of jobs generated: %d", len(xml_jobs))
        xml_jobs.sort(key=operator.attrgetter('name'))

        if (output and not hasattr(output, 'write') and
                not os.path.isdir(output)):
            logger.info("Creating directory %s" % output)
            try:
                os.makedirs(output)
            except OSError:
                if not os.path.isdir(output):
                    raise

        if output:
            # ensure only wrapped once
            if hasattr(output, 'write'):
                output = utils.wrap_stream(output)

            for job in xml_jobs:
                if hasattr(output, 'write'):
                    # `output` is a file-like object
                    logger.info("Job name: %s", job.name)
                    logger.debug("Writing XML to '{0}'".format(output))
                    try:
                        output.write(job.output())
                    except IOError as exc:
                        if exc.errno == errno.EPIPE:
                            # EPIPE could happen if piping output to something
                            # that doesn't read the whole input (e.g.: the
                            # UNIX `head` command)
                            return
                        raise
                    continue

                output_fn = os.path.join(output, job.name)
                logger.debug("Writing XML to '{0}'".format(output_fn))
                with io.open(output_fn, 'w', encoding='utf-8') as f:
                    f.write(job.output().decode('utf-8'))
            return xml_jobs, len(xml_jobs)

        # Filter out the jobs that did not change
        logger.debug('Filtering %d jobs for changed jobs',
                     len(xml_jobs))
        step = time.time()
        jobs = [job for job in xml_jobs
                if self.changed(job)]
        logger.debug("Filtered for changed jobs in %ss",
                     (time.time() - step))

        if not jobs:
            return [], 0

        # Update the jobs
        logger.debug('Updating jobs')
        step = time.time()
        p_params = [{'job': job} for job in jobs]
        results = self.parallel_update_job(
            n_workers=n_workers,
            concurrent=p_params)
        logger.debug("Parsing results")
        # generalize the result parsing, as a concurrent job always returns a
        # list
        if len(p_params) in (1, 0):
            results = [results]
        for result in results:
            if isinstance(result, Exception):
                raise result
            else:
                # update in-memory cache
                j_name, j_md5 = result
                self.cache.set(j_name, j_md5)
        # write cache to disk
        self.cache.save()
        logger.debug("Updated %d jobs in %ss",
                     len(jobs),
                     time.time() - step)
        logger.debug("Total run took %ss", (time.time() - orig))
        return jobs, len(jobs)

    @concurrent
    def parallel_update_job(self, job):
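        """Upload a single job's XML; the @concurrent decorator fans this
        call out across worker threads when invoked with a ``concurrent=``
        list of parameter dicts, as update_jobs does above."""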
        self.jenkins.update_job(job.name, job.output().decode('utf-8'))
        return (job.name, job.md5())

    def update_job(self, input_fn, jobs_glob=None, output=None):
        logger.warning('Current update_job function signature is deprecated '
                       'and will change in future versions to the signature '
                       'of the new parallel_update_job')
        return self.update_jobs(input_fn, jobs_glob, output)