#!/usr/bin/env python
# Copyright (C) 2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Manage jobs in Jenkins server
import errno
import os
import operator
import sys
import hashlib
import yaml
import xml.etree.ElementTree as XML
import xml
from xml.dom import minidom
import jenkins
import re
from pprint import pformat
import logging
import time

from jenkins_jobs.constants import MAGIC_MANAGE_STRING
from jenkins_jobs.parallel import parallelize
from jenkins_jobs.parser import YamlParser

logger = logging.getLogger(__name__)


# Python 2.6's minidom toprettyxml produces broken output by adding extraneous
# whitespace around data. This patches the broken implementation with one taken
# from Python >= 2.7.3
def writexml(self, writer, indent="", addindent="", newl=""):
    # indent = current indentation
    # addindent = indentation to add to higher levels
    # newl = newline string
    writer.write(indent + "<" + self.tagName)

    attrs = self._get_attributes()
    a_names = attrs.keys()
    a_names.sort()

    for a_name in a_names:
        writer.write(" %s=\"" % a_name)
        minidom._write_data(writer, attrs[a_name].value)
        writer.write("\"")
    if self.childNodes:
        writer.write(">")
        if (len(self.childNodes) == 1 and
                self.childNodes[0].nodeType == minidom.Node.TEXT_NODE):
            self.childNodes[0].writexml(writer, '', '', '')
        else:
            writer.write(newl)
            for node in self.childNodes:
                node.writexml(writer, indent + addindent, addindent, newl)
            writer.write(indent)
        writer.write("</%s>%s" % (self.tagName, newl))
    else:
        writer.write("/>%s" % (newl))

# PyXML xml.__name__ is _xmlplus. Check that if we don't have the default
# system version of the minidom, then patch the writexml method
if sys.version_info[:3] < (2, 7, 3) or xml.__name__ != 'xml':
    minidom.Element.writexml = writexml


class CacheStorage(object):
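    """Cache the md5 of each uploaded job's XML, keyed by job name and
    stored in a per-Jenkins-URL YAML file, so that unchanged jobs can be
    skipped on subsequent runs.
    """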
    # ensure each instance of the class has a reference to the required
    # modules so that they are available to be used when the destructor
    # is being called since python will not guarantee that it won't have
    # removed global module references during teardown.
    _yaml = yaml
    _logger = logger

    def __init__(self, jenkins_url, flush=False):
        cache_dir = self.get_cache_dir()
        # One cache per remote Jenkins URL:
        host_vary = re.sub(r'[^A-Za-z0-9\-\~]', '_', jenkins_url)
        self.cachefilename = os.path.join(
            cache_dir, 'cache-host-jobs-' + host_vary + '.yml')
        if flush or not os.path.isfile(self.cachefilename):
            self.data = {}
        else:
            with open(self.cachefilename, 'r') as yfile:
                self.data = yaml.load(yfile)
        logger.debug("Using cache: '{0}'".format(self.cachefilename))

    @staticmethod
    def get_cache_dir():
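        """Return the jenkins_jobs cache directory, honoring
        $XDG_CACHE_HOME and creating the directory if it does not exist.
        """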
        home = os.path.expanduser('~')
        if home == '~':
            raise OSError('Could not locate home folder')
        xdg_cache_home = os.environ.get('XDG_CACHE_HOME') or \
            os.path.join(home, '.cache')
        path = os.path.join(xdg_cache_home, 'jenkins_jobs')
        if not os.path.isdir(path):
            os.makedirs(path)
        return path

    def set(self, job, md5):
        self.data[job] = md5

    def is_cached(self, job):
        if job in self.data:
            return True
        return False

    def has_changed(self, job, md5):
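        """Return True unless the job is cached with the same md5."""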
        if job in self.data and self.data[job] == md5:
            return False
        return True

    def save(self):
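        """Write the in-memory cache out to disk as YAML."""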
        # check we initialized sufficiently in case called via __del__
        # due to an exception occurring in the __init__
        if getattr(self, 'data', None) is not None:
            try:
                with open(self.cachefilename, 'w') as yfile:
                    self._yaml.dump(self.data, yfile)
            except Exception as e:
                self._logger.error("Failed to write to cache file '%s' on "
                                   "exit: %s" % (self.cachefilename, e))
            else:
                self._logger.info("Cache saved")
                self._logger.debug("Cache written out to '%s'" %
                                   self.cachefilename)

    def __del__(self):
        self.save()


class Jenkins(object):
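    """Thin wrapper around the python-jenkins client that adds md5-based
    change detection and detection of jobs managed by Jenkins Job Builder.
    """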
    def __init__(self, url, user, password):
        self.jenkins = jenkins.Jenkins(url, user, password)

    @parallelize
    def update_job(self, job_name, xml):
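        """Reconfigure the named job with the given XML, creating the job
        first if it does not exist yet.
        """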
        if self.is_job(job_name):
            logger.info("Reconfiguring jenkins job {0}".format(job_name))
            self.jenkins.reconfig_job(job_name, xml)
        else:
            logger.info("Creating jenkins job {0}".format(job_name))
            self.jenkins.create_job(job_name, xml)

    def is_job(self, job_name):
        return self.jenkins.job_exists(job_name)

    def get_job_md5(self, job_name):
        xml = self.jenkins.get_job_config(job_name)
        return hashlib.md5(xml).hexdigest()

    def delete_job(self, job_name):
        if self.is_job(job_name):
            logger.info("Deleting jenkins job {0}".format(job_name))
            self.jenkins.delete_job(job_name)

    def get_plugins_info(self):
        """Return a list of plugin_info dicts, one for each plugin on the
        Jenkins instance.
        """
        try:
            plugins_list = self.jenkins.get_plugins_info()
        except jenkins.JenkinsException as e:
            if re.search("Connection refused", str(e)):
                logger.warning("Unable to retrieve Jenkins Plugin Info from "
                               "{0}, using default empty plugins info "
                               "list.".format(self.jenkins.server))
                plugins_list = [{'shortName': '',
                                 'version': '',
                                 'longName': ''}]
            else:
                raise
        logger.debug("Jenkins Plugin Info {0}".format(pformat(plugins_list)))

        return plugins_list

    def get_jobs(self):
        return self.jenkins.get_jobs()

    def is_managed(self, job_name):
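        """Return True if the job's description ends with the magic marker
        that Jenkins Job Builder appends to the jobs it manages.
        """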
        xml = self.jenkins.get_job_config(job_name)
        try:
            out = XML.fromstring(xml)
            description = out.find(".//description").text
            return description.endswith(MAGIC_MANAGE_STRING)
        except (TypeError, AttributeError):
            pass
        return False


class Builder(object):
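    """Coordinate the full update cycle: parse the YAML definitions,
    generate job XML, and create, update or delete jobs on the Jenkins
    instance, using CacheStorage to skip jobs that have not changed.

    A minimal usage sketch, assuming a reachable Jenkins instance with
    valid credentials and a directory of YAML job definitions:

        builder = Builder('http://jenkins.example.com', 'user', 'password')
        builder.update_jobs(['jobs/'])
    """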
    def __init__(self, jenkins_url, jenkins_user, jenkins_password,
                 config=None, ignore_cache=False, flush_cache=False,
                 plugins_list=None):
        self.jenkins = Jenkins(jenkins_url, jenkins_user, jenkins_password)
        self.cache = CacheStorage(jenkins_url, flush=flush_cache)
        self.global_config = config
        self.ignore_cache = ignore_cache
        self._plugins_list = plugins_list

    @property
    def plugins_list(self):
        if self._plugins_list is None:
            self._plugins_list = self.jenkins.get_plugins_info()
        return self._plugins_list

    def load_files(self, fn):
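        """Parse the given YAML sources into self.parser.

        `fn` may be a list of file paths, directories or file-like
        objects; directories are expanded to the .yml/.yaml files they
        contain, and duplicate files reached via symlinks are skipped.
        """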
        self.parser = YamlParser(self.global_config, self.plugins_list)

        # handle deprecated behavior
        if not hasattr(fn, '__iter__'):
            logger.warning(
                'Passing single elements for the `fn` argument in '
                'Builder.load_files is deprecated. Please update your code '
                'to use a list as support for automatic conversion will be '
                'removed in a future version.')
            fn = [fn]

        files_to_process = []
        for path in fn:
            if os.path.isdir(path):
                files_to_process.extend([os.path.join(path, f)
                                         for f in os.listdir(path)
                                         if (f.endswith('.yml')
                                             or f.endswith('.yaml'))])
            else:
                files_to_process.append(path)

        # symlinks used to allow loading of sub-dirs can result in duplicate
        # definitions of macros and templates when loading all from top-level
        unique_files = []
        for f in files_to_process:
            rpf = os.path.realpath(f)
            if rpf not in unique_files:
                unique_files.append(rpf)
            else:
                logger.warning("File '%s' already added as '%s', ignoring "
                               "reference to avoid duplicating yaml "
                               "definitions." % (f, rpf))

        for in_file in unique_files:
            # ask-for-permission (hasattr) is used here instead of
            # ask-for-forgiveness (try/except) as it performs better when
            # the attribute is missing in the common case, as it is here.
            if hasattr(in_file, 'name'):
                fname = in_file.name
            else:
                fname = in_file
            logger.debug("Parsing YAML file {0}".format(fname))
            if hasattr(in_file, 'read'):
                self.parser.parse_fp(in_file)
            else:
                self.parser.parse(in_file)

    def delete_old_managed(self, keep):
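        """Delete managed jobs that are not in the `keep` list and return
        the number of jobs deleted.
        """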
        jobs = self.jenkins.get_jobs()
        deleted_jobs = 0
        for job in jobs:
            if job['name'] not in keep and \
                    self.jenkins.is_managed(job['name']):
                logger.info("Removing obsolete jenkins job {0}"
                            .format(job['name']))
                self.delete_job(job['name'])
                deleted_jobs += 1
            else:
                logger.debug("Ignoring unmanaged jenkins job %s",
                             job['name'])
        return deleted_jobs

    def delete_job(self, jobs_glob, fn=None):
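        """Delete the named job or, when `fn` is given, every job from
        those YAML definitions matching `jobs_glob`; deleted jobs are also
        cleared from the cache.
        """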
        if fn:
            self.load_files(fn)
            self.parser.expandYaml([jobs_glob])
            jobs = [j['name'] for j in self.parser.jobs]
        else:
            jobs = [jobs_glob]

        if jobs is not None:
            logger.info("Removing jenkins job(s): %s" % ", ".join(jobs))
        for job in jobs:
            self.jenkins.delete_job(job)
            if self.cache.is_cached(job):
                self.cache.set(job, '')
        self.cache.save()

    def delete_all_jobs(self):
        jobs = self.jenkins.get_jobs()
        logger.info("Number of jobs to delete: %d", len(jobs))
        for job in jobs:
            self.delete_job(job['name'])

    @parallelize
    def changed(self, job):
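        """Return True if the job's md5 differs from the cached one or if
        the cache is being ignored.
        """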
        md5 = job.md5()
        changed = self.ignore_cache or self.cache.has_changed(job.name, md5)
        if not changed:
            logger.debug("'{0}' has not changed".format(job.name))
        return changed

    def update_jobs(self, input_fn, jobs_glob=None, output=None,
                    n_workers=None):
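        """Generate XML for all jobs matching `jobs_glob` and either write
        it to `output` (a directory or file-like object) or upload the
        jobs that changed to Jenkins, updating the cache as jobs are
        uploaded.
        """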
        orig = time.time()
        self.load_files(input_fn)
        self.parser.expandYaml(jobs_glob)
        self.parser.generateXML()
        step = time.time()
        logger.debug('%d XML files generated in %ss',
                     len(self.parser.jobs), str(step - orig))

        logger.info("Number of jobs generated: %d", len(self.parser.xml_jobs))
        self.parser.xml_jobs.sort(key=operator.attrgetter('name'))

        if (output and not hasattr(output, 'write')
                and not os.path.isdir(output)):
            logger.info("Creating directory %s" % output)
            try:
                os.makedirs(output)
            except OSError:
                if not os.path.isdir(output):
                    raise

        if output:
            for job in self.parser.xml_jobs:
                if hasattr(output, 'write'):
                    # `output` is a file-like object
                    logger.info("Job name: %s", job.name)
                    logger.debug("Writing XML to '{0}'".format(output))
                    try:
                        output.write(job.output())
                    except IOError as exc:
                        if exc.errno == errno.EPIPE:
                            # EPIPE could happen if piping output to something
                            # that doesn't read the whole input (e.g.: the UNIX
                            # `head` command)
                            return
                        raise
                    continue

                output_fn = os.path.join(output, job.name)
                logger.debug("Writing XML to '{0}'".format(output_fn))
                with open(output_fn, 'w') as f:
                    f.write(job.output())
            return self.parser.xml_jobs, len(self.parser.xml_jobs)

        # Filter out the jobs that did not change
        logger.debug('Filtering %d jobs for changed jobs',
                     len(self.parser.xml_jobs))
        step = time.time()
        jobs = [job for job in self.parser.xml_jobs
                if self.changed(job)]
        logger.debug("Filtered for changed jobs in %ss",
                     (time.time() - step))

        if not jobs:
            return [], 0

        # Update the jobs
        logger.debug('Updating jobs')
        step = time.time()
        p_params = [{'job': job} for job in jobs]
        results = self.parallel_update_job(
            n_workers=n_workers,
            parallelize=p_params)
        logger.debug("Parsing results")
        # normalize the results to a list, as a parallelized call with zero
        # or one parameter sets returns a bare result rather than a list
        if len(p_params) in (1, 0):
            results = [results]
        for result in results:
            if isinstance(result, Exception):
                raise result
            else:
                # update in-memory cache
                j_name, j_md5 = result
                self.cache.set(j_name, j_md5)
        # write cache to disk
        self.cache.save()
        logger.debug("Updated %d jobs in %ss",
                     len(jobs),
                     time.time() - step)
        logger.debug("Total run took %ss", (time.time() - orig))
        return jobs, len(jobs)

    @parallelize
    def parallel_update_job(self, job):
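        """Upload a single job to Jenkins and return its (name, md5) so
        the caller can update the cache.
        """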
        self.jenkins.update_job(job.name, job.output())
        return (job.name, job.md5())

    def update_job(self, input_fn, jobs_glob=None, output=None):
        logger.warning(
            'Current update_job function signature is deprecated and '
            'will change in future versions to the signature of the '
            'new parallel_update_job')
        return self.update_jobs(input_fn, jobs_glob, output)