Auto-generated output from python-black

Please review the following patch containing the code changes in
the repo. This is a transition patch: the auto-generated output of
the python-black tool.

Change-Id: I2d2de71da8a105fb62b561899ae78441ddab4032
Signed-off-by: Thanh Ha <zxiiro@gmail.com>
Authored by Thanh Ha on 2019-08-23 09:14:39 -04:00; committed by Sorin Sbarnea
parent ead185134d
commit 4d90c187a9
89 changed files with 10446 additions and 9903 deletions
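
For reviewers unfamiliar with black's defaults: every rewrite below (single
quotes changed to double quotes, lines wrapped at 88 columns, long argument
lists exploded one element per line with trailing commas) is the formatter's
standard output rather than a hand edit. The following is a minimal sketch of
reproducing one such rewrite from this patch through black's Python API; it
assumes black is installed, and the snippet is illustrative only, not part of
the patch.

    import black

    # A pre-patch line from this diff; black normalizes the quote style.
    src = "sys.path.insert(0, os.path.abspath('../..'))\n"

    # FileMode() selects the defaults used for this patch: 88-column
    # target lines and double-quoted strings.
    print(black.format_str(src, mode=black.FileMode()), end="")
    # -> sys.path.insert(0, os.path.abspath("../.."))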


@@ -18,8 +18,8 @@ from jenkins_jobs.version import version_info as jenkins_jobs_version
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../..'))
sys.path.insert(0, os.path.abspath('../../jenkins_jobs/modules'))
sys.path.insert(0, os.path.abspath("../.."))
sys.path.insert(0, os.path.abspath("../../jenkins_jobs/modules"))
# -- General configuration ----------------------------------------------------
@@ -28,25 +28,30 @@ sys.path.insert(0, os.path.abspath('../../jenkins_jobs/modules'))
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage',
'jenkins_jobs.sphinx.yaml', 'sphinxcontrib.programoutput',
'sphinx.ext.extlinks', 'sphinx.ext.doctest']
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"jenkins_jobs.sphinx.yaml",
"sphinxcontrib.programoutput",
"sphinx.ext.extlinks",
"sphinx.ext.doctest",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
master_doc = "index"
# General information about the project.
project = u'Jenkins Job Builder'
copyright = u'2012, Jenkins Job Builder Maintainers'
project = u"Jenkins Job Builder"
copyright = u"2012, Jenkins Job Builder Maintainers"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -86,7 +91,7 @@ exclude_patterns = []
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
@@ -96,7 +101,7 @@ pygments_style = 'sphinx'
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
@@ -169,7 +174,7 @@ html_theme = 'default'
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'JenkinsJobBuilderdoc'
htmlhelp_basename = "JenkinsJobBuilderdoc"
# -- Options for LaTeX output -------------------------------------------------
@@ -177,10 +182,8 @@ htmlhelp_basename = 'JenkinsJobBuilderdoc'
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
@@ -189,8 +192,13 @@ latex_elements = {
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'JenkinsJobBuilder.tex', u'Jenkins Job Builder Documentation',
u'Jenkins Job Builder Maintainers', 'manual'),
(
"index",
"JenkinsJobBuilder.tex",
u"Jenkins Job Builder Documentation",
u"Jenkins Job Builder Maintainers",
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
@@ -225,8 +233,13 @@ linkcheck_timeout = 15
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'jenkins-jobs', u'Jenkins Job Builder Documentation',
[u'Jenkins Job Builder Maintainers'], 1)
(
"index",
"jenkins-jobs",
u"Jenkins Job Builder Documentation",
[u"Jenkins Job Builder Maintainers"],
1,
)
]
# If true, show URL addresses after external links.
@@ -239,10 +252,15 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'JenkinsJobBuilder', u'Jenkins Job Builder Documentation',
u'Jenkins Job Builder Maintainers',
'JenkinsJobBuilder', 'One line description of project.',
'Miscellaneous'),
(
"index",
"JenkinsJobBuilder",
u"Jenkins Job Builder Documentation",
u"Jenkins Job Builder Maintainers",
"JenkinsJobBuilder",
"One line description of project.",
"Miscellaneous",
)
]
# Documents to append as an appendix to all manuals.
@@ -254,6 +272,7 @@ texinfo_documents = [
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
extlinks = {'jenkins-wiki': ('https://wiki.jenkins.io/display/JENKINS/%s',
None),
'jenkins-plugins': ('https://plugins.jenkins.io/%s', None)}
extlinks = {
"jenkins-wiki": ("https://wiki.jenkins.io/display/JENKINS/%s", None),
"jenkins-plugins": ("https://plugins.jenkins.io/%s", None),
}


@@ -42,9 +42,9 @@ def getchunk(item):
# Subtract the matched portion from the original string
# if there was a match, otherwise set it to ""
item = (item[itemchunk.end():] if itemchunk else "")
item = item[itemchunk.end() :] if itemchunk else ""
# Don't return the match object, just the text
itemchunk = (itemchunk.group() if itemchunk else "")
itemchunk = itemchunk.group() if itemchunk else ""
return (itemchunk, item)
@@ -54,28 +54,28 @@ def cmp(a, b):
def alphanum(a, b):
a = a.name if hasattr(a, 'name') else str(a)
b = b.name if hasattr(b, 'name') else str(b)
a = a.name if hasattr(a, "name") else str(a)
b = b.name if hasattr(b, "name") else str(b)
n = 0
while (n == 0):
while n == 0:
# Get a chunk and the original string with the chunk subtracted
(ac, a) = getchunk(a)
(bc, b) = getchunk(b)
# Both items contain only letters
if (re_letters.match(ac) and re_letters.match(bc)):
if re_letters.match(ac) and re_letters.match(bc):
n = cmp(ac, bc)
else:
# Both items contain only numbers
if (re_numbers.match(ac) and re_numbers.match(bc)):
if re_numbers.match(ac) and re_numbers.match(bc):
n = cmp(int(ac), int(bc))
# item has letters and one item has numbers, or one item is empty
else:
n = cmp(ac, bc)
# Prevent deadlocks
if (n == 0):
if n == 0:
n = 1
return n
@@ -105,5 +105,5 @@ class AlphanumSort(object):
if __name__ == "__main__":
mylist = ['a2', 'a1', 'a10', 'a']
assert sorted(mylist, key=AlphanumSort) == ['a', 'a1', 'a2', 'a10']
mylist = ["a2", "a1", "a10", "a"]
assert sorted(mylist, key=AlphanumSort) == ["a", "a1", "a2", "a10"]


@@ -34,9 +34,7 @@ from jenkins_jobs.constants import MAGIC_MANAGE_STRING
from jenkins_jobs.parallel import concurrent
from jenkins_jobs import utils
__all__ = [
"JenkinsManager"
]
__all__ = ["JenkinsManager"]
logger = logging.getLogger(__name__)
@@ -44,22 +42,22 @@ _DEFAULT_TIMEOUT = object()
class JenkinsManager(object):
def __init__(self, jjb_config):
url = jjb_config.jenkins['url']
user = jjb_config.jenkins['user']
password = jjb_config.jenkins['password']
timeout = jjb_config.jenkins['timeout']
url = jjb_config.jenkins["url"]
user = jjb_config.jenkins["user"]
password = jjb_config.jenkins["password"]
timeout = jjb_config.jenkins["timeout"]
if timeout != _DEFAULT_TIMEOUT:
self.jenkins = jenkins.Jenkins(url, user, password, timeout)
else:
self.jenkins = jenkins.Jenkins(url, user, password)
self.cache = JobCache(jjb_config.jenkins['url'],
flush=jjb_config.builder['flush_cache'])
self.cache = JobCache(
jjb_config.jenkins["url"], flush=jjb_config.builder["flush_cache"]
)
self._plugins_list = jjb_config.builder['plugins_info']
self._plugins_list = jjb_config.builder["plugins_info"]
self._jobs = None
self._job_list = None
self._views = None
@@ -69,16 +67,15 @@ class JenkinsManager(object):
def _setup_output(self, output, item, config_xml=False):
output_dir = output
output_fn = os.path.join(output, item)
if '/' in item:
if "/" in item:
# in item folder
output_fn = os.path.join(output, os.path.normpath(item))
output_dir = os.path.dirname(output_fn)
# if in a folder, re-adding name to the directory here
if config_xml:
output_dir = os.path.join(
output_dir, os.path.basename(item))
output_fn = os.path.join(output_dir, 'config.xml')
output_dir = os.path.join(output_dir, os.path.basename(item))
output_fn = os.path.join(output_dir, "config.xml")
if output_dir != output:
logger.debug("Creating directory %s" % output_dir)
@@ -102,36 +99,43 @@ class JenkinsManager(object):
def job_list(self):
if self._job_list is None:
# python-jenkins uses 'fullname' for folder/name combination
self._job_list = set(job['fullname'] for job in self.jobs)
self._job_list = set(job["fullname"] for job in self.jobs)
return self._job_list
def _job_format(self, job_name):
# returns job name or url based on config option
if self._jjb_config.builder['print_job_urls']:
return self._jjb_config.jenkins['url'] + \
'/job/' + quote(
'/job/'.join(job_name.split('/')).encode('utf8')) + '/'
if self._jjb_config.builder["print_job_urls"]:
return (
self._jjb_config.jenkins["url"]
+ "/job/"
+ quote("/job/".join(job_name.split("/")).encode("utf8"))
+ "/"
)
else:
return job_name
def _view_format(self, view_name):
# returns job name or url based on config option
if self._jjb_config.builder['print_job_urls']:
parts = view_name.split('/')
return self._jjb_config.jenkins['url'] + \
''.join(['/job/' + item for item in parts[:-1]]) + \
'/view/' + parts[-1] + '/'
if self._jjb_config.builder["print_job_urls"]:
parts = view_name.split("/")
return (
self._jjb_config.jenkins["url"]
+ "".join(["/job/" + item for item in parts[:-1]])
+ "/view/"
+ parts[-1]
+ "/"
)
else:
return view_name
def update_job(self, job_name, xml):
if self.is_job(job_name):
logger.info("Reconfiguring jenkins job {0}".format(
self._job_format(job_name)))
logger.info(
"Reconfiguring jenkins job {0}".format(self._job_format(job_name))
)
self.jenkins.reconfig_job(job_name, xml)
else:
logger.info("Creating jenkins job {0}".format(
self._job_format(job_name)))
logger.info("Creating jenkins job {0}".format(self._job_format(job_name)))
self.jenkins.create_job(job_name, xml)
def is_job(self, job_name, use_cache=True):
@@ -143,7 +147,7 @@ class JenkinsManager(object):
def get_job_md5(self, job_name):
xml = self.jenkins.get_job_config(job_name)
return hashlib.md5(xml.encode('utf-8')).hexdigest()
return hashlib.md5(xml.encode("utf-8")).hexdigest()
def delete_job(self, job_name):
if self.is_job(job_name):
@@ -162,10 +166,10 @@ class JenkinsManager(object):
logger.warning(
"Unable to retrieve Jenkins Plugin Info from {0},"
" using default empty plugins info list.".format(
self.jenkins.server))
plugins_list = [{'shortName': '',
'version': '',
'longName': ''}]
self.jenkins.server
)
)
plugins_list = [{"shortName": "", "version": "", "longName": ""}]
else:
raise
logger.debug("Jenkins Plugin Info {0}".format(pformat(plugins_list)))
@@ -181,7 +185,7 @@ class JenkinsManager(object):
def is_managed(self, job_name):
xml = self.jenkins.get_job_config(job_name)
try:
out = XML.fromstring(xml.encode('utf-8'))
out = XML.fromstring(xml.encode("utf-8"))
description = out.find(".//description").text
return description.endswith(MAGIC_MANAGE_STRING)
except (TypeError, AttributeError):
@@ -202,18 +206,21 @@ class JenkinsManager(object):
for job in jobs:
# python-jenkins stores the folder and name as 'fullname'
# Check if the job was deleted when his parent folder was deleted
if job['fullname'] not in keep and \
self.is_job(job['fullname'], use_cache=False):
if self.is_managed(job['fullname']):
logger.info("Removing obsolete jenkins job {0}"
.format(job['fullname']))
self.delete_job(job['fullname'])
if job["fullname"] not in keep and self.is_job(
job["fullname"], use_cache=False
):
if self.is_managed(job["fullname"]):
logger.info(
"Removing obsolete jenkins job {0}".format(job["fullname"])
)
self.delete_job(job["fullname"])
deleted_jobs += 1
else:
logger.info("Not deleting unmanaged jenkins job %s",
job['fullname'])
logger.info(
"Not deleting unmanaged jenkins job %s", job["fullname"]
)
else:
logger.debug("Keeping job %s", job['fullname'])
logger.debug("Keeping job %s", job["fullname"])
return deleted_jobs
def delete_jobs(self, jobs):
@@ -221,15 +228,17 @@ class JenkinsManager(object):
logger.info("Removing jenkins job(s): %s" % ", ".join(jobs))
for job in jobs:
self.delete_job(job)
if(self.cache.is_cached(job)):
self.cache.set(job, '')
if self.cache.is_cached(job):
self.cache.set(job, "")
self.cache.save()
def delete_all_jobs(self):
jobs = self.get_jobs()
logger.info("Number of jobs to delete: %d", len(jobs))
script = ('for(job in jenkins.model.Jenkins.theInstance.getAllItems())'
' { job.delete(); }')
script = (
"for(job in jenkins.model.Jenkins.theInstance.getAllItems())"
" { job.delete(); }"
)
self.jenkins.run_script(script)
# Need to clear the JJB cache after deletion
self.cache.clear()
@@ -237,8 +246,9 @@ class JenkinsManager(object):
def changed(self, job):
md5 = job.md5()
changed = (self._jjb_config.builder['ignore_cache'] or
self.cache.has_changed(job.name, md5))
changed = self._jjb_config.builder["ignore_cache"] or self.cache.has_changed(
job.name, md5
)
if not changed:
logger.debug("'{0}' has not changed".format(job.name))
return changed
@@ -249,15 +259,20 @@ class JenkinsManager(object):
logger.debug("'{0}' does not currently exist".format(job.name))
return exists
def update_jobs(self, xml_jobs, output=None, n_workers=None,
existing_only=None, config_xml=False):
def update_jobs(
self,
xml_jobs,
output=None,
n_workers=None,
existing_only=None,
config_xml=False,
):
orig = time.time()
logger.info("Number of jobs generated: %d", len(xml_jobs))
xml_jobs.sort(key=AlphanumSort)
if (output and not hasattr(output, 'write') and
not os.path.isdir(output)):
if output and not hasattr(output, "write") and not os.path.isdir(output):
logger.debug("Creating directory %s" % output)
try:
os.makedirs(output)
@@ -267,11 +282,11 @@ class JenkinsManager(object):
if output:
# ensure only wrapped once
if hasattr(output, 'write'):
if hasattr(output, "write"):
output = utils.wrap_stream(output)
for job in xml_jobs:
if hasattr(output, 'write'):
if hasattr(output, "write"):
# `output` is a file-like object
logger.info("Job name: %s", job.name)
logger.debug("Writing XML to '{0}'".format(output))
@@ -289,39 +304,31 @@ class JenkinsManager(object):
output_fn = self._setup_output(output, job.name, config_xml)
logger.debug("Writing XML to '{0}'".format(output_fn))
with io.open(output_fn, 'w', encoding='utf-8') as f:
f.write(job.output().decode('utf-8'))
with io.open(output_fn, "w", encoding="utf-8") as f:
f.write(job.output().decode("utf-8"))
return xml_jobs, len(xml_jobs)
# Filter out the jobs that did not change
logging.debug('Filtering %d jobs for changed jobs',
len(xml_jobs))
logging.debug("Filtering %d jobs for changed jobs", len(xml_jobs))
step = time.time()
jobs = [job for job in xml_jobs
if self.changed(job)]
logging.debug("Filtered for changed jobs in %ss",
(time.time() - step))
jobs = [job for job in xml_jobs if self.changed(job)]
logging.debug("Filtered for changed jobs in %ss", (time.time() - step))
if existing_only:
# Filter out the jobs not already in the cache
logging.debug('Filtering %d jobs for existing jobs',
len(jobs))
logging.debug("Filtering %d jobs for existing jobs", len(jobs))
step = time.time()
jobs = [job for job in jobs
if self.exists(job)]
logging.debug("Filtered for existing jobs in %ss",
(time.time() - step))
jobs = [job for job in jobs if self.exists(job)]
logging.debug("Filtered for existing jobs in %ss", (time.time() - step))
if not jobs:
return [], 0
# Update the jobs
logging.debug('Updating jobs')
logging.debug("Updating jobs")
step = time.time()
p_params = [{'job': job} for job in jobs]
results = self.parallel_update_job(
n_workers=n_workers,
concurrent=p_params)
p_params = [{"job": job} for job in jobs]
results = self.parallel_update_job(n_workers=n_workers, concurrent=p_params)
logging.debug("Parsing results")
# generalize the result parsing, as a concurrent job always returns a
# list
@@ -336,15 +343,13 @@ class JenkinsManager(object):
self.cache.set(j_name, j_md5)
# write cache to disk
self.cache.save()
logging.debug("Updated %d jobs in %ss",
len(jobs),
time.time() - step)
logging.debug("Updated %d jobs in %ss", len(jobs), time.time() - step)
logging.debug("Total run took %ss", (time.time() - orig))
return jobs, len(jobs)
@concurrent
def parallel_update_job(self, job):
self.update_job(job.name, job.output().decode('utf-8'))
self.update_job(job.name, job.output().decode("utf-8"))
return (job.name, job.md5())
################
@@ -361,7 +366,7 @@ class JenkinsManager(object):
@property
def view_list(self):
if self._view_list is None:
self._view_list = set(view['name'] for view in self.views)
self._view_list = set(view["name"] for view in self.views)
return self._view_list
def get_views(self, cache=True):
@@ -389,7 +394,7 @@ class JenkinsManager(object):
for view in views:
self.delete_view(view)
if self.cache.is_cached(view):
self.cache.set(view, '')
self.cache.set(view, "")
self.cache.save()
def delete_all_views(self):
@@ -399,22 +404,30 @@ class JenkinsManager(object):
views.pop(0)
logger.info("Number of views to delete: %d", len(views))
for view in views:
self.delete_view(view['name'])
self.delete_view(view["name"])
# Need to clear the JJB cache after deletion
self.cache.clear()
def update_view(self, view_name, xml):
if self.is_view(view_name):
logger.info("Reconfiguring jenkins view {0}".format(
self._view_format(view_name)))
logger.info(
"Reconfiguring jenkins view {0}".format(self._view_format(view_name))
)
self.jenkins.reconfig_view(view_name, xml)
else:
logger.info("Creating jenkins view {0}".format(
self._view_format(view_name)))
logger.info(
"Creating jenkins view {0}".format(self._view_format(view_name))
)
self.jenkins.create_view(view_name, xml)
def update_views(self, xml_views, output=None, n_workers=None,
existing_only=None, config_xml=False):
def update_views(
self,
xml_views,
output=None,
n_workers=None,
existing_only=None,
config_xml=False,
):
orig = time.time()
logger.info("Number of views generated: %d", len(xml_views))
@@ -422,11 +435,11 @@ class JenkinsManager(object):
if output:
# ensure only wrapped once
if hasattr(output, 'write'):
if hasattr(output, "write"):
output = utils.wrap_stream(output)
for view in xml_views:
if hasattr(output, 'write'):
if hasattr(output, "write"):
# `output` is a file-like object
logger.info("View name: %s", view.name)
logger.debug("Writing XML to '{0}'".format(output))
@@ -444,39 +457,31 @@ class JenkinsManager(object):
output_fn = self._setup_output(output, view.name, config_xml)
logger.debug("Writing XML to '{0}'".format(output_fn))
with io.open(output_fn, 'w', encoding='utf-8') as f:
f.write(view.output().decode('utf-8'))
with io.open(output_fn, "w", encoding="utf-8") as f:
f.write(view.output().decode("utf-8"))
return xml_views, len(xml_views)
# Filter out the views that did not change
logging.debug('Filtering %d views for changed views',
len(xml_views))
logging.debug("Filtering %d views for changed views", len(xml_views))
step = time.time()
views = [view for view in xml_views
if self.changed(view)]
logging.debug("Filtered for changed views in %ss",
(time.time() - step))
views = [view for view in xml_views if self.changed(view)]
logging.debug("Filtered for changed views in %ss", (time.time() - step))
if existing_only:
# Filter out the jobs not already in the cache
logging.debug('Filtering %d views for existing jobs',
len(views))
logging.debug("Filtering %d views for existing jobs", len(views))
step = time.time()
views = [view for view in views
if self.exists(view)]
logging.debug("Filtered for existing views in %ss",
(time.time() - step))
views = [view for view in views if self.exists(view)]
logging.debug("Filtered for existing views in %ss", (time.time() - step))
if not views:
return [], 0
# Update the views
logging.debug('Updating views')
logging.debug("Updating views")
step = time.time()
p_params = [{'view': view} for view in views]
results = self.parallel_update_view(
n_workers=n_workers,
concurrent=p_params)
p_params = [{"view": view} for view in views]
results = self.parallel_update_view(n_workers=n_workers, concurrent=p_params)
logging.debug("Parsing results")
# generalize the result parsing, as a concurrent view always returns a
# list
@@ -491,13 +496,11 @@ class JenkinsManager(object):
self.cache.set(v_name, v_md5)
# write cache to disk
self.cache.save()
logging.debug("Updated %d views in %ss",
len(views),
time.time() - step)
logging.debug("Updated %d views in %ss", len(views), time.time() - step)
logging.debug("Total run took %ss", (time.time() - orig))
return views, len(views)
@concurrent
def parallel_update_view(self, view):
self.update_view(view.name, view.output().decode('utf-8'))
self.update_view(view.name, view.output().decode("utf-8"))
return (view.name, view.md5())


@@ -43,43 +43,45 @@ class JobCache(object):
def __init__(self, jenkins_url, flush=False):
cache_dir = self.get_cache_dir()
# One cache per remote Jenkins URL:
host_vary = re.sub(r'[^A-Za-z0-9\-\~]', '_', jenkins_url)
host_vary = re.sub(r"[^A-Za-z0-9\-\~]", "_", jenkins_url)
self.cachefilename = os.path.join(
cache_dir, 'cache-host-jobs-' + host_vary + '.yml')
cache_dir, "cache-host-jobs-" + host_vary + ".yml"
)
# generate named lockfile if none exists, and lock it
self._locked = self._lock()
if not self._locked:
raise errors.JenkinsJobsException(
"Unable to lock cache for '%s'" % jenkins_url)
"Unable to lock cache for '%s'" % jenkins_url
)
if flush or not os.path.isfile(self.cachefilename):
self.data = {}
else:
with io.open(self.cachefilename, 'r', encoding='utf-8') as yfile:
with io.open(self.cachefilename, "r", encoding="utf-8") as yfile:
self.data = yaml.load(yfile)
logger.debug("Using cache: '{0}'".format(self.cachefilename))
def _lock(self):
self._fastener = fasteners.InterProcessLock("%s.lock" %
self.cachefilename)
self._fastener = fasteners.InterProcessLock("%s.lock" % self.cachefilename)
return self._fastener.acquire(delay=1, max_delay=2, timeout=60)
def _unlock(self):
if getattr(self, '_locked', False):
if getattr(self, '_fastener', None) is not None:
if getattr(self, "_locked", False):
if getattr(self, "_fastener", None) is not None:
self._fastener.release()
self._locked = None
@staticmethod
def get_cache_dir():
home = os.path.expanduser('~')
if home == '~':
raise OSError('Could not locate home folder')
xdg_cache_home = os.environ.get('XDG_CACHE_HOME') or \
os.path.join(home, '.cache')
path = os.path.join(xdg_cache_home, 'jenkins_jobs')
home = os.path.expanduser("~")
if home == "~":
raise OSError("Could not locate home folder")
xdg_cache_home = os.environ.get("XDG_CACHE_HOME") or os.path.join(
home, ".cache"
)
path = os.path.join(xdg_cache_home, "jenkins_jobs")
if not os.path.isdir(path):
try:
os.makedirs(path)
@@ -111,9 +113,10 @@ class JobCache(object):
# use self references to required modules in case called via __del__
# write to tempfile under same directory and then replace to avoid
# issues around corruption such the process be killed
tfile = self._tempfile.NamedTemporaryFile(dir=self.get_cache_dir(),
delete=False)
tfile.write(self._yaml.dump(self.data).encode('utf-8'))
tfile = self._tempfile.NamedTemporaryFile(
dir=self.get_cache_dir(), delete=False
)
tfile.write(self._yaml.dump(self.data).encode("utf-8"))
# force contents to be synced on disk before overwriting cachefile
tfile.flush()
self._os.fsync(tfile.fileno())
@@ -131,10 +134,12 @@ class JobCache(object):
def __del__(self):
# check we initialized sufficiently in case called
# due to an exception occurring in the __init__
if getattr(self, 'data', None) is not None:
if getattr(self, "data", None) is not None:
try:
self.save()
except Exception as e:
self._logger.error("Failed to write to cache file '%s' on "
"exit: %s" % (self.cachefilename, e))
self._logger.error(
"Failed to write to cache file '%s' on "
"exit: %s" % (self.cachefilename, e)
)
self._unlock()


@@ -31,8 +31,7 @@ logger = logging.getLogger()
def __version__():
return "Jenkins Job Builder version: %s" % \
version.version_info.version_string()
return "Jenkins Job Builder version: %s" % version.version_info.version_string()
class JenkinsJobs(object):
@@ -58,17 +57,17 @@ class JenkinsJobs(object):
self.parser = create_parser()
self.options = self.parser.parse_args(args)
self.jjb_config = JJBConfig(self.options.conf,
config_section=self.options.section,
**kwargs)
self.jjb_config = JJBConfig(
self.options.conf, config_section=self.options.section, **kwargs
)
if not self.options.command:
self.parser.error("Must specify a 'command' to be performed")
if (self.options.log_level is not None):
self.options.log_level = getattr(logging,
self.options.log_level.upper(),
logger.getEffectiveLevel())
if self.options.log_level is not None:
self.options.log_level = getattr(
logging, self.options.log_level.upper(), logger.getEffectiveLevel()
)
logger.setLevel(self.options.log_level)
self._parse_additional()
@@ -84,50 +83,58 @@ class JenkinsJobs(object):
def _parse_additional(self):
self._set_config(self.jjb_config.builder, 'ignore_cache')
self._set_config(self.jjb_config.builder, 'flush_cache')
self._set_config(self.jjb_config.builder, 'update')
self._set_config(self.jjb_config.yamlparser, 'allow_empty_variables')
self._set_config(self.jjb_config.jenkins, 'section')
self._set_config(self.jjb_config.jenkins, 'user')
self._set_config(self.jjb_config.jenkins, 'password')
self._set_config(self.jjb_config.builder, "ignore_cache")
self._set_config(self.jjb_config.builder, "flush_cache")
self._set_config(self.jjb_config.builder, "update")
self._set_config(self.jjb_config.yamlparser, "allow_empty_variables")
self._set_config(self.jjb_config.jenkins, "section")
self._set_config(self.jjb_config.jenkins, "user")
self._set_config(self.jjb_config.jenkins, "password")
# Note: CLI options override config file options.
if getattr(self.options, 'update', None) is None:
self.options.update = self.jjb_config.builder.get('update')
if getattr(self.options, "update", None) is None:
self.options.update = self.jjb_config.builder.get("update")
if self.options.update is None:
self.options.update = 'all'
self.options.update = "all"
if getattr(self.options, 'plugins_info_path', None) is not None:
with io.open(self.options.plugins_info_path, 'r',
encoding='utf-8') as yaml_file:
if getattr(self.options, "plugins_info_path", None) is not None:
with io.open(
self.options.plugins_info_path, "r", encoding="utf-8"
) as yaml_file:
plugins_info = yaml.load(yaml_file)
if not isinstance(plugins_info, list):
self.parser.error("{0} must contain a Yaml list!".format(
self.options.plugins_info_path))
self.jjb_config.builder['plugins_info'] = plugins_info
self.parser.error(
"{0} must contain a Yaml list!".format(
self.options.plugins_info_path
)
)
self.jjb_config.builder["plugins_info"] = plugins_info
if getattr(self.options, 'path', None):
if hasattr(self.options.path, 'read'):
if getattr(self.options, "path", None):
if hasattr(self.options.path, "read"):
logger.debug("Input file is stdin")
if self.options.path.isatty():
if platform.system() == 'Windows':
key = 'CTRL+Z'
if platform.system() == "Windows":
key = "CTRL+Z"
else:
key = 'CTRL+D'
logger.warning("Reading configuration from STDIN. "
"Press %s to end input.", key)
key = "CTRL+D"
logger.warning(
"Reading configuration from STDIN. " "Press %s to end input.",
key,
)
self.options.path = [self.options.path]
else:
# take list of paths
self.options.path = self.options.path.split(os.pathsep)
do_recurse = (getattr(self.options, 'recursive', False) or
self.jjb_config.recursive)
do_recurse = (
getattr(self.options, "recursive", False)
or self.jjb_config.recursive
)
excludes = ([e for elist in self.options.exclude
for e in elist.split(os.pathsep)] or
self.jjb_config.excludes)
excludes = [
e for elist in self.options.exclude for e in elist.split(os.pathsep)
] or self.jjb_config.excludes
paths = []
for path in self.options.path:
if do_recurse and os.path.isdir(path):
@@ -139,8 +146,8 @@ class JenkinsJobs(object):
def execute(self):
extension_manager = extension.ExtensionManager(
namespace='jjb.cli.subcommands',
invoke_on_load=True,)
namespace="jjb.cli.subcommands", invoke_on_load=True
)
ext = extension_manager[self.options.command]
ext.obj.execute(self.options, self.jjb_config)
@@ -154,10 +161,11 @@ def main():
if sys.version_info[0] == 2:
import codecs
reload(sys) # noqa
sys.setdefaultencoding('utf-8')
sys.stdout = codecs.getwriter('utf8')(sys.stdout)
sys.stderr = codecs.getwriter('utf8')(sys.stderr)
sys.setdefaultencoding("utf-8")
sys.stdout = codecs.getwriter("utf8")(sys.stdout)
sys.stderr = codecs.getwriter("utf8")(sys.stderr)
# end of workaround
argv = sys.argv[1:]


@@ -22,8 +22,10 @@ from stevedore import extension
def __version__():
return "Jenkins Job Builder version: %s" % \
jenkins_jobs.version.version_info.version_string()
return (
"Jenkins Job Builder version: %s"
% jenkins_jobs.version.version_info.version_string()
)
def create_parser():
@@ -31,67 +33,78 @@ def create_parser():
"""
parser = argparse.ArgumentParser()
parser.add_argument(
'--conf',
dest='conf',
default=os.environ.get('JJB_CONF', None),
help="configuration file [JJB_CONF]")
"--conf",
dest="conf",
default=os.environ.get("JJB_CONF", None),
help="configuration file [JJB_CONF]",
)
parser.add_argument(
'-l',
'--log_level',
dest='log_level',
default=os.environ.get('JJB_LOG_LEVEL', 'info'),
help="log level (default: %(default)s) [JJB_LOG_LEVEL]")
"-l",
"--log_level",
dest="log_level",
default=os.environ.get("JJB_LOG_LEVEL", "info"),
help="log level (default: %(default)s) [JJB_LOG_LEVEL]",
)
parser.add_argument(
'--ignore-cache',
action='store_true',
dest='ignore_cache',
"--ignore-cache",
action="store_true",
dest="ignore_cache",
default=None,
help="ignore the cache and update the jobs anyhow (that will "
"only flush the specified jobs cache)")
"only flush the specified jobs cache)",
)
parser.add_argument(
'--flush-cache',
action='store_true',
dest='flush_cache',
"--flush-cache",
action="store_true",
dest="flush_cache",
default=None,
help="flush all the cache entries before updating")
help="flush all the cache entries before updating",
)
parser.add_argument(
'--version',
dest='version',
action='version',
"--version",
dest="version",
action="version",
version=__version__(),
help="show version")
help="show version",
)
parser.add_argument(
'--allow-empty-variables',
action='store_true',
dest='allow_empty_variables',
"--allow-empty-variables",
action="store_true",
dest="allow_empty_variables",
default=None,
help="Don\'t fail if any of the variables inside any string are "
"not defined, replace with empty string instead.")
help="Don't fail if any of the variables inside any string are "
"not defined, replace with empty string instead.",
)
parser.add_argument(
'--server', '-s',
dest='section',
default=os.environ.get('JJB_SECTION', 'jenkins'),
"--server",
"-s",
dest="section",
default=os.environ.get("JJB_SECTION", "jenkins"),
help="The Jenkins server ini section to use. Defaults to 'jenkins' "
"[JJB_SECTION]")
"[JJB_SECTION]",
)
parser.add_argument(
'--user', '-u',
default=os.environ.get('JJB_USER', None),
"--user",
"-u",
default=os.environ.get("JJB_USER", None),
help="The Jenkins user to use for authentication. This overrides "
"the user specified in the configuration file. [JJB_USER]")
"the user specified in the configuration file. [JJB_USER]",
)
parser.add_argument(
'--password', '-p',
default=os.environ.get('JJB_PASSWORD', None),
"--password",
"-p",
default=os.environ.get("JJB_PASSWORD", None),
help="Password or API token to use for authenticating towards Jenkins."
" This overrides the password specified in the configuration file."
" [JJB_PASSWORD]")
" [JJB_PASSWORD]",
)
subparser = parser.add_subparsers(
dest='command',
help="update, test, list or delete job")
dest="command", help="update, test, list or delete job"
)
extension_manager = extension.ExtensionManager(
namespace='jjb.cli.subcommands',
invoke_on_load=True,
namespace="jjb.cli.subcommands", invoke_on_load=True
)
def parse_subcommand_args(ext, subparser):


@@ -22,6 +22,7 @@ class BaseSubCommand(object):
"""Base class for Jenkins Job Builder subcommands, intended to allow
subcommands to be loaded as stevedore extensions by third party users.
"""
def __init__(self):
pass
@@ -52,16 +53,20 @@ class BaseSubCommand(object):
"""Add '--recursive' and '--exclude' arguments to given parser.
"""
parser.add_argument(
'-r', '--recursive',
action='store_true',
dest='recursive',
"-r",
"--recursive",
action="store_true",
dest="recursive",
default=False,
help="look for yaml files recursively")
help="look for yaml files recursively",
)
parser.add_argument(
'-x', '--exclude',
dest='exclude',
action='append',
"-x",
"--exclude",
dest="exclude",
action="append",
default=[],
help="paths to exclude when using recursive search, "
"uses standard globbing.")
"uses standard globbing.",
)


@@ -22,32 +22,33 @@ import jenkins_jobs.cli.subcommand.base as base
class DeleteSubCommand(base.BaseSubCommand):
def parse_args(self, subparser):
delete = subparser.add_parser('delete')
delete = subparser.add_parser("delete")
self.parse_option_recursive_exclude(delete)
delete.add_argument("name", help="name of job", nargs="+")
delete.add_argument(
'name',
help='name of job',
nargs='+')
delete.add_argument(
'-p', '--path',
"-p",
"--path",
default=None,
help="colon-separated list of paths to YAML files "
"or directories")
delete.add_argument(
'-j', '--jobs-only',
action='store_true', dest='del_jobs',
default=False,
help='delete only jobs'
help="colon-separated list of paths to YAML files " "or directories",
)
delete.add_argument(
'-v', '--views-only',
action='store_true', dest='del_views',
"-j",
"--jobs-only",
action="store_true",
dest="del_jobs",
default=False,
help='delete only views'
help="delete only jobs",
)
delete.add_argument(
"-v",
"--views-only",
action="store_true",
dest="del_views",
default=False,
help="delete only views",
)
def execute(self, options, jjb_config):
@@ -55,7 +56,8 @@ class DeleteSubCommand(base.BaseSubCommand):
if options.del_jobs and options.del_views:
raise JenkinsJobsException(
'"--views-only" and "--jobs-only" cannot be used together.')
'"--views-only" and "--jobs-only" cannot be used together.'
)
fn = options.path
registry = ModuleRegistry(jjb_config, builder.plugins_list)
@@ -64,8 +66,8 @@ class DeleteSubCommand(base.BaseSubCommand):
if fn:
parser.load_files(fn)
parser.expandYaml(registry, options.name)
jobs = [j['name'] for j in parser.jobs]
views = [v['name'] for v in parser.views]
jobs = [j["name"] for j in parser.jobs]
views = [v["name"] for v in parser.views]
else:
jobs = options.name
views = options.name


@@ -27,26 +27,30 @@ logger = logging.getLogger(__name__)
class DeleteAllSubCommand(base.BaseSubCommand):
def parse_args(self, subparser):
delete_all = subparser.add_parser(
'delete-all',
"delete-all",
help="delete *ALL* jobs from Jenkins server, including "
"those not managed by Jenkins Job Builder.")
"those not managed by Jenkins Job Builder.",
)
self.parse_option_recursive_exclude(delete_all)
delete_all.add_argument(
'-j', '--jobs-only',
action='store_true', dest='del_jobs',
"-j",
"--jobs-only",
action="store_true",
dest="del_jobs",
default=False,
help='delete only jobs'
help="delete only jobs",
)
delete_all.add_argument(
'-v', '--views-only',
action='store_true', dest='del_views',
"-v",
"--views-only",
action="store_true",
dest="del_views",
default=False,
help='delete only views'
help="delete only views",
)
def execute(self, options, jjb_config):
@@ -55,24 +59,26 @@ class DeleteAllSubCommand(base.BaseSubCommand):
reach = set()
if options.del_jobs and options.del_views:
raise JenkinsJobsException(
'"--views-only" and "--jobs-only" cannot be used together.')
'"--views-only" and "--jobs-only" cannot be used together.'
)
elif options.del_jobs and not options.del_views:
reach.add('jobs')
reach.add("jobs")
elif options.del_views and not options.del_jobs:
reach.add('views')
reach.add("views")
else:
reach.update(('jobs', 'views'))
reach.update(("jobs", "views"))
if not utils.confirm(
'Sure you want to delete *ALL* {} from Jenkins '
'server?\n(including those not managed by Jenkins '
'Job Builder)'.format(" AND ".join(reach))):
sys.exit('Aborted')
"Sure you want to delete *ALL* {} from Jenkins "
"server?\n(including those not managed by Jenkins "
"Job Builder)".format(" AND ".join(reach))
):
sys.exit("Aborted")
if 'jobs' in reach:
if "jobs" in reach:
logger.info("Deleting all jobs")
builder.delete_all_jobs()
if 'views' in reach:
if "views" in reach:
logger.info("Deleting all views")
builder.delete_all_views()


@@ -25,17 +25,18 @@ logger = logging.getLogger(__name__)
class GetPluginsInfoSubCommand(base.BaseSubCommand):
def parse_args(self, subparser):
plugins_info = subparser.add_parser(
'get-plugins-info',
help='get plugins info yaml by querying Jenkins server.')
"get-plugins-info", help="get plugins info yaml by querying Jenkins server."
)
plugins_info.add_argument(
'-o', '--output-file',
default='plugins_info.yaml',
dest='plugins_info_file',
help='file to save output to.')
"-o",
"--output-file",
default="plugins_info.yaml",
dest="plugins_info_file",
help="file to save output to.",
)
def execute(self, options, jjb_config):
builder = JenkinsManager(jjb_config)
@@ -43,14 +44,14 @@ class GetPluginsInfoSubCommand(base.BaseSubCommand):
plugins_info = []
for plugin in plugin_data:
info = {
'longName': str(plugin['longName']),
'shortName': str(plugin['shortName']),
'version': str(plugin['version']),
"longName": str(plugin["longName"]),
"shortName": str(plugin["shortName"]),
"version": str(plugin["version"]),
}
plugins_info.append(info)
if options.plugins_info_file:
with open(options.plugins_info_file, 'w') as outfile:
with open(options.plugins_info_file, "w") as outfile:
outfile.write(yaml.dump(plugins_info))
logger.info("Generated {} file".format(options.plugins_info_file))
else:


@@ -27,18 +27,15 @@ def list_duplicates(seq):
class ListSubCommand(base.BaseSubCommand):
def parse_args(self, subparser):
list = subparser.add_parser('list', help="List jobs")
list = subparser.add_parser("list", help="List jobs")
self.parse_option_recursive_exclude(list)
list.add_argument('names',
help='name(s) of job(s)',
nargs='*',
default=None)
list.add_argument('-p', '--path', default=None,
help='path to YAML file or directory')
list.add_argument("names", help="name(s) of job(s)", nargs="*", default=None)
list.add_argument(
"-p", "--path", default=None, help="path to YAML file or directory"
)
def execute(self, options, jjb_config):
self.jjb_config = jjb_config
@@ -50,24 +47,25 @@ class ListSubCommand(base.BaseSubCommand):
stdout = utils.wrap_stream(sys.stdout)
for job in jobs:
stdout.write((job + '\n').encode('utf-8'))
stdout.write((job + "\n").encode("utf-8"))
def get_jobs(self, jobs_glob=None, fn=None):
if fn:
r = registry.ModuleRegistry(self.jjb_config,
self.jenkins.plugins_list)
r = registry.ModuleRegistry(self.jjb_config, self.jenkins.plugins_list)
p = parser.YamlParser(self.jjb_config)
p.load_files(fn)
p.expandYaml(r, jobs_glob)
jobs = [j['name'] for j in p.jobs]
jobs = [j["name"] for j in p.jobs]
else:
jobs = [j['name'] for j in self.jenkins.get_jobs()
if not jobs_glob or parser.matches(j['name'], jobs_glob)]
jobs = [
j["name"]
for j in self.jenkins.get_jobs()
if not jobs_glob or parser.matches(j["name"], jobs_glob)
]
jobs = sorted(jobs)
for duplicate in list_duplicates(jobs):
logging.warning("Found duplicate job name '%s', likely bug.",
duplicate)
logging.warning("Found duplicate job name '%s', likely bug.", duplicate)
logging.debug("Builder.get_jobs: returning %r", jobs)


@@ -23,9 +23,8 @@ logger = logging.getLogger(__name__)
class TestSubCommand(update.UpdateSubCommand):
def parse_args(self, subparser):
test = subparser.add_parser('test')
test = subparser.add_parser("test")
self.parse_option_recursive_exclude(test)
@@ -33,36 +32,46 @@ class TestSubCommand(update.UpdateSubCommand):
self.parse_arg_names(test)
test.add_argument(
'--config-xml',
action='store_true',
dest='config_xml',
"--config-xml",
action="store_true",
dest="config_xml",
default=False,
help='use alternative output file layout using config.xml files')
help="use alternative output file layout using config.xml files",
)
test.add_argument(
'-p', '--plugin-info',
dest='plugins_info_path',
"-p",
"--plugin-info",
dest="plugins_info_path",
default=None,
help='path to plugin info YAML file')
help="path to plugin info YAML file",
)
test.add_argument(
'-o',
dest='output_dir',
default=sys.stdout,
help='path to output XML')
"-o", dest="output_dir", default=sys.stdout, help="path to output XML"
)
def execute(self, options, jjb_config):
if not options.config_xml:
logger.warn('(Deprecated) The default output behavior of'
' `jenkins-jobs test` when given the --output'
' flag will change in JJB 3.0.'
' Instead of writing jobs to OUTPUT/jobname;'
' they will be written to OUTPUT/jobname/config.xml.'
' The new behavior can be enabled by the passing'
' `--config-xml` parameter.')
logger.warn(
"(Deprecated) The default output behavior of"
" `jenkins-jobs test` when given the --output"
" flag will change in JJB 3.0."
" Instead of writing jobs to OUTPUT/jobname;"
" they will be written to OUTPUT/jobname/config.xml."
" The new behavior can be enabled by the passing"
" `--config-xml` parameter."
)
builder, xml_jobs, xml_views = self._generate_xmljobs(
options, jjb_config)
builder, xml_jobs, xml_views = self._generate_xmljobs(options, jjb_config)
builder.update_jobs(xml_jobs, output=options.output_dir, n_workers=1,
config_xml=options.config_xml)
builder.update_views(xml_views, output=options.output_dir, n_workers=1,
config_xml=options.config_xml)
builder.update_jobs(
xml_jobs,
output=options.output_dir,
n_workers=1,
config_xml=options.config_xml,
)
builder.update_views(
xml_views,
output=options.output_dir,
n_workers=1,
config_xml=options.config_xml,
)


@@ -30,22 +30,19 @@ logger = logging.getLogger(__name__)
class UpdateSubCommand(base.BaseSubCommand):
def parse_arg_path(self, parser):
parser.add_argument(
'path',
nargs='?',
"path",
nargs="?",
default=sys.stdin,
help="colon-separated list of paths to YAML files "
"or directories")
help="colon-separated list of paths to YAML files " "or directories",
)
def parse_arg_names(self, parser):
parser.add_argument(
'names',
help='name(s) of job(s)', nargs='*')
parser.add_argument("names", help="name(s) of job(s)", nargs="*")
def parse_args(self, subparser):
update = subparser.add_parser('update')
update = subparser.add_parser("update")
self.parse_option_recursive_exclude(update)
@@ -53,51 +50,59 @@ class UpdateSubCommand(base.BaseSubCommand):
self.parse_arg_names(update)
update.add_argument(
'--delete-old',
action='store_true',
dest='delete_old',
"--delete-old",
action="store_true",
dest="delete_old",
default=False,
help='delete obsolete jobs')
help="delete obsolete jobs",
)
update.add_argument(
'-p', '--plugin-info',
dest='plugins_info_path',
"-p",
"--plugin-info",
dest="plugins_info_path",
default=None,
help='path to plugin info YAML file. Can be used to provide '
'previously retrieved plugins info when connecting credentials '
'don\'t have permissions to query.')
help="path to plugin info YAML file. Can be used to provide "
"previously retrieved plugins info when connecting credentials "
"don't have permissions to query.",
)
update.add_argument(
'--workers',
"--workers",
type=int,
default=1,
dest='n_workers',
dest="n_workers",
help="number of workers to use, 0 for autodetection and 1 "
"for just one worker.")
"for just one worker.",
)
update.add_argument(
'--existing-only',
action='store_true',
"--existing-only",
action="store_true",
default=False,
dest='existing_only',
help='update existing jobs only')
dest="existing_only",
help="update existing jobs only",
)
update_type = update.add_mutually_exclusive_group()
update_type.add_argument(
'-j', '--jobs-only',
action='store_const',
dest='update',
const='jobs',
help='update only jobs')
"-j",
"--jobs-only",
action="store_const",
dest="update",
const="jobs",
help="update only jobs",
)
update_type.add_argument(
'-v', '--views-only',
action='store_const',
dest='update',
const='views',
help='update only views')
"-v",
"--views-only",
action="store_const",
dest="update",
const="views",
help="update only views",
)
def _generate_xmljobs(self, options, jjb_config=None):
builder = JenkinsManager(jjb_config)
logger.info("Updating jobs in {0} ({1})".format(
options.path, options.names))
logger.info("Updating jobs in {0} ({1})".format(options.path, options.names))
orig = time.time()
# Generate XML
@@ -109,45 +114,51 @@ class UpdateSubCommand(base.BaseSubCommand):
parser.load_files(options.path)
registry.set_parser_data(parser.data)
job_data_list, view_data_list = parser.expandYaml(
registry, options.names)
job_data_list, view_data_list = parser.expandYaml(registry, options.names)
xml_jobs = xml_job_generator.generateXML(job_data_list)
xml_views = xml_view_generator.generateXML(view_data_list)
jobs = parser.jobs
step = time.time()
logging.debug('%d XML files generated in %ss',
len(jobs), str(step - orig))
logging.debug("%d XML files generated in %ss", len(jobs), str(step - orig))
return builder, xml_jobs, xml_views
def execute(self, options, jjb_config):
if options.n_workers < 0:
raise JenkinsJobsException(
'Number of workers must be equal or greater than 0')
"Number of workers must be equal or greater than 0"
)
builder, xml_jobs, xml_views = self._generate_xmljobs(
options, jjb_config)
builder, xml_jobs, xml_views = self._generate_xmljobs(options, jjb_config)
if options.update == 'jobs':
if options.update == "jobs":
jobs, num_updated_jobs = builder.update_jobs(
xml_jobs, n_workers=options.n_workers,
existing_only=options.existing_only)
xml_jobs,
n_workers=options.n_workers,
existing_only=options.existing_only,
)
logger.info("Number of jobs updated: %d", num_updated_jobs)
elif options.update == 'views':
elif options.update == "views":
views, num_updated_views = builder.update_views(
xml_views, n_workers=options.n_workers,
existing_only=options.existing_only)
xml_views,
n_workers=options.n_workers,
existing_only=options.existing_only,
)
logger.info("Number of views updated: %d", num_updated_views)
else:
jobs, num_updated_jobs = builder.update_jobs(
xml_jobs, n_workers=options.n_workers,
existing_only=options.existing_only)
xml_jobs,
n_workers=options.n_workers,
existing_only=options.existing_only,
)
logger.info("Number of jobs updated: %d", num_updated_jobs)
views, num_updated_views = builder.update_views(
xml_views, n_workers=options.n_workers,
existing_only=options.existing_only)
xml_views,
n_workers=options.n_workers,
existing_only=options.existing_only,
)
logger.info("Number of views updated: %d", num_updated_views)
keep_jobs = [job.name for job in xml_jobs]


@@ -27,9 +27,7 @@ from jenkins_jobs import builder
from jenkins_jobs.errors import JJBConfigException
from jenkins_jobs.errors import JenkinsJobsException
__all__ = [
"JJBConfig"
]
__all__ = ["JJBConfig"]
logger = logging.getLogger(__name__)
@@ -50,21 +48,21 @@ url=http://localhost:8080/
query_plugins_info=False
"""
CONFIG_REQUIRED_MESSAGE = ("A valid configuration file is required. "
"No configuration file passed.")
CONFIG_REQUIRED_MESSAGE = (
"A valid configuration file is required. " "No configuration file passed."
)
DEPRECATED_PLUGIN_CONFIG_SECTION_MESSAGE = (
"Defining plugin configuration using a [{plugin}] section in your config"
" file is deprecated. The recommended way to define plugins now is by"
" using a [plugin \"{plugin}\"] section"
' using a [plugin "{plugin}"] section'
)
_NOTSET = object()
class JJBConfig(object):
def __init__(self, config_filename=None,
config_file_required=False,
config_section='jenkins'):
def __init__(
self, config_filename=None, config_file_required=False, config_section="jenkins"
):
"""
The JJBConfig class is intended to encapsulate and resolve priority
@@ -93,11 +91,11 @@ class JJBConfig(object):
config_parser = self._init_defaults()
global_conf = '/etc/jenkins_jobs/jenkins_jobs.ini'
user_conf = os.path.join(os.path.expanduser('~'), '.config',
'jenkins_jobs', 'jenkins_jobs.ini')
local_conf = os.path.join(os.path.dirname(__file__),
'jenkins_jobs.ini')
global_conf = "/etc/jenkins_jobs/jenkins_jobs.ini"
user_conf = os.path.join(
os.path.expanduser("~"), ".config", "jenkins_jobs", "jenkins_jobs.ini"
)
local_conf = os.path.join(os.path.dirname(__file__), "jenkins_jobs.ini")
conf = None
if config_filename is not None:
conf = config_filename
@@ -120,8 +118,10 @@ class JJBConfig(object):
if config_file_required:
raise JJBConfigException(CONFIG_REQUIRED_MESSAGE)
else:
logger.warning("Config file, {0}, not found. Using "
"default config values.".format(conf))
logger.warning(
"Config file, {0}, not found. Using "
"default config values.".format(conf)
)
if config_fp is not None:
if PY2:
@@ -162,33 +162,35 @@ class JJBConfig(object):
if os.path.isfile(config_filename):
self.__config_file = config_filename # remember file we read from
logger.debug("Reading config from {0}".format(config_filename))
config_fp = io.open(config_filename, 'r', encoding='utf-8')
config_fp = io.open(config_filename, "r", encoding="utf-8")
else:
raise JJBConfigException(
"A valid configuration file is required. "
"\n{0} is not valid.".format(config_filename))
"\n{0} is not valid.".format(config_filename)
)
return config_fp
def _handle_deprecated_hipchat_config(self):
config = self.config_parser
if config.has_section('hipchat'):
if config.has_section("hipchat"):
if config.has_section('plugin "hipchat"'):
logger.warning(
"Both [hipchat] and [plugin \"hipchat\"] sections "
'Both [hipchat] and [plugin "hipchat"] sections '
"defined, legacy [hipchat] section will be ignored."
)
else:
logger.warning(
"[hipchat] section is deprecated and should be moved to a "
"[plugins \"hipchat\"] section instead as the [hipchat] "
'[plugins "hipchat"] section instead as the [hipchat] '
"section will be ignored in the future."
)
config.add_section('plugin "hipchat"')
for option in config.options("hipchat"):
config.set('plugin "hipchat"', option,
config.get("hipchat", option))
config.set(
'plugin "hipchat"', option, config.get("hipchat", option)
)
config.remove_section("hipchat")
@@ -197,9 +199,10 @@ class JJBConfig(object):
# interpolation to remove the need for plugins to need information
# directly from the jenkins section within code and allow variables
# in the config file to refer instead.
if (config.has_section('plugin "hipchat"') and
not config.has_option('plugin "hipchat"', 'url')):
config.set('plugin "hipchat"', "url", config.get('jenkins', 'url'))
if config.has_section('plugin "hipchat"') and not config.has_option(
'plugin "hipchat"', "url"
):
config.set('plugin "hipchat"', "url", config.get("jenkins", "url"))
def _setup(self):
config = self.config_parser
@@ -208,26 +211,27 @@ class JJBConfig(object):
# check the ignore_cache setting
ignore_cache = False
if config.has_option(self._section, 'ignore_cache'):
logger.warning("ignore_cache option should be moved to the "
"[job_builder] section in the config file, the "
"one specified in the [jenkins] section will be "
"ignored in the future")
ignore_cache = config.getboolean(self._section, 'ignore_cache')
elif config.has_option('job_builder', 'ignore_cache'):
ignore_cache = config.getboolean('job_builder', 'ignore_cache')
self.builder['ignore_cache'] = ignore_cache
if config.has_option(self._section, "ignore_cache"):
logger.warning(
"ignore_cache option should be moved to the "
"[job_builder] section in the config file, the "
"one specified in the [jenkins] section will be "
"ignored in the future"
)
ignore_cache = config.getboolean(self._section, "ignore_cache")
elif config.has_option("job_builder", "ignore_cache"):
ignore_cache = config.getboolean("job_builder", "ignore_cache")
self.builder["ignore_cache"] = ignore_cache
# check the flush_cache setting
flush_cache = False
if config.has_option('job_builder', 'flush_cache'):
flush_cache = config.getboolean('job_builder', 'flush_cache')
self.builder['flush_cache'] = flush_cache
if config.has_option("job_builder", "flush_cache"):
flush_cache = config.getboolean("job_builder", "flush_cache")
self.builder["flush_cache"] = flush_cache
# check the print_job_urls setting
if config.has_option('job_builder', 'print_job_urls'):
self.print_job_urls = config.getboolean('job_builder',
'print_job_urls')
if config.has_option("job_builder", "print_job_urls"):
self.print_job_urls = config.getboolean("job_builder", "print_job_urls")
# Jenkins supports access as an anonymous user, which can be used to
# ensure read-only behaviour when querying the version of plugins
@@ -240,16 +244,16 @@ class JJBConfig(object):
# https://bugs.launchpad.net/openstack-ci/+bug/1259631
try:
user = config.get(self._section, 'user')
user = config.get(self._section, "user")
except (TypeError, configparser.NoOptionError):
user = None
self.jenkins['user'] = user
self.jenkins["user"] = user
try:
password = config.get(self._section, 'password')
password = config.get(self._section, "password")
except (TypeError, configparser.NoOptionError):
password = None
self.jenkins['password'] = password
self.jenkins["password"] = password
# None -- no timeout, blocking mode; same as setblocking(True)
# 0.0 -- non-blocking mode; same as setblocking(False) <--- default
@@ -259,86 +263,94 @@ class JJBConfig(object):
# "timeout=jenkins_jobs.builder._DEFAULT_TIMEOUT" or not set timeout at
# all.
try:
timeout = config.getfloat(self._section, 'timeout')
timeout = config.getfloat(self._section, "timeout")
except (ValueError):
raise JenkinsJobsException("Jenkins timeout config is invalid")
except (TypeError, configparser.NoOptionError):
timeout = builder._DEFAULT_TIMEOUT
self.jenkins['timeout'] = timeout
self.jenkins["timeout"] = timeout
plugins_info = None
if (config.has_option(self._section, 'query_plugins_info') and
not config.getboolean(self._section, "query_plugins_info")):
if config.has_option(
self._section, "query_plugins_info"
) and not config.getboolean(self._section, "query_plugins_info"):
logger.debug("Skipping plugin info retrieval")
plugins_info = []
self.builder['plugins_info'] = plugins_info
self.builder["plugins_info"] = plugins_info
self.recursive = config.getboolean('job_builder', 'recursive')
self.excludes = config.get('job_builder', 'exclude').split(os.pathsep)
self.recursive = config.getboolean("job_builder", "recursive")
self.excludes = config.get("job_builder", "exclude").split(os.pathsep)
# The way we want to do things moving forward:
self.jenkins['url'] = config.get(self._section, 'url')
self.builder['print_job_urls'] = self.print_job_urls
self.jenkins["url"] = config.get(self._section, "url")
self.builder["print_job_urls"] = self.print_job_urls
# keep descriptions ? (used by yamlparser)
keep_desc = False
if (config and config.has_section('job_builder') and
config.has_option('job_builder', 'keep_descriptions')):
keep_desc = config.getboolean('job_builder',
'keep_descriptions')
self.yamlparser['keep_descriptions'] = keep_desc
if (
config
and config.has_section("job_builder")
and config.has_option("job_builder", "keep_descriptions")
):
keep_desc = config.getboolean("job_builder", "keep_descriptions")
self.yamlparser["keep_descriptions"] = keep_desc
# figure out the include path (used by yamlparser)
path = ["."]
if (config and config.has_section('job_builder') and
config.has_option('job_builder', 'include_path')):
path = config.get('job_builder',
'include_path').split(':')
self.yamlparser['include_path'] = path
if (
config
and config.has_section("job_builder")
and config.has_option("job_builder", "include_path")
):
path = config.get("job_builder", "include_path").split(":")
self.yamlparser["include_path"] = path
# allow duplicates?
allow_duplicates = False
if config and config.has_option('job_builder', 'allow_duplicates'):
allow_duplicates = config.getboolean('job_builder',
'allow_duplicates')
self.yamlparser['allow_duplicates'] = allow_duplicates
if config and config.has_option("job_builder", "allow_duplicates"):
allow_duplicates = config.getboolean("job_builder", "allow_duplicates")
self.yamlparser["allow_duplicates"] = allow_duplicates
# allow empty variables?
self.yamlparser['allow_empty_variables'] = (
config and config.has_section('job_builder') and
config.has_option('job_builder', 'allow_empty_variables') and
config.getboolean('job_builder', 'allow_empty_variables'))
self.yamlparser["allow_empty_variables"] = (
config
and config.has_section("job_builder")
and config.has_option("job_builder", "allow_empty_variables")
and config.getboolean("job_builder", "allow_empty_variables")
)
# retain anchors across files?
retain_anchors = False
if config and config.has_option('job_builder', 'retain_anchors'):
retain_anchors = config.getboolean('job_builder',
'retain_anchors')
self.yamlparser['retain_anchors'] = retain_anchors
if config and config.has_option("job_builder", "retain_anchors"):
retain_anchors = config.getboolean("job_builder", "retain_anchors")
self.yamlparser["retain_anchors"] = retain_anchors
update = None
if (config and config.has_section('job_builder') and
config.has_option('job_builder', 'update')):
update = config.get('job_builder', 'update')
self.builder['update'] = update
if (
config
and config.has_section("job_builder")
and config.has_option("job_builder", "update")
):
update = config.get("job_builder", "update")
self.builder["update"] = update
def validate(self):
# Inform the user as to what is likely to happen, as they may specify
# a real jenkins instance in test mode to get the plugin info to check
# the XML generated.
if self.jenkins['user'] is None and self.jenkins['password'] is None:
if self.jenkins["user"] is None and self.jenkins["password"] is None:
logger.info("Will use anonymous access to Jenkins if needed.")
elif ((self.jenkins['user'] is not None and
self.jenkins['password'] is None) or
(self.jenkins['user'] is None and
self.jenkins['password'] is not None)):
elif (
self.jenkins["user"] is not None and self.jenkins["password"] is None
) or (self.jenkins["user"] is None and self.jenkins["password"] is not None):
raise JenkinsJobsException(
"Cannot authenticate to Jenkins with only one of User and "
"Password provided, please check your configuration."
)
if (self.builder['plugins_info'] is not None and
not isinstance(self.builder['plugins_info'], list)):
if self.builder["plugins_info"] is not None and not isinstance(
self.builder["plugins_info"], list
):
raise JenkinsJobsException("plugins_info must contain a list!")
def get_module_config(self, section, key, default=None):
@@ -349,19 +361,23 @@ class JJBConfig(object):
"""
result = default
try:
result = self.config_parser.get(
section, key
)
except (configparser.NoSectionError, configparser.NoOptionError,
JenkinsJobsException) as e:
result = self.config_parser.get(section, key)
except (
configparser.NoSectionError,
configparser.NoOptionError,
JenkinsJobsException,
) as e:
# use of default ignores missing sections/options
if result is None:
logger.warning(
"You didn't set a %s neither in the yaml job definition "
"nor in the %s section, blank default value will be "
"applied:\n%s", key, section, e)
"applied:\n%s",
key,
section,
e,
)
return result
def get_plugin_config(self, plugin, key, default=None):
return self.get_module_config('plugin "{}"'.format(plugin), key,
default)
return self.get_module_config('plugin "{}"'.format(plugin), key, default)
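get_plugin_config above simply namespaces the lookup into a section named after the plugin. A sketch of the fallback behaviour with a hypothetical ini snippet; the real method also logs a warning when it falls back to a None default:

import configparser

config = configparser.ConfigParser()
config.read_string('[plugin "hipchat"]\nurl = https://hipchat.example.com\n')

def get_module_config(section, key, default=None):
    try:
        return config.get(section, key)
    except (configparser.NoSectionError, configparser.NoOptionError):
        return default  # missing sections/options fall back silently here

print(get_module_config('plugin "hipchat"', "url"))                 # from the ini
print(get_module_config('plugin "hipchat"', "send-as", "Jenkins"))  # the default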


@@ -4,9 +4,9 @@ import inspect
def is_sequence(arg):
return (not hasattr(arg, "strip") and
(hasattr(arg, "__getitem__") or
hasattr(arg, "__iter__")))
return not hasattr(arg, "strip") and (
hasattr(arg, "__getitem__") or hasattr(arg, "__iter__")
)
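is_sequence is duck typing: anything indexable or iterable counts, except strings, which are filtered out by their strip method. Assuming the definition above is in scope:

print(is_sequence(["a", "b"]))        # True  -- has __getitem__
print(is_sequence(("a", "b")))        # True
print(is_sequence(x for x in "ab"))   # True  -- has __iter__
print(is_sequence("ab"))              # False -- strings have .strip
print(is_sequence(42))                # False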
class JenkinsJobsException(Exception):
@@ -14,20 +14,19 @@ class JenkinsJobsException(Exception):
class ModuleError(JenkinsJobsException):
def get_module_name(self):
frame = inspect.currentframe()
co_name = frame.f_code.co_name
module_name = '<unresolved>'
while frame and co_name != 'run':
module_name = "<unresolved>"
while frame and co_name != "run":
# XML generation called via dispatch
if co_name == 'dispatch':
if co_name == "dispatch":
data = frame.f_locals
module_name = "%s.%s" % (data['component_type'], data['name'])
module_name = "%s.%s" % (data["component_type"], data["name"])
break
# XML generation done directly by class using gen_xml or root_xml
if co_name == 'gen_xml' or co_name == 'root_xml':
data = frame.f_locals['data']
if co_name == "gen_xml" or co_name == "root_xml":
data = frame.f_locals["data"]
module_name = next(iter(data.keys()))
break
frame = frame.f_back
@@ -37,47 +36,41 @@ class ModuleError(JenkinsJobsException):
class InvalidAttributeError(ModuleError):
def __init__(self, attribute_name, value, valid_values=None):
message = "'{0}' is an invalid value for attribute {1}.{2}".format(
value, self.get_module_name(), attribute_name)
value, self.get_module_name(), attribute_name
)
if is_sequence(valid_values):
message += "\nValid values include: {0}".format(
', '.join("'{0}'".format(value)
for value in valid_values))
", ".join("'{0}'".format(value) for value in valid_values)
)
super(InvalidAttributeError, self).__init__(message)
class MissingAttributeError(ModuleError):
def __init__(self, missing_attribute, module_name=None):
module = module_name or self.get_module_name()
if is_sequence(missing_attribute):
message = "One of {0} must be present in '{1}'".format(
', '.join("'{0}'".format(value)
for value in missing_attribute), module)
", ".join("'{0}'".format(value) for value in missing_attribute), module
)
else:
message = "Missing {0} from an instance of '{1}'".format(
missing_attribute, module)
missing_attribute, module
)
super(MissingAttributeError, self).__init__(message)
class AttributeConflictError(ModuleError):
def __init__(
self, attribute_name, attributes_in_conflict, module_name=None
):
def __init__(self, attribute_name, attributes_in_conflict, module_name=None):
module = module_name or self.get_module_name()
message = (
"Attribute '{0}' can not be used together with {1} in {2}".format(
attribute_name,
', '.join(
"'{0}'".format(value) for value in attributes_in_conflict
), module
)
message = "Attribute '{0}' can not be used together with {1} in {2}".format(
attribute_name,
", ".join("'{0}'".format(value) for value in attributes_in_conflict),
module,
)
super(AttributeConflictError, self).__init__(message)
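For reference, the messages these classes produce when constructed with an explicit module_name, bypassing the frame inspection in get_module_name (the module names below are hypothetical):

print(MissingAttributeError("name", module_name="builders.shell"))
# Missing name from an instance of 'builders.shell'

print(MissingAttributeError(["name", "id"], module_name="builders.shell"))
# One of 'name', 'id' must be present in 'builders.shell'

print(AttributeConflictError("stable", ["build-number"], module_name="copyartifact"))
# Attribute 'stable' can not be used together with 'build-number' in copyartifact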


@@ -33,18 +33,22 @@ def deep_format(obj, paramdict, allow_empty=False):
# limitations on the values in paramdict - the post-format result must
# still be valid YAML (so substituting-in a string containing quotes, for
# example, is problematic).
if hasattr(obj, 'format'):
if hasattr(obj, "format"):
try:
ret = CustomFormatter(allow_empty).format(obj, **paramdict)
except KeyError as exc:
missing_key = exc.args[0]
desc = "%s parameter missing to format %s\nGiven:\n%s" % (
missing_key, obj, pformat(paramdict))
missing_key,
obj,
pformat(paramdict),
)
raise JenkinsJobsException(desc)
except Exception:
logging.error("Problem formatting with args:\nallow_empty:"
"%s\nobj: %s\nparamdict: %s" %
(allow_empty, obj, paramdict))
logging.error(
"Problem formatting with args:\nallow_empty:"
"%s\nobj: %s\nparamdict: %s" % (allow_empty, obj, paramdict)
)
raise
elif isinstance(obj, list):
@@ -55,17 +59,22 @@ def deep_format(obj, paramdict, allow_empty=False):
ret = type(obj)()
for item in obj:
try:
ret[CustomFormatter(allow_empty).format(item, **paramdict)] = \
deep_format(obj[item], paramdict, allow_empty)
ret[
CustomFormatter(allow_empty).format(item, **paramdict)
] = deep_format(obj[item], paramdict, allow_empty)
except KeyError as exc:
missing_key = exc.args[0]
desc = "%s parameter missing to format %s\nGiven:\n%s" % (
missing_key, obj, pformat(paramdict))
missing_key,
obj,
pformat(paramdict),
)
raise JenkinsJobsException(desc)
except Exception:
logging.error("Problem formatting with args:\nallow_empty:"
"%s\nobj: %s\nparamdict: %s" %
(allow_empty, obj, paramdict))
logging.error(
"Problem formatting with args:\nallow_empty:"
"%s\nobj: %s\nparamdict: %s" % (allow_empty, obj, paramdict)
)
raise
else:
ret = obj
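deep_format recurses through strings, lists and dicts, formatting dictionary keys as well as values (the list branch, elided in this hunk, recurses element by element). Assuming the function above is importable, with hypothetical parameter values:

params = {"branch": "master", "node": "builder-01"}
template = {
    "name": "build-{branch}",
    "builders": ["git checkout {branch}", "ssh {node} make"],
}
print(deep_format(template, params))
# {'name': 'build-master',
#  'builders': ['git checkout master', 'ssh builder-01 make']}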
@@ -81,6 +90,7 @@ class CustomFormatter(Formatter):
Custom formatter to allow non-existing key references when formatting a
string
"""
_expr = r"""
(?<!{){({{)* # non-pair opening {
(?:obj:)? # obj:
@@ -99,7 +109,7 @@ class CustomFormatter(Formatter):
# special case of returning the object if the entire string
# matches a single parameter
try:
result = re.match('^%s$' % self._expr, format_string, re.VERBOSE)
result = re.match("^%s$" % self._expr, format_string, re.VERBOSE)
except TypeError:
return format_string.format(**kwargs)
if result is not None:
@@ -130,8 +140,7 @@ class CustomFormatter(Formatter):
except KeyError:
if self.allow_empty:
logger.debug(
'Found uninitialized key %s, replaced with empty string',
key
"Found uninitialized key %s, replaced with empty string", key
)
return ''
return ""
raise


@@ -235,9 +235,11 @@ class OrderedConstructor(BaseConstructor):
self.flatten_mapping(node)
else:
raise yaml.constructor.ConstructorError(
None, None,
'expected a mapping node, but found %s' % node.id,
node.start_mark)
None,
None,
"expected a mapping node, but found %s" % node.id,
node.start_mark,
)
mapping = OrderedDict()
for key_node, value_node in node.value:
@@ -246,23 +248,26 @@ class OrderedConstructor(BaseConstructor):
hash(key)
except TypeError as exc:
raise yaml.constructor.ConstructorError(
'while constructing a mapping', node.start_mark,
'found unacceptable key (%s)' % exc, key_node.start_mark)
"while constructing a mapping",
node.start_mark,
"found unacceptable key (%s)" % exc,
key_node.start_mark,
)
value = self.construct_object(value_node, deep=False)
mapping[key] = value
data.update(mapping)
class OrderedRepresenter(BaseRepresenter):
def represent_yaml_mapping(self, mapping, flow_style=None):
tag = u'tag:yaml.org,2002:map'
tag = u"tag:yaml.org,2002:map"
node = self.represent_mapping(tag, mapping, flow_style=flow_style)
return node
class LocalAnchorLoader(yaml.Loader):
"""Subclass for yaml.Loader which keeps Alias between calls"""
anchors = {}
def __init__(self, *args, **kwargs):
@@ -319,14 +324,13 @@ class LocalLoader(OrderedConstructor, LocalAnchorLoader):
# make sure to pop off any local settings before passing to
# the parent constructor as any unknown args may cause errors.
self.search_path = list()
if 'search_path' in kwargs:
for p in kwargs.pop('search_path'):
logger.debug("Adding '{0}' to search path for include tags"
.format(p))
if "search_path" in kwargs:
for p in kwargs.pop("search_path"):
logger.debug("Adding '{0}' to search path for include tags".format(p))
self.search_path.append(os.path.normpath(p))
if 'escape_callback' in kwargs:
self.escape_callback = kwargs.pop('escape_callback')
if "escape_callback" in kwargs:
self.escape_callback = kwargs.pop("escape_callback")
else:
self.escape_callback = self._escape
@@ -334,16 +338,17 @@ class LocalLoader(OrderedConstructor, LocalAnchorLoader):
# constructor to preserve order of maps and ensure that the order of
# keys returned is consistent across multiple python versions
self.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
type(self).construct_yaml_map)
self.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
type(self).construct_yaml_map,
)
if hasattr(self.stream, 'name'):
self.search_path.append(os.path.normpath(
os.path.dirname(self.stream.name)))
if hasattr(self.stream, "name"):
self.search_path.append(os.path.normpath(os.path.dirname(self.stream.name)))
self.search_path.append(os.path.normpath(os.path.curdir))
def _escape(self, data):
return re.sub(r'({|})', r'\1\1', data)
return re.sub(r"({|})", r"\1\1", data)
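_escape is what backs the *-escape include variants: doubling every brace makes the included text inert when it later passes through str.format-style substitution. For example:

import re

def _escape(data):
    return re.sub(r"({|})", r"\1\1", data)

raw = 'echo "building {PROJECT}"'        # hypothetical included script
print(_escape(raw))                      # echo "building {{PROJECT}}"
print(_escape(raw).format(PROJECT="x"))  # braces survive as literals:
                                         # echo "building {PROJECT}"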
class LocalDumper(OrderedRepresenter, yaml.Dumper):
@@ -352,12 +357,10 @@ class LocalDumper(OrderedRepresenter, yaml.Dumper):
# representer to ensure conversion back looks like normal
# mapping and hides that we use OrderedDict internally
self.add_representer(OrderedDict,
type(self).represent_yaml_mapping)
self.add_representer(OrderedDict, type(self).represent_yaml_mapping)
# convert any tuples to lists as the JJB input is generally
# in list format
self.add_representer(tuple,
type(self).represent_list)
self.add_representer(tuple, type(self).represent_list)
class BaseYAMLObject(YAMLObject):
@@ -366,7 +369,7 @@ class BaseYAMLObject(YAMLObject):
class J2String(BaseYAMLObject):
yaml_tag = u'!j2:'
yaml_tag = u"!j2:"
@classmethod
def from_yaml(cls, loader, node):
@@ -374,7 +377,7 @@ class J2String(BaseYAMLObject):
class YamlListJoin(BaseYAMLObject):
yaml_tag = u'!join:'
yaml_tag = u"!join:"
@classmethod
def from_yaml(cls, loader, node):
@@ -382,26 +385,34 @@ class YamlListJoin(BaseYAMLObject):
delimiter = node.value[0].value
if not isinstance(node.value[1], yaml.SequenceNode):
raise yaml.constructor.ConstructorError(
None, None, "expected sequence node for join data, but "
"found %s" % node.value[1].id, node.start_mark)
None,
None,
"expected sequence node for join data, but "
"found %s" % node.value[1].id,
node.start_mark,
)
return delimiter.join((v.value for v in node.value[1].value))
else:
raise yaml.constructor.ConstructorError(
None, None, "expected sequence node, but found %s" % node.id,
node.start_mark)
None,
None,
"expected sequence node, but found %s" % node.id,
node.start_mark,
)
class YamlInclude(BaseYAMLObject):
yaml_tag = u'!include:'
yaml_tag = u"!include:"
@classmethod
def _find_file(cls, filename, search_path):
for dirname in search_path:
candidate = os.path.expanduser(os.path.join(dirname, filename))
if os.path.isfile(candidate):
logger.debug("Including file '{0}' from path '{1}'"
.format(filename, dirname))
logger.debug(
"Including file '{0}' from path '{1}'".format(filename, dirname)
)
return candidate
return filename
@@ -415,11 +426,14 @@ class YamlInclude(BaseYAMLObject):
filename = cls._find_file(node_str, loader.search_path)
try:
with io.open(filename, 'r', encoding='utf-8') as f:
with io.open(filename, "r", encoding="utf-8") as f:
return f.read()
except Exception:
logger.error("Failed to include file using search path: '{0}'"
.format(':'.join(loader.search_path)))
logger.error(
"Failed to include file using search path: '{0}'".format(
":".join(loader.search_path)
)
)
raise
@classmethod
@@ -428,15 +442,14 @@ class YamlInclude(BaseYAMLObject):
if isinstance(contents, LazyLoader):
return contents
data = yaml.load(contents,
functools.partial(cls.yaml_loader,
search_path=loader.search_path))
data = yaml.load(
contents, functools.partial(cls.yaml_loader, search_path=loader.search_path)
)
return data
@classmethod
def _lazy_load(cls, loader, tag, node_str):
logger.info("Lazy loading of file template '{0}' enabled"
.format(node_str))
logger.info("Lazy loading of file template '{0}' enabled".format(node_str))
return LazyLoader((cls, loader, node_str))
@classmethod
@@ -444,20 +457,24 @@ class YamlInclude(BaseYAMLObject):
if isinstance(node, yaml.ScalarNode):
return cls._from_file(loader, node)
elif isinstance(node, yaml.SequenceNode):
contents = [cls._from_file(loader, scalar_node)
for scalar_node in node.value]
contents = [
cls._from_file(loader, scalar_node) for scalar_node in node.value
]
if any(isinstance(s, CustomLoader) for s in contents):
return CustomLoaderCollection(contents)
return u'\n'.join(contents)
return u"\n".join(contents)
else:
raise yaml.constructor.ConstructorError(
None, None, "expected either a sequence or scalar node, but "
"found %s" % node.id, node.start_mark)
None,
None,
"expected either a sequence or scalar node, but " "found %s" % node.id,
node.start_mark,
)
class YamlIncludeRaw(YamlInclude):
yaml_tag = u'!include-raw:'
yaml_tag = u"!include-raw:"
@classmethod
def _from_file(cls, loader, node):
@@ -465,23 +482,26 @@ class YamlIncludeRaw(YamlInclude):
class YamlIncludeRawEscape(YamlIncludeRaw):
yaml_tag = u'!include-raw-escape:'
yaml_tag = u"!include-raw-escape:"
@classmethod
def from_yaml(cls, loader, node):
data = YamlIncludeRaw.from_yaml(loader, node)
if isinstance(data, LazyLoader):
logger.warning("Replacing %s tag with %s since lazy loading means "
"file contents will not be deep formatted for "
"variable substitution.", cls.yaml_tag,
YamlIncludeRaw.yaml_tag)
logger.warning(
"Replacing %s tag with %s since lazy loading means "
"file contents will not be deep formatted for "
"variable substitution.",
cls.yaml_tag,
YamlIncludeRaw.yaml_tag,
)
return data
else:
return loader.escape_callback(data)
class YamlIncludeJinja2(YamlIncludeRaw):
yaml_tag = u'!include-jinja2:'
yaml_tag = u"!include-jinja2:"
@classmethod
def _from_file(cls, loader, node):
@@ -492,26 +512,28 @@ class YamlIncludeJinja2(YamlIncludeRaw):
class DeprecatedTag(BaseYAMLObject):
@classmethod
def from_yaml(cls, loader, node):
logger.warning("tag '%s' is deprecated, switch to using '%s'",
cls.yaml_tag, cls._new.yaml_tag)
logger.warning(
"tag '%s' is deprecated, switch to using '%s'",
cls.yaml_tag,
cls._new.yaml_tag,
)
return cls._new.from_yaml(loader, node)
class YamlIncludeDeprecated(DeprecatedTag):
yaml_tag = u'!include'
yaml_tag = u"!include"
_new = YamlInclude
class YamlIncludeRawDeprecated(DeprecatedTag):
yaml_tag = u'!include-raw'
yaml_tag = u"!include-raw"
_new = YamlIncludeRaw
class YamlIncludeRawEscapeDeprecated(DeprecatedTag):
yaml_tag = u'!include-raw-escape'
yaml_tag = u"!include-raw-escape"
_new = YamlIncludeRawEscape
@@ -525,8 +547,7 @@ class Jinja2Loader(CustomLoader):
def __init__(self, contents, search_path):
self._template = jinja2.Template(contents)
self._template.environment.undefined = jinja2.StrictUndefined
self._template.environment.loader = jinja2.FileSystemLoader(
search_path)
self._template.environment.loader = jinja2.FileSystemLoader(search_path)
self._loader = self._template.environment.loader
def format(self, **kwargs):
@@ -539,11 +560,12 @@ class CustomLoaderCollection(object):
class CustomLoaderCollection(object):
"""Helper class to format a collection of CustomLoader objects"""
def __init__(self, sequence):
self._data = sequence
def format(self, *args, **kwargs):
return u'\n'.join(item.format(*args, **kwargs) for item in self._data)
return u"\n".join(item.format(*args, **kwargs) for item in self._data)
class LazyLoader(CustomLoader):
@@ -564,8 +586,8 @@ class LazyLoader(CustomLoader):
def format(self, *args, **kwargs):
node = yaml.ScalarNode(
tag=self._node.tag,
value=self._node.value.format(*args, **kwargs))
tag=self._node.tag, value=self._node.value.format(*args, **kwargs)
)
return self._cls.from_yaml(self._loader, node)
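The tag classes above hook into PyYAML's constructor machinery via YAMLObject. As a stand-alone sketch of the same idea (plain PyYAML, no jenkins-job-builder imports), here is the !join: behaviour re-registered on SafeLoader:

import yaml

def join_constructor(loader, node):
    # first sequence entry is the delimiter, second is the list to join
    delimiter = node.value[0].value
    return delimiter.join(v.value for v in node.value[1].value)

yaml.SafeLoader.add_constructor("!join:", join_constructor)

doc = """
name: !join:
  - '-'
  - [jenkins, job, builder]
"""
print(yaml.safe_load(doc))  # {'name': 'jenkins-job-builder'}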

File diff suppressed because it is too large


@@ -123,88 +123,85 @@ class General(jenkins_jobs.modules.base.Base):
logrotate_warn_issued = False
def gen_xml(self, xml, data):
jdk = data.get('jdk', None)
jdk = data.get("jdk", None)
if jdk:
XML.SubElement(xml, 'jdk').text = jdk
XML.SubElement(xml, 'actions')
desc_text = data.get('description', None)
XML.SubElement(xml, "jdk").text = jdk
XML.SubElement(xml, "actions")
desc_text = data.get("description", None)
if desc_text is not None:
description = XML.SubElement(xml, 'description')
description = XML.SubElement(xml, "description")
description.text = desc_text
XML.SubElement(xml, 'keepDependencies').text = 'false'
XML.SubElement(xml, "keepDependencies").text = "false"
# Need to ensure we support the None parameter to allow disabled to
# remain the last setting if the user purposely adds and then removes
# the disabled parameter.
# See: http://lists.openstack.org/pipermail/openstack-infra/2016-March/003980.html # noqa
disabled = data.get('disabled', None)
disabled = data.get("disabled", None)
if disabled is not None:
XML.SubElement(xml, 'disabled').text = str(disabled).lower()
XML.SubElement(xml, "disabled").text = str(disabled).lower()
if 'display-name' in data:
XML.SubElement(xml, 'displayName').text = data['display-name']
if data.get('block-downstream'):
XML.SubElement(xml,
'blockBuildWhenDownstreamBuilding').text = 'true'
if "display-name" in data:
XML.SubElement(xml, "displayName").text = data["display-name"]
if data.get("block-downstream"):
XML.SubElement(xml, "blockBuildWhenDownstreamBuilding").text = "true"
else:
XML.SubElement(xml,
'blockBuildWhenDownstreamBuilding').text = 'false'
if data.get('block-upstream'):
XML.SubElement(xml,
'blockBuildWhenUpstreamBuilding').text = 'true'
XML.SubElement(xml, "blockBuildWhenDownstreamBuilding").text = "false"
if data.get("block-upstream"):
XML.SubElement(xml, "blockBuildWhenUpstreamBuilding").text = "true"
else:
XML.SubElement(xml,
'blockBuildWhenUpstreamBuilding').text = 'false'
authtoken = data.get('auth-token', None)
XML.SubElement(xml, "blockBuildWhenUpstreamBuilding").text = "false"
authtoken = data.get("auth-token", None)
if authtoken is not None:
XML.SubElement(xml, 'authToken').text = authtoken
if data.get('concurrent'):
XML.SubElement(xml, 'concurrentBuild').text = 'true'
XML.SubElement(xml, "authToken").text = authtoken
if data.get("concurrent"):
XML.SubElement(xml, "concurrentBuild").text = "true"
else:
XML.SubElement(xml, 'concurrentBuild').text = 'false'
if 'workspace' in data:
XML.SubElement(xml, 'customWorkspace').text = \
str(data['workspace'])
if (xml.tag == 'matrix-project') and ('child-workspace' in data):
XML.SubElement(xml, 'childCustomWorkspace').text = \
str(data['child-workspace'])
if 'quiet-period' in data:
XML.SubElement(xml, 'quietPeriod').text = str(data['quiet-period'])
node = data.get('node', None)
XML.SubElement(xml, "concurrentBuild").text = "false"
if "workspace" in data:
XML.SubElement(xml, "customWorkspace").text = str(data["workspace"])
if (xml.tag == "matrix-project") and ("child-workspace" in data):
XML.SubElement(xml, "childCustomWorkspace").text = str(
data["child-workspace"]
)
if "quiet-period" in data:
XML.SubElement(xml, "quietPeriod").text = str(data["quiet-period"])
node = data.get("node", None)
if node:
XML.SubElement(xml, 'assignedNode').text = node
XML.SubElement(xml, 'canRoam').text = 'false'
XML.SubElement(xml, "assignedNode").text = node
XML.SubElement(xml, "canRoam").text = "false"
else:
XML.SubElement(xml, 'canRoam').text = 'true'
if 'retry-count' in data:
XML.SubElement(xml, 'scmCheckoutRetryCount').text = \
str(data['retry-count'])
XML.SubElement(xml, "canRoam").text = "true"
if "retry-count" in data:
XML.SubElement(xml, "scmCheckoutRetryCount").text = str(data["retry-count"])
if 'logrotate' in data:
if "logrotate" in data:
if not self.logrotate_warn_issued:
logging.warning('logrotate is deprecated on jenkins>=1.637,'
' the property build-discarder on newer'
' jenkins instead')
logging.warning(
"logrotate is deprecated on jenkins>=1.637,"
" the property build-discarder on newer"
" jenkins instead"
)
self.logrotate_warn_issued = True
lr_xml = XML.SubElement(xml, 'logRotator')
logrotate = data['logrotate']
lr_days = XML.SubElement(lr_xml, 'daysToKeep')
lr_days.text = str(logrotate.get('daysToKeep', -1))
lr_num = XML.SubElement(lr_xml, 'numToKeep')
lr_num.text = str(logrotate.get('numToKeep', -1))
lr_adays = XML.SubElement(lr_xml, 'artifactDaysToKeep')
lr_adays.text = str(logrotate.get('artifactDaysToKeep', -1))
lr_anum = XML.SubElement(lr_xml, 'artifactNumToKeep')
lr_anum.text = str(logrotate.get('artifactNumToKeep', -1))
lr_xml = XML.SubElement(xml, "logRotator")
logrotate = data["logrotate"]
lr_days = XML.SubElement(lr_xml, "daysToKeep")
lr_days.text = str(logrotate.get("daysToKeep", -1))
lr_num = XML.SubElement(lr_xml, "numToKeep")
lr_num.text = str(logrotate.get("numToKeep", -1))
lr_adays = XML.SubElement(lr_xml, "artifactDaysToKeep")
lr_adays.text = str(logrotate.get("artifactDaysToKeep", -1))
lr_anum = XML.SubElement(lr_xml, "artifactNumToKeep")
lr_anum.text = str(logrotate.get("artifactNumToKeep", -1))
if 'raw' in data:
raw(self.registry, xml, data['raw'])
if "raw" in data:
raw(self.registry, xml, data["raw"])
def raw(registry, xml_parent, data):
# documented in definition.rst since includes and docs is not working well
# For cross cutting method like this
root = XML.fromstring(data.get('xml'))
root = XML.fromstring(data.get("xml"))
remove_ignorable_whitespace(root)
xml_parent.append(root)
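The raw helper above just parses the user-supplied XML string and grafts it onto the job document. In miniature, skipping the whitespace-normalisation step and using a hypothetical input:

import xml.etree.ElementTree as XML

job = XML.Element("project")
data = {"xml": "<description>from raw XML</description>"}  # hypothetical input

root = XML.fromstring(data.get("xml"))
job.append(root)
print(XML.tostring(job).decode())
# <project><description>from raw XML</description></project>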


@@ -29,59 +29,62 @@ def build_trends_publisher(plugin_name, xml_element, data):
"""Appends the status thresholds.
"""
for status in ['unstable', 'failed']:
for status in ["unstable", "failed"]:
status_data = data.get(status, {})
limits = [
('total-all', 'TotalAll'),
('total-high', 'TotalHigh'),
('total-normal', 'TotalNormal'),
('total-low', 'TotalLow')]
("total-all", "TotalAll"),
("total-high", "TotalHigh"),
("total-normal", "TotalNormal"),
("total-low", "TotalLow"),
]
if only_totals is False:
limits.extend([
('new-all', 'NewAll'),
('new-high', 'NewHigh'),
('new-normal', 'NewNormal'),
('new-low', 'NewLow')])
limits.extend(
[
("new-all", "NewAll"),
("new-high", "NewHigh"),
("new-normal", "NewNormal"),
("new-low", "NewLow"),
]
)
for key, tag_suffix in limits:
tag_name = status + tag_suffix
XML.SubElement(element, tag_name).text = str(
status_data.get(key, ''))
XML.SubElement(element, tag_name).text = str(status_data.get(key, ""))
# Tuples containing: setting name, tag name, default value
settings = [
('healthy', 'healthy', ''),
('unhealthy', 'unHealthy', ''),
('health-threshold', 'thresholdLimit', 'low'),
('plugin-name', 'pluginName', plugin_name),
('default-encoding', 'defaultEncoding', ''),
('can-run-on-failed', 'canRunOnFailed', False),
('use-stable-build-as-reference', 'useStableBuildAsReference', False),
('use-previous-build-as-reference',
'usePreviousBuildAsReference', False),
('use-delta-values', 'useDeltaValues', False),
('thresholds', 'thresholds', {}),
('should-detect-modules', 'shouldDetectModules', False),
('dont-compute-new', 'dontComputeNew', True),
('do-not-resolve-relative-paths', 'doNotResolveRelativePaths', False),
('pattern', 'pattern', '')]
("healthy", "healthy", ""),
("unhealthy", "unHealthy", ""),
("health-threshold", "thresholdLimit", "low"),
("plugin-name", "pluginName", plugin_name),
("default-encoding", "defaultEncoding", ""),
("can-run-on-failed", "canRunOnFailed", False),
("use-stable-build-as-reference", "useStableBuildAsReference", False),
("use-previous-build-as-reference", "usePreviousBuildAsReference", False),
("use-delta-values", "useDeltaValues", False),
("thresholds", "thresholds", {}),
("should-detect-modules", "shouldDetectModules", False),
("dont-compute-new", "dontComputeNew", True),
("do-not-resolve-relative-paths", "doNotResolveRelativePaths", False),
("pattern", "pattern", ""),
]
thresholds = ['low', 'normal', 'high']
thresholds = ["low", "normal", "high"]
for key, tag_name, default in settings:
xml_config = XML.SubElement(xml_element, tag_name)
config_value = data.get(key, default)
if key == 'thresholds':
if key == "thresholds":
append_thresholds(
xml_config,
config_value,
data.get('dont-compute-new', True))
elif key == 'health-threshold' and config_value not in thresholds:
raise JenkinsJobsException("health-threshold must be one of %s" %
", ".join(thresholds))
xml_config, config_value, data.get("dont-compute-new", True)
)
elif key == "health-threshold" and config_value not in thresholds:
raise JenkinsJobsException(
"health-threshold must be one of %s" % ", ".join(thresholds)
)
else:
if isinstance(default, bool):
xml_config.text = str(config_value).lower()
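The settings loop above is the recurring pattern in this module: each (yaml key, xml tag, default) triple becomes one child element, with booleans lowercased for Jenkins' XML. Condensed to its core, with a hypothetical input:

import xml.etree.ElementTree as XML

element = XML.Element("publisher")
data = {"can-run-on-failed": True}  # hypothetical YAML input
settings = [
    ("health-threshold", "thresholdLimit", "low"),
    ("can-run-on-failed", "canRunOnFailed", False),
]
for key, tag_name, default in settings:
    value = data.get(key, default)
    text = str(value).lower() if isinstance(default, bool) else str(value)
    XML.SubElement(element, tag_name).text = text

print(XML.tostring(element).decode())
# <publisher><thresholdLimit>low</thresholdLimit><canRunOnFailed>true</canRunOnFailed></publisher>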
@@ -91,379 +94,385 @@ def build_trends_publisher(plugin_name, xml_element, data):
def config_file_provider_builder(xml_parent, data):
"""Builder / Wrapper helper"""
xml_files = XML.SubElement(xml_parent, 'managedFiles')
xml_files = XML.SubElement(xml_parent, "managedFiles")
files = data.get('files', [])
files = data.get("files", [])
for file in files:
xml_file = XML.SubElement(xml_files, 'org.jenkinsci.plugins.'
'configfiles.buildwrapper.ManagedFile')
xml_file = XML.SubElement(
xml_files, "org.jenkinsci.plugins." "configfiles.buildwrapper.ManagedFile"
)
mapping = [
('file-id', 'fileId', None),
('target', 'targetLocation', ''),
('variable', 'variable', ''),
('replace-tokens', 'replaceTokens', False),
("file-id", "fileId", None),
("target", "targetLocation", ""),
("variable", "variable", ""),
("replace-tokens", "replaceTokens", False),
]
convert_mapping_to_xml(xml_file, file, mapping, fail_required=True)
def config_file_provider_settings(xml_parent, data):
SETTINGS_TYPES = ['file', 'cfp']
SETTINGS_TYPES = ["file", "cfp"]
settings = {
'default-settings':
'jenkins.mvn.DefaultSettingsProvider',
'settings':
'jenkins.mvn.FilePathSettingsProvider',
'config-file-provider-settings':
'org.jenkinsci.plugins.configfiles.maven.job.MvnSettingsProvider',
'default-global-settings':
'jenkins.mvn.DefaultGlobalSettingsProvider',
'global-settings':
'jenkins.mvn.FilePathGlobalSettingsProvider',
'config-file-provider-global-settings':
'org.jenkinsci.plugins.configfiles.maven.job.'
'MvnGlobalSettingsProvider',
"default-settings": "jenkins.mvn.DefaultSettingsProvider",
"settings": "jenkins.mvn.FilePathSettingsProvider",
"config-file-provider-settings": "org.jenkinsci.plugins.configfiles.maven.job.MvnSettingsProvider",
"default-global-settings": "jenkins.mvn.DefaultGlobalSettingsProvider",
"global-settings": "jenkins.mvn.FilePathGlobalSettingsProvider",
"config-file-provider-global-settings": "org.jenkinsci.plugins.configfiles.maven.job."
"MvnGlobalSettingsProvider",
}
if 'settings' in data:
if "settings" in data:
# Support for Config File Provider
settings_file = str(data['settings'])
settings_type = data.get('settings-type', 'file')
settings_file = str(data["settings"])
settings_type = data.get("settings-type", "file")
# For cfp versions <2.10.0 we are able to detect cfp via the config
# settings name.
text = 'org.jenkinsci.plugins.configfiles.maven.MavenSettingsConfig'
text = "org.jenkinsci.plugins.configfiles.maven.MavenSettingsConfig"
if settings_file.startswith(text):
settings_type = 'cfp'
settings_type = "cfp"
if settings_type == 'file':
if settings_type == "file":
lsettings = XML.SubElement(
xml_parent, 'settings',
{'class': settings['settings']})
XML.SubElement(lsettings, 'path').text = settings_file
elif settings_type == 'cfp':
xml_parent, "settings", {"class": settings["settings"]}
)
XML.SubElement(lsettings, "path").text = settings_file
elif settings_type == "cfp":
lsettings = XML.SubElement(
xml_parent, 'settings',
{'class': settings['config-file-provider-settings']})
XML.SubElement(lsettings, 'settingsConfigId').text = settings_file
xml_parent,
"settings",
{"class": settings["config-file-provider-settings"]},
)
XML.SubElement(lsettings, "settingsConfigId").text = settings_file
else:
raise InvalidAttributeError(
'settings-type', settings_type, SETTINGS_TYPES)
raise InvalidAttributeError("settings-type", settings_type, SETTINGS_TYPES)
else:
XML.SubElement(xml_parent, 'settings',
{'class': settings['default-settings']})
XML.SubElement(xml_parent, "settings", {"class": settings["default-settings"]})
if 'global-settings' in data:
if "global-settings" in data:
# Support for Config File Provider
global_settings_file = str(data['global-settings'])
global_settings_type = data.get('global-settings-type', 'file')
global_settings_file = str(data["global-settings"])
global_settings_type = data.get("global-settings-type", "file")
# For cfp versions <2.10.0 we are able to detect cfp via the config
# settings name.
text = ('org.jenkinsci.plugins.configfiles.maven.'
'GlobalMavenSettingsConfig')
text = "org.jenkinsci.plugins.configfiles.maven." "GlobalMavenSettingsConfig"
if global_settings_file.startswith(text):
global_settings_type = 'cfp'
global_settings_type = "cfp"
if global_settings_type == 'file':
gsettings = XML.SubElement(xml_parent, 'globalSettings',
{'class': settings['global-settings']})
XML.SubElement(gsettings, 'path').text = global_settings_file
elif global_settings_type == 'cfp':
if global_settings_type == "file":
gsettings = XML.SubElement(
xml_parent, 'globalSettings',
{'class': settings['config-file-provider-global-settings']})
XML.SubElement(
gsettings,
'settingsConfigId').text = global_settings_file
xml_parent, "globalSettings", {"class": settings["global-settings"]}
)
XML.SubElement(gsettings, "path").text = global_settings_file
elif global_settings_type == "cfp":
gsettings = XML.SubElement(
xml_parent,
"globalSettings",
{"class": settings["config-file-provider-global-settings"]},
)
XML.SubElement(gsettings, "settingsConfigId").text = global_settings_file
else:
raise InvalidAttributeError(
'settings-type', global_settings_type, SETTINGS_TYPES)
"settings-type", global_settings_type, SETTINGS_TYPES
)
else:
XML.SubElement(xml_parent, 'globalSettings',
{'class': settings['default-global-settings']})
XML.SubElement(
xml_parent, "globalSettings", {"class": settings["default-global-settings"]}
)
def copyartifact_build_selector(xml_parent, data, select_tag='selector'):
def copyartifact_build_selector(xml_parent, data, select_tag="selector"):
select = data.get('which-build', 'last-successful')
select = data.get("which-build", "last-successful")
selectdict = {
'last-successful': 'StatusBuildSelector',
'last-completed': 'LastCompletedBuildSelector',
'specific-build': 'SpecificBuildSelector',
'last-saved': 'SavedBuildSelector',
'upstream-build': 'TriggeredBuildSelector',
'permalink': 'PermalinkBuildSelector',
'workspace-latest': 'WorkspaceSelector',
'build-param': 'ParameterizedBuildSelector',
'downstream-build': 'DownstreamBuildSelector',
'multijob-build': 'MultiJobBuildSelector'
"last-successful": "StatusBuildSelector",
"last-completed": "LastCompletedBuildSelector",
"specific-build": "SpecificBuildSelector",
"last-saved": "SavedBuildSelector",
"upstream-build": "TriggeredBuildSelector",
"permalink": "PermalinkBuildSelector",
"workspace-latest": "WorkspaceSelector",
"build-param": "ParameterizedBuildSelector",
"downstream-build": "DownstreamBuildSelector",
"multijob-build": "MultiJobBuildSelector",
}
if select not in selectdict:
raise InvalidAttributeError('which-build',
select,
selectdict.keys())
permalink = data.get('permalink', 'last')
permalinkdict = {'last': 'lastBuild',
'last-stable': 'lastStableBuild',
'last-successful': 'lastSuccessfulBuild',
'last-failed': 'lastFailedBuild',
'last-unstable': 'lastUnstableBuild',
'last-unsuccessful': 'lastUnsuccessfulBuild'}
raise InvalidAttributeError("which-build", select, selectdict.keys())
permalink = data.get("permalink", "last")
permalinkdict = {
"last": "lastBuild",
"last-stable": "lastStableBuild",
"last-successful": "lastSuccessfulBuild",
"last-failed": "lastFailedBuild",
"last-unstable": "lastUnstableBuild",
"last-unsuccessful": "lastUnsuccessfulBuild",
}
if permalink not in permalinkdict:
raise InvalidAttributeError('permalink',
permalink,
permalinkdict.keys())
if select == 'multijob-build':
selector = XML.SubElement(xml_parent, select_tag,
{'class':
'com.tikal.jenkins.plugins.multijob.' +
selectdict[select]})
raise InvalidAttributeError("permalink", permalink, permalinkdict.keys())
if select == "multijob-build":
selector = XML.SubElement(
xml_parent,
select_tag,
{"class": "com.tikal.jenkins.plugins.multijob." + selectdict[select]},
)
else:
selector = XML.SubElement(xml_parent, select_tag,
{'class':
'hudson.plugins.copyartifact.' +
selectdict[select]})
selector = XML.SubElement(
xml_parent,
select_tag,
{"class": "hudson.plugins.copyartifact." + selectdict[select]},
)
mapping = []
if select == 'specific-build':
mapping.append(('build-number', 'buildNumber', ''))
if select == 'last-successful':
mapping.append(('stable', 'stable', False))
if select == 'upstream-build':
if select == "specific-build":
mapping.append(("build-number", "buildNumber", ""))
if select == "last-successful":
mapping.append(("stable", "stable", False))
if select == "upstream-build":
mapping.append(
('fallback-to-last-successful', 'fallbackToLastSuccessful', False))
if select == 'permalink':
mapping.append(('', 'id', permalinkdict[permalink]))
if select == 'build-param':
mapping.append(('param', 'parameterName', ''))
if select == 'downstream-build':
mapping.append(
('upstream-project-name', 'upstreamProjectName', ''))
mapping.append(
('upstream-build-number', 'upstreamBuildNumber', ''))
("fallback-to-last-successful", "fallbackToLastSuccessful", False)
)
if select == "permalink":
mapping.append(("", "id", permalinkdict[permalink]))
if select == "build-param":
mapping.append(("param", "parameterName", ""))
if select == "downstream-build":
mapping.append(("upstream-project-name", "upstreamProjectName", ""))
mapping.append(("upstream-build-number", "upstreamBuildNumber", ""))
convert_mapping_to_xml(selector, data, mapping, fail_required=False)
def findbugs_settings(xml_parent, data):
# General Options
mapping = [
('rank-priority', 'isRankActivated', False),
('include-files', 'includePattern', ''),
('exclude-files', 'excludePattern', ''),
("rank-priority", "isRankActivated", False),
("include-files", "includePattern", ""),
("exclude-files", "excludePattern", ""),
]
convert_mapping_to_xml(xml_parent, data, mapping, fail_required=True)
def get_value_from_yaml_or_config_file(key, section, data, jjb_config):
return jjb_config.get_plugin_config(section, key, data.get(key, ''))
return jjb_config.get_plugin_config(section, key, data.get(key, ""))
def cloudformation_region_dict():
region_dict = {'us-east-1': 'US_East_Northern_Virginia',
'us-west-1': 'US_WEST_Northern_California',
'us-west-2': 'US_WEST_Oregon',
'eu-central-1': 'EU_Frankfurt',
'eu-west-1': 'EU_Ireland',
'ap-southeast-1': 'Asia_Pacific_Singapore',
'ap-southeast-2': 'Asia_Pacific_Sydney',
'ap-northeast-1': 'Asia_Pacific_Tokyo',
'sa-east-1': 'South_America_Sao_Paulo'}
region_dict = {
"us-east-1": "US_East_Northern_Virginia",
"us-west-1": "US_WEST_Northern_California",
"us-west-2": "US_WEST_Oregon",
"eu-central-1": "EU_Frankfurt",
"eu-west-1": "EU_Ireland",
"ap-southeast-1": "Asia_Pacific_Singapore",
"ap-southeast-2": "Asia_Pacific_Sydney",
"ap-northeast-1": "Asia_Pacific_Tokyo",
"sa-east-1": "South_America_Sao_Paulo",
}
return region_dict
def cloudformation_init(xml_parent, data, xml_tag):
cloudformation = XML.SubElement(
xml_parent, 'com.syncapse.jenkinsci.'
'plugins.awscloudformationwrapper.' + xml_tag)
return XML.SubElement(cloudformation, 'stacks')
xml_parent,
"com.syncapse.jenkinsci." "plugins.awscloudformationwrapper." + xml_tag,
)
return XML.SubElement(cloudformation, "stacks")
def cloudformation_stack(xml_parent, stack, xml_tag, stacks, region_dict):
if 'name' not in stack or stack['name'] == '':
raise MissingAttributeError('name')
if "name" not in stack or stack["name"] == "":
raise MissingAttributeError("name")
step = XML.SubElement(
stacks, 'com.syncapse.jenkinsci.plugins.'
'awscloudformationwrapper.' + xml_tag)
stacks, "com.syncapse.jenkinsci.plugins." "awscloudformationwrapper." + xml_tag
)
if xml_tag == 'SimpleStackBean':
mapping = [('prefix', 'isPrefixSelected', False)]
if xml_tag == "SimpleStackBean":
mapping = [("prefix", "isPrefixSelected", False)]
else:
parameters_value = ','.join(stack.get('parameters', []))
parameters_value = ",".join(stack.get("parameters", []))
mapping = [
('description', 'description', ''),
('', 'parameters', parameters_value),
('timeout', 'timeout', '0'),
('sleep', 'sleep', '0'),
('recipe', 'cloudFormationRecipe', None)]
("description", "description", ""),
("", "parameters", parameters_value),
("timeout", "timeout", "0"),
("sleep", "sleep", "0"),
("recipe", "cloudFormationRecipe", None),
]
cloudformation_stack_mapping = [
('name', 'stackName', None),
('access-key', 'awsAccessKey', None),
('secret-key', 'awsSecretKey', None),
('region', 'awsRegion', None, region_dict)]
("name", "stackName", None),
("access-key", "awsAccessKey", None),
("secret-key", "awsSecretKey", None),
("region", "awsRegion", None, region_dict),
]
for map in mapping:
cloudformation_stack_mapping.append(map)
convert_mapping_to_xml(step, stack,
cloudformation_stack_mapping, fail_required=True)
convert_mapping_to_xml(
step, stack, cloudformation_stack_mapping, fail_required=True
)
def include_exclude_patterns(xml_parent, data, yaml_prefix,
xml_elem_name):
def include_exclude_patterns(xml_parent, data, yaml_prefix, xml_elem_name):
xml_element = XML.SubElement(xml_parent, xml_elem_name)
XML.SubElement(xml_element, 'includePatterns').text = ','.join(
data.get(yaml_prefix + '-include-patterns', []))
XML.SubElement(xml_element, 'excludePatterns').text = ','.join(
data.get(yaml_prefix + '-exclude-patterns', []))
XML.SubElement(xml_element, "includePatterns").text = ",".join(
data.get(yaml_prefix + "-include-patterns", [])
)
XML.SubElement(xml_element, "excludePatterns").text = ",".join(
data.get(yaml_prefix + "-exclude-patterns", [])
)
def artifactory_deployment_patterns(xml_parent, data):
include_exclude_patterns(xml_parent, data, 'deployment',
'artifactDeploymentPatterns')
include_exclude_patterns(
xml_parent, data, "deployment", "artifactDeploymentPatterns"
)
def artifactory_env_vars_patterns(xml_parent, data):
include_exclude_patterns(xml_parent, data, 'env-vars',
'envVarsPatterns')
include_exclude_patterns(xml_parent, data, "env-vars", "envVarsPatterns")
def artifactory_optional_props(xml_parent, data, target):
optional_str_props = [
('scopes', 'scopes'),
('violationRecipients', 'violation-recipients'),
('blackDuckAppName', 'black-duck-app-name'),
('blackDuckAppVersion', 'black-duck-app-version'),
('blackDuckReportRecipients', 'black-duck-report-recipients'),
('blackDuckScopes', 'black-duck-scopes')
("scopes", "scopes"),
("violationRecipients", "violation-recipients"),
("blackDuckAppName", "black-duck-app-name"),
("blackDuckAppVersion", "black-duck-app-version"),
("blackDuckReportRecipients", "black-duck-report-recipients"),
("blackDuckScopes", "black-duck-scopes"),
]
for (xml_prop, yaml_prop) in optional_str_props:
XML.SubElement(xml_parent, xml_prop).text = data.get(
yaml_prop, '')
XML.SubElement(xml_parent, xml_prop).text = data.get(yaml_prop, "")
common_bool_props = [
# yaml property name, xml property name, default value
('deploy-artifacts', 'deployArtifacts', True),
('discard-old-builds', 'discardOldBuilds', False),
('discard-build-artifacts', 'discardBuildArtifacts', False),
('publish-build-info', 'deployBuildInfo', False),
('env-vars-include', 'includeEnvVars', False),
('run-checks', 'runChecks', False),
('include-publish-artifacts', 'includePublishArtifacts', False),
('license-auto-discovery', 'licenseAutoDiscovery', True),
('enable-issue-tracker-integration', 'enableIssueTrackerIntegration',
False),
('aggregate-build-issues', 'aggregateBuildIssues', False),
('black-duck-run-checks', 'blackDuckRunChecks', False),
('black-duck-include-published-artifacts',
'blackDuckIncludePublishedArtifacts', False),
('auto-create-missing-component-requests',
'autoCreateMissingComponentRequests', True),
('auto-discard-stale-component-requests',
'autoDiscardStaleComponentRequests', True),
('filter-excluded-artifacts-from-build',
'filterExcludedArtifactsFromBuild', False)
("deploy-artifacts", "deployArtifacts", True),
("discard-old-builds", "discardOldBuilds", False),
("discard-build-artifacts", "discardBuildArtifacts", False),
("publish-build-info", "deployBuildInfo", False),
("env-vars-include", "includeEnvVars", False),
("run-checks", "runChecks", False),
("include-publish-artifacts", "includePublishArtifacts", False),
("license-auto-discovery", "licenseAutoDiscovery", True),
("enable-issue-tracker-integration", "enableIssueTrackerIntegration", False),
("aggregate-build-issues", "aggregateBuildIssues", False),
("black-duck-run-checks", "blackDuckRunChecks", False),
(
"black-duck-include-published-artifacts",
"blackDuckIncludePublishedArtifacts",
False,
),
(
"auto-create-missing-component-requests",
"autoCreateMissingComponentRequests",
True,
),
(
"auto-discard-stale-component-requests",
"autoDiscardStaleComponentRequests",
True,
),
(
"filter-excluded-artifacts-from-build",
"filterExcludedArtifactsFromBuild",
False,
),
]
convert_mapping_to_xml(
xml_parent, data, common_bool_props, fail_required=True)
convert_mapping_to_xml(xml_parent, data, common_bool_props, fail_required=True)
if 'wrappers' in target:
if "wrappers" in target:
wrapper_bool_props = [
('enable-resolve-artifacts', 'enableResolveArtifacts', False),
('disable-license-auto-discovery',
'disableLicenseAutoDiscovery', False),
('record-all-dependencies',
'recordAllDependencies', False)
("enable-resolve-artifacts", "enableResolveArtifacts", False),
("disable-license-auto-discovery", "disableLicenseAutoDiscovery", False),
("record-all-dependencies", "recordAllDependencies", False),
]
convert_mapping_to_xml(
xml_parent, data, wrapper_bool_props, fail_required=True)
convert_mapping_to_xml(xml_parent, data, wrapper_bool_props, fail_required=True)
if 'publishers' in target:
if "publishers" in target:
publisher_bool_props = [
('even-if-unstable', 'evenIfUnstable', False),
('pass-identified-downstream', 'passIdentifiedDownstream', False),
('allow-promotion-of-non-staged-builds',
'allowPromotionOfNonStagedBuilds', False)
("even-if-unstable", "evenIfUnstable", False),
("pass-identified-downstream", "passIdentifiedDownstream", False),
(
"allow-promotion-of-non-staged-builds",
"allowPromotionOfNonStagedBuilds",
False,
),
]
convert_mapping_to_xml(
xml_parent, data, publisher_bool_props, fail_required=True)
xml_parent, data, publisher_bool_props, fail_required=True
)
def artifactory_common_details(details, data):
mapping = [
('name', 'artifactoryName', ''),
('url', 'artifactoryUrl', ''),
]
mapping = [("name", "artifactoryName", ""), ("url", "artifactoryUrl", "")]
convert_mapping_to_xml(details, data, mapping, fail_required=True)
def artifactory_repository(xml_parent, data, target):
if 'release' in target:
if "release" in target:
release_mapping = [
('deploy-release-repo-key', 'keyFromText', ''),
('deploy-release-repo-key', 'keyFromSelect', ''),
('deploy-dynamic-mode', 'dynamicMode', False),
("deploy-release-repo-key", "keyFromText", ""),
("deploy-release-repo-key", "keyFromSelect", ""),
("deploy-dynamic-mode", "dynamicMode", False),
]
convert_mapping_to_xml(
xml_parent, data, release_mapping, fail_required=True)
convert_mapping_to_xml(xml_parent, data, release_mapping, fail_required=True)
if 'snapshot' in target:
if "snapshot" in target:
snapshot_mapping = [
('deploy-snapshot-repo-key', 'keyFromText', ''),
('deploy-snapshot-repo-key', 'keyFromSelect', ''),
('deploy-dynamic-mode', 'dynamicMode', False),
("deploy-snapshot-repo-key", "keyFromText", ""),
("deploy-snapshot-repo-key", "keyFromSelect", ""),
("deploy-dynamic-mode", "dynamicMode", False),
]
convert_mapping_to_xml(
xml_parent, data, snapshot_mapping, fail_required=True)
convert_mapping_to_xml(xml_parent, data, snapshot_mapping, fail_required=True)
def append_git_revision_config(parent, config_def):
params = XML.SubElement(
parent, 'hudson.plugins.git.GitRevisionBuildParameters')
params = XML.SubElement(parent, "hudson.plugins.git.GitRevisionBuildParameters")
try:
# If git-revision is a boolean, the get() will
# throw an AttributeError
combine_commits = str(
config_def.get('combine-queued-commits', False)).lower()
combine_commits = str(config_def.get("combine-queued-commits", False)).lower()
except AttributeError:
combine_commits = 'false'
combine_commits = "false"
XML.SubElement(params, 'combineQueuedCommits').text = combine_commits
XML.SubElement(params, "combineQueuedCommits").text = combine_commits
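append_git_revision_config accepts git-revision either as a bare boolean or as a mapping; the AttributeError fallback is what makes the boolean form work, since bool has no .get. A usage sketch, assuming the helper as defined above is in scope:

import xml.etree.ElementTree as XML

configs = XML.Element("configs")
append_git_revision_config(configs, True)                              # boolean form
append_git_revision_config(configs, {"combine-queued-commits": True})  # mapping form
# the first call emits <combineQueuedCommits>false</combineQueuedCommits>,
# the second <combineQueuedCommits>true</combineQueuedCommits>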
def test_fairy_common(xml_element, data):
xml_element.set('plugin', 'TestFairy')
valid_max_duration = ['10m', '60m', '300m', '1440m']
xml_element.set("plugin", "TestFairy")
valid_max_duration = ["10m", "60m", "300m", "1440m"]
valid_interval = [1, 2, 5]
valid_video_quality = ['high', 'medium', 'low']
valid_video_quality = ["high", "medium", "low"]
mappings = [
# General
('apikey', 'apiKey', None),
('appfile', 'appFile', None),
('tester-groups', 'testersGroups', ''),
('notify-testers', 'notifyTesters', True),
('autoupdate', 'autoUpdate', True),
("apikey", "apiKey", None),
("appfile", "appFile", None),
("tester-groups", "testersGroups", ""),
("notify-testers", "notifyTesters", True),
("autoupdate", "autoUpdate", True),
# Session
('max-duration', 'maxDuration', '10m', valid_max_duration),
('record-on-background', 'recordOnBackground', False),
('data-only-wifi', 'dataOnlyWifi', False),
("max-duration", "maxDuration", "10m", valid_max_duration),
("record-on-background", "recordOnBackground", False),
("data-only-wifi", "dataOnlyWifi", False),
# Video
('video-enabled', 'isVideoEnabled', True),
('screenshot-interval', 'screenshotInterval', 1, valid_interval),
('video-quality', 'videoQuality', 'high', valid_video_quality),
("video-enabled", "isVideoEnabled", True),
("screenshot-interval", "screenshotInterval", 1, valid_interval),
("video-quality", "videoQuality", "high", valid_video_quality),
# Metrics
('cpu', 'cpu', True),
('memory', 'memory', True),
('logs', 'logs', True),
('network', 'network', False),
('phone-signal', 'phoneSignal', False),
('wifi', 'wifi', False),
('gps', 'gps', False),
('battery', 'battery', False),
('opengl', 'openGl', False),
("cpu", "cpu", True),
("memory", "memory", True),
("logs", "logs", True),
("network", "network", False),
("phone-signal", "phoneSignal", False),
("wifi", "wifi", False),
("gps", "gps", False),
("battery", "battery", False),
("opengl", "openGl", False),
# Advanced options
('advanced-options', 'advancedOptions', '')
("advanced-options", "advancedOptions", ""),
]
convert_mapping_to_xml(xml_element, data, mappings, fail_required=True)
@@ -471,25 +480,31 @@ def test_fairy_common(xml_element, data):
def trigger_get_parameter_order(registry, plugin):
logger = logging.getLogger("%s:trigger_get_parameter_order" % __name__)
if str(registry.jjb_config.get_plugin_config(
plugin, 'param_order_from_yaml', True)).lower() == 'false':
if (
str(
registry.jjb_config.get_plugin_config(plugin, "param_order_from_yaml", True)
).lower()
== "false"
):
logger.warning(
"Using deprecated order for parameter sets in %s. It is "
"recommended that you update your job definition instead of "
"enabling use of the old hardcoded order", plugin)
"enabling use of the old hardcoded order",
plugin,
)
# deprecated order
return [
'predefined-parameters',
'git-revision',
'property-file',
'current-parameters',
'node-parameters',
'svn-revision',
'restrict-matrix-project',
'node-label-name',
'node-label',
'boolean-parameters',
"predefined-parameters",
"git-revision",
"property-file",
"current-parameters",
"node-parameters",
"svn-revision",
"restrict-matrix-project",
"node-label-name",
"node-label",
"boolean-parameters",
]
return None
@@ -498,7 +513,7 @@ def trigger_get_parameter_order(registry, plugin):
def trigger_project(tconfigs, project_def, param_order=None):
logger = logging.getLogger("%s:trigger_project" % __name__)
pt_prefix = 'hudson.plugins.parameterizedtrigger.'
pt_prefix = "hudson.plugins.parameterizedtrigger."
if param_order:
parameters = param_order
else:
@@ -509,88 +524,93 @@ def trigger_project(tconfigs, project_def, param_order=None):
if param_value is None:
continue
if param_type == 'predefined-parameters':
params = XML.SubElement(tconfigs, pt_prefix +
'PredefinedBuildParameters')
properties = XML.SubElement(params, 'properties')
if param_type == "predefined-parameters":
params = XML.SubElement(tconfigs, pt_prefix + "PredefinedBuildParameters")
properties = XML.SubElement(params, "properties")
properties.text = param_value
elif param_type == 'git-revision' and param_value:
if 'combine-queued-commits' in project_def:
elif param_type == "git-revision" and param_value:
if "combine-queued-commits" in project_def:
logger.warning(
"'combine-queued-commit' has moved to reside under "
"'git-revision' configuration, please update your "
"configs as support for this will be removed."
)
git_revision = {
'combine-queued-commits':
project_def['combine-queued-commits']
"combine-queued-commits": project_def["combine-queued-commits"]
}
else:
git_revision = project_def['git-revision']
git_revision = project_def["git-revision"]
append_git_revision_config(tconfigs, git_revision)
elif param_type == 'property-file':
params = XML.SubElement(tconfigs,
pt_prefix + 'FileBuildParameters')
elif param_type == "property-file":
params = XML.SubElement(tconfigs, pt_prefix + "FileBuildParameters")
property_file_mapping = [
('property-file', 'propertiesFile', None),
('fail-on-missing', 'failTriggerOnMissing', False)]
convert_mapping_to_xml(params, project_def,
property_file_mapping, fail_required=True)
if 'file-encoding' in project_def:
XML.SubElement(params, 'encoding'
).text = project_def['file-encoding']
if 'use-matrix-child-files' in project_def:
("property-file", "propertiesFile", None),
("fail-on-missing", "failTriggerOnMissing", False),
]
convert_mapping_to_xml(
params, project_def, property_file_mapping, fail_required=True
)
if "file-encoding" in project_def:
XML.SubElement(params, "encoding").text = project_def["file-encoding"]
if "use-matrix-child-files" in project_def:
# TODO: These parameters only affect execution in
# publishers of matrix projects; we should warn if they are
# used in other contexts.
use_matrix_child_files_mapping = [
('use-matrix-child-files', "useMatrixChild", None),
('matrix-child-combination-filter',
"combinationFilter", ''),
('only-exact-matrix-child-runs', "onlyExactRuns", False)]
convert_mapping_to_xml(params, project_def,
use_matrix_child_files_mapping, fail_required=True)
elif param_type == 'current-parameters' and param_value:
XML.SubElement(tconfigs, pt_prefix + 'CurrentBuildParameters')
elif param_type == 'node-parameters' and param_value:
XML.SubElement(tconfigs, pt_prefix + 'NodeParameters')
elif param_type == 'svn-revision' and param_value:
param = XML.SubElement(tconfigs, pt_prefix +
'SubversionRevisionBuildParameters')
XML.SubElement(param, 'includeUpstreamParameters').text = str(
project_def.get('include-upstream', False)).lower()
elif param_type == 'restrict-matrix-project' and param_value:
subset = XML.SubElement(tconfigs, pt_prefix +
'matrix.MatrixSubsetBuildParameters')
XML.SubElement(subset, 'filter'
).text = project_def['restrict-matrix-project']
elif (param_type == 'node-label-name' or
param_type == 'node-label'):
tag_name = ('org.jvnet.jenkins.plugins.nodelabelparameter.'
'parameterizedtrigger.NodeLabelBuildParameter')
("use-matrix-child-files", "useMatrixChild", None),
("matrix-child-combination-filter", "combinationFilter", ""),
("only-exact-matrix-child-runs", "onlyExactRuns", False),
]
convert_mapping_to_xml(
params,
project_def,
use_matrix_child_files_mapping,
fail_required=True,
)
elif param_type == "current-parameters" and param_value:
XML.SubElement(tconfigs, pt_prefix + "CurrentBuildParameters")
elif param_type == "node-parameters" and param_value:
XML.SubElement(tconfigs, pt_prefix + "NodeParameters")
elif param_type == "svn-revision" and param_value:
param = XML.SubElement(
tconfigs, pt_prefix + "SubversionRevisionBuildParameters"
)
XML.SubElement(param, "includeUpstreamParameters").text = str(
project_def.get("include-upstream", False)
).lower()
elif param_type == "restrict-matrix-project" and param_value:
subset = XML.SubElement(
tconfigs, pt_prefix + "matrix.MatrixSubsetBuildParameters"
)
XML.SubElement(subset, "filter").text = project_def[
"restrict-matrix-project"
]
elif param_type == "node-label-name" or param_type == "node-label":
tag_name = (
"org.jvnet.jenkins.plugins.nodelabelparameter."
"parameterizedtrigger.NodeLabelBuildParameter"
)
if tconfigs.find(tag_name) is not None:
# already processed and can only have one
continue
params = XML.SubElement(tconfigs, tag_name)
name = XML.SubElement(params, 'name')
if 'node-label-name' in project_def:
name.text = project_def['node-label-name']
label = XML.SubElement(params, 'nodeLabel')
if 'node-label' in project_def:
label.text = project_def['node-label']
elif param_type == 'boolean-parameters' and param_value:
params = XML.SubElement(tconfigs,
pt_prefix + 'BooleanParameters')
config_tag = XML.SubElement(params, 'configs')
param_tag_text = pt_prefix + 'BooleanParameterConfig'
name = XML.SubElement(params, "name")
if "node-label-name" in project_def:
name.text = project_def["node-label-name"]
label = XML.SubElement(params, "nodeLabel")
if "node-label" in project_def:
label.text = project_def["node-label"]
elif param_type == "boolean-parameters" and param_value:
params = XML.SubElement(tconfigs, pt_prefix + "BooleanParameters")
config_tag = XML.SubElement(params, "configs")
param_tag_text = pt_prefix + "BooleanParameterConfig"
params_list = param_value
for name, value in params_list.items():
param_tag = XML.SubElement(config_tag, param_tag_text)
mapping = [
('', 'name', name),
('', 'value', value or False)]
convert_mapping_to_xml(param_tag, project_def,
mapping, fail_required=True)
mapping = [("", "name", name), ("", "value", value or False)]
convert_mapping_to_xml(
param_tag, project_def, mapping, fail_required=True
)
def convert_mapping_to_xml(parent, data, mapping, fail_required=True):
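The body of convert_mapping_to_xml is not shown in this hunk; judging only from the call sites above, its contract is roughly the following. This is a rough sketch, not the actual implementation, which raises the module's own error classes rather than ValueError:

import xml.etree.ElementTree as XML

def convert_mapping_to_xml(parent, data, mapping, fail_required=True):
    # each entry is (yaml key, xml tag, default), plus an optional
    # fourth element constraining the valid values
    for entry in mapping:
        key, tag_name, default = entry[0], entry[1], entry[2]
        value = data.get(key, default) if key else default
        if value is None:
            if fail_required:
                raise ValueError("missing required attribute: %s" % key)
            continue
        if len(entry) == 4 and value not in entry[3]:
            raise ValueError("%r is invalid for %s" % (value, key))
        text = str(value).lower() if isinstance(value, bool) else str(value)
        XML.SubElement(parent, tag_name).text = text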
@@ -661,19 +681,17 @@ def jms_messaging_common(parent, subelement, data):
data is passed to mapper helper function to map yaml fields to XML fields
"""
namespace = XML.SubElement(parent,
subelement)
namespace = XML.SubElement(parent, subelement)
if 'override-topic' in data:
overrides = XML.SubElement(namespace, 'overrides')
XML.SubElement(overrides,
'topic').text = str(data.get('override-topic', ''))
if "override-topic" in data:
overrides = XML.SubElement(namespace, "overrides")
XML.SubElement(overrides, "topic").text = str(data.get("override-topic", ""))
mapping = [
# option, xml name, default value
("provider-name", 'providerName', ''),
("msg-type", 'messageType', 'CodeQualityChecksDone'),
("msg-props", 'messageProperties', ''),
("msg-content", 'messageContent', ''),
("provider-name", "providerName", ""),
("msg-type", "messageType", "CodeQualityChecksDone"),
("msg-props", "messageProperties", ""),
("msg-content", "messageContent", ""),
]
convert_mapping_to_xml(namespace, data, mapping, fail_required=True)
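
Since nearly every hunk in this patch touches a convert_mapping_to_xml call, a minimal sketch of the (yaml-key, xml-tag, default) convention may help reviewers. This is a simplified, hypothetical reimplementation with illustrative values, not the real helper, which also validates allowed values and raises on missing required keys:

import xml.etree.ElementTree as XML

def convert_mapping_to_xml_sketch(parent, data, mapping):
    # Each mapping row is (yaml-key, xml-tag, default); a fourth element,
    # when present in the real helper, lists the allowed values.
    for yaml_key, xml_tag, default in mapping:
        value = data.get(yaml_key, default)
        # Booleans are serialized lowercase to match Jenkins XML.
        if isinstance(value, bool):
            value = str(value).lower()
        XML.SubElement(parent, xml_tag).text = str(value)

ci = XML.Element("ci")
convert_mapping_to_xml_sketch(
    ci,
    {"provider-name": "fedmsg"},
    [("provider-name", "providerName", ""),
     ("msg-type", "messageType", "CodeQualityChecksDone")],
)
print(XML.tostring(ci).decode())
# -> <ci><providerName>fedmsg</providerName><messageType>CodeQualityChecksDone</messageType></ci>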


@ -104,95 +104,99 @@ class HipChat(jenkins_jobs.modules.base.Base):
jjb_config = self.registry.jjb_config
if not self.authToken:
try:
self.authToken = jjb_config.get_plugin_config('hipchat',
'authtoken')
self.authToken = jjb_config.get_plugin_config("hipchat", "authtoken")
# Require that the authtoken is non-null
if self.authToken == '':
if self.authToken == "":
raise jenkins_jobs.errors.JenkinsJobsException(
"Hipchat authtoken must not be a blank string")
except (configparser.NoSectionError,
jenkins_jobs.errors.JenkinsJobsException) as e:
logger.fatal("The configuration file needs a hipchat section" +
" containing authtoken:\n{0}".format(e))
"Hipchat authtoken must not be a blank string"
)
except (
configparser.NoSectionError,
jenkins_jobs.errors.JenkinsJobsException,
) as e:
logger.fatal(
"The configuration file needs a hipchat section"
+ " containing authtoken:\n{0}".format(e)
)
sys.exit(1)
self.jenkinsUrl = jjb_config.get_plugin_config('hipchat', 'url')
self.sendAs = jjb_config.get_plugin_config('hipchat', 'send-as')
self.jenkinsUrl = jjb_config.get_plugin_config("hipchat", "url")
self.sendAs = jjb_config.get_plugin_config("hipchat", "send-as")
def gen_xml(self, xml_parent, data):
hipchat = data.get('hipchat')
if not hipchat or not hipchat.get('enabled', True):
hipchat = data.get("hipchat")
if not hipchat or not hipchat.get("enabled", True):
return
self._load_global_data()
# convert for compatibility before dispatch
if 'room' in hipchat:
if 'rooms' in hipchat:
logger.warning("Ignoring deprecated 'room' as 'rooms' also "
"defined.")
if "room" in hipchat:
if "rooms" in hipchat:
logger.warning("Ignoring deprecated 'room' as 'rooms' also " "defined.")
else:
logger.warning("'room' is deprecated, please use 'rooms'")
hipchat['rooms'] = [hipchat['room']]
hipchat["rooms"] = [hipchat["room"]]
plugin_info = self.registry.get_plugin_info("Jenkins HipChat Plugin")
version = pkg_resources.parse_version(plugin_info.get('version', '0'))
version = pkg_resources.parse_version(plugin_info.get("version", "0"))
if version >= pkg_resources.parse_version("0.1.9"):
publishers = xml_parent.find('publishers')
publishers = xml_parent.find("publishers")
if publishers is None:
publishers = XML.SubElement(xml_parent, 'publishers')
publishers = XML.SubElement(xml_parent, "publishers")
logger.warning(
"'hipchat' module supports the old plugin versions <1.9, "
"newer versions are supported via the 'publishers' module. "
"Please upgrade you job definition")
component = {'hipchat': hipchat}
return self.registry.dispatch('publisher', publishers, component)
"Please upgrade you job definition"
)
component = {"hipchat": hipchat}
return self.registry.dispatch("publisher", publishers, component)
else:
properties = xml_parent.find('properties')
properties = xml_parent.find("properties")
if properties is None:
properties = XML.SubElement(xml_parent, 'properties')
pdefhip = XML.SubElement(properties,
'jenkins.plugins.hipchat.'
'HipChatNotifier_-HipChatJobProperty')
properties = XML.SubElement(xml_parent, "properties")
pdefhip = XML.SubElement(
properties,
"jenkins.plugins.hipchat." "HipChatNotifier_-HipChatJobProperty",
)
room = XML.SubElement(pdefhip, 'room')
if 'rooms' in hipchat:
room.text = ",".join(hipchat['rooms'])
room = XML.SubElement(pdefhip, "room")
if "rooms" in hipchat:
room.text = ",".join(hipchat["rooms"])
# Handle backwards compatibility 'start-notify' but also add an element
# of standardization with notify-*
if hipchat.get('start-notify'):
logger.warning("'start-notify' is deprecated, please use "
"'notify-start'")
XML.SubElement(pdefhip, 'startNotification').text = str(
hipchat.get('notify-start', hipchat.get('start-notify',
False))).lower()
if hipchat.get("start-notify"):
logger.warning("'start-notify' is deprecated, please use " "'notify-start'")
XML.SubElement(pdefhip, "startNotification").text = str(
hipchat.get("notify-start", hipchat.get("start-notify", False))
).lower()
if version >= pkg_resources.parse_version("0.1.5"):
mapping = [
('notify-success', 'notifySuccess', False),
('notify-aborted', 'notifyAborted', False),
('notify-not-built', 'notifyNotBuilt', False),
('notify-unstable', 'notifyUnstable', False),
('notify-failure', 'notifyFailure', False),
('notify-back-to-normal', 'notifyBackToNormal', False),
("notify-success", "notifySuccess", False),
("notify-aborted", "notifyAborted", False),
("notify-not-built", "notifyNotBuilt", False),
("notify-unstable", "notifyUnstable", False),
("notify-failure", "notifyFailure", False),
("notify-back-to-normal", "notifyBackToNormal", False),
]
helpers.convert_mapping_to_xml(pdefhip,
hipchat, mapping, fail_required=True)
helpers.convert_mapping_to_xml(
pdefhip, hipchat, mapping, fail_required=True
)
publishers = xml_parent.find('publishers')
publishers = xml_parent.find("publishers")
if publishers is None:
publishers = XML.SubElement(xml_parent, 'publishers')
hippub = XML.SubElement(publishers,
'jenkins.plugins.hipchat.HipChatNotifier')
publishers = XML.SubElement(xml_parent, "publishers")
hippub = XML.SubElement(publishers, "jenkins.plugins.hipchat.HipChatNotifier")
if version >= pkg_resources.parse_version("0.1.8"):
XML.SubElement(hippub, 'buildServerUrl').text = self.jenkinsUrl
XML.SubElement(hippub, 'sendAs').text = self.sendAs
XML.SubElement(hippub, "buildServerUrl").text = self.jenkinsUrl
XML.SubElement(hippub, "sendAs").text = self.sendAs
else:
XML.SubElement(hippub, 'jenkinsUrl').text = self.jenkinsUrl
XML.SubElement(hippub, "jenkinsUrl").text = self.jenkinsUrl
XML.SubElement(hippub, 'authToken').text = self.authToken
XML.SubElement(hippub, "authToken").text = self.authToken
# The room specified here is the default room. The default is
# redundant in this case since a room must be specified. Leave empty.
XML.SubElement(hippub, 'room').text = ''
XML.SubElement(hippub, "room").text = ""


@ -12,45 +12,20 @@
# Representation of the hudson.model.Result class
SUCCESS = {
'name': 'SUCCESS',
'ordinal': '0',
'color': 'BLUE',
'complete': True
}
SUCCESS = {"name": "SUCCESS", "ordinal": "0", "color": "BLUE", "complete": True}
UNSTABLE = {
'name': 'UNSTABLE',
'ordinal': '1',
'color': 'YELLOW',
'complete': True
}
UNSTABLE = {"name": "UNSTABLE", "ordinal": "1", "color": "YELLOW", "complete": True}
FAILURE = {
'name': 'FAILURE',
'ordinal': '2',
'color': 'RED',
'complete': True
}
FAILURE = {"name": "FAILURE", "ordinal": "2", "color": "RED", "complete": True}
NOTBUILD = {
'name': 'NOT_BUILD',
'ordinal': '3',
'color': 'NOTBUILD',
'complete': False
}
NOTBUILD = {"name": "NOT_BUILD", "ordinal": "3", "color": "NOTBUILD", "complete": False}
ABORTED = {
'name': 'ABORTED',
'ordinal': '4',
'color': 'ABORTED',
'complete': False
}
ABORTED = {"name": "ABORTED", "ordinal": "4", "color": "ABORTED", "complete": False}
THRESHOLDS = {
'SUCCESS': SUCCESS,
'UNSTABLE': UNSTABLE,
'FAILURE': FAILURE,
'NOT_BUILD': NOTBUILD,
'ABORTED': ABORTED
"SUCCESS": SUCCESS,
"UNSTABLE": UNSTABLE,
"FAILURE": FAILURE,
"NOT_BUILD": NOTBUILD,
"ABORTED": ABORTED,
}
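
As a usage note, later hunks in this patch (the Matrix touchstone and the Maven post-step handling) flatten one of these dicts into a result-condition element, roughly as in this sketch:

import xml.etree.ElementTree as XML

FAILURE = {"name": "FAILURE", "ordinal": "2", "color": "RED", "complete": True}

cond = XML.Element("runPostStepsIfResult")
for key in ("name", "ordinal", "color"):  # 'complete' is never serialized
    XML.SubElement(cond, key).text = FAILURE[key]
print(XML.tostring(cond).decode())
# -> <runPostStepsIfResult><name>FAILURE</name><ordinal>2</ordinal><color>RED</color></runPostStepsIfResult>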


@ -38,13 +38,14 @@ import jenkins_jobs.modules.base
def base_metadata(registry, xml_parent, data, mtype):
pdef = XML.SubElement(xml_parent, mtype)
XML.SubElement(pdef, 'name').text = data['name']
XML.SubElement(pdef, 'generated').text = 'false'
XML.SubElement(pdef, 'parent', attrib={"class": "job-metadata",
"reference": "../../.."})
XML.SubElement(pdef, "name").text = data["name"]
XML.SubElement(pdef, "generated").text = "false"
XML.SubElement(
pdef, "parent", attrib={"class": "job-metadata", "reference": "../../.."}
)
exposed_to_env = XML.SubElement(pdef, 'exposedToEnvironment')
exposed_to_env.text = str(data.get('expose-to-env', False)).lower()
exposed_to_env = XML.SubElement(pdef, "exposedToEnvironment")
exposed_to_env.text = str(data.get("expose-to-env", False)).lower()
return pdef
@ -64,10 +65,9 @@ def string_metadata(registry, xml_parent, data):
value: bar
expose-to-env: true
"""
pdef = base_metadata(registry, xml_parent, data,
'metadata-string')
value = data.get('value', '')
XML.SubElement(pdef, 'value').text = value
pdef = base_metadata(registry, xml_parent, data, "metadata-string")
value = data.get("value", "")
XML.SubElement(pdef, "value").text = value
def number_metadata(registry, xml_parent, data):
@ -86,10 +86,9 @@ def number_metadata(registry, xml_parent, data):
value: 1
expose-to-env: true
"""
pdef = base_metadata(registry, xml_parent, data,
'metadata-number')
value = data.get('value', '')
XML.SubElement(pdef, 'value').text = value
pdef = base_metadata(registry, xml_parent, data, "metadata-number")
value = data.get("value", "")
XML.SubElement(pdef, "value").text = value
def date_metadata(registry, xml_parent, data):
@ -110,30 +109,28 @@ def date_metadata(registry, xml_parent, data):
timezone: Australia/Melbourne
expose-to-env: true
"""
pdef = base_metadata(registry, xml_parent, data,
'metadata-date')
pdef = base_metadata(registry, xml_parent, data, "metadata-date")
# TODO: convert time from any reasonable format into epoch
mval = XML.SubElement(pdef, 'value')
XML.SubElement(mval, 'time').text = data['time']
XML.SubElement(mval, 'timezone').text = data['timezone']
XML.SubElement(pdef, 'checked').text = 'true'
mval = XML.SubElement(pdef, "value")
XML.SubElement(mval, "time").text = data["time"]
XML.SubElement(mval, "timezone").text = data["timezone"]
XML.SubElement(pdef, "checked").text = "true"
class Metadata(jenkins_jobs.modules.base.Base):
sequence = 21
component_type = 'metadata'
component_list_type = 'metadata'
component_type = "metadata"
component_list_type = "metadata"
def gen_xml(self, xml_parent, data):
properties = xml_parent.find('properties')
properties = xml_parent.find("properties")
if properties is None:
properties = XML.SubElement(xml_parent, 'properties')
properties = XML.SubElement(xml_parent, "properties")
metadata = data.get('metadata', [])
metadata = data.get("metadata", [])
if metadata:
pdefp = XML.SubElement(properties,
'job-metadata', plugin="metadata@1.0b")
pdefs = XML.SubElement(pdefp, 'values')
pdefp = XML.SubElement(properties, "job-metadata", plugin="metadata@1.0b")
pdefs = XML.SubElement(pdefp, "values")
for mdata in metadata:
self.registry.dispatch('metadata', pdefs, mdata)
self.registry.dispatch("metadata", pdefs, mdata)


@ -52,44 +52,44 @@ def http_endpoint(registry, xml_parent, data):
:language: yaml
"""
endpoint_element = XML.SubElement(xml_parent,
'com.tikal.hudson.plugins.notification.'
'Endpoint')
supported_formats = ['JSON', 'XML']
supported_events = ['started', 'completed', 'finalized', 'all']
fmt = data.get('format', 'JSON').upper()
event = data.get('event', 'all').lower()
endpoint_element = XML.SubElement(
xml_parent, "com.tikal.hudson.plugins.notification." "Endpoint"
)
supported_formats = ["JSON", "XML"]
supported_events = ["started", "completed", "finalized", "all"]
fmt = data.get("format", "JSON").upper()
event = data.get("event", "all").lower()
mapping = [
('', 'format', fmt, supported_formats),
('', 'protocol', 'HTTP'),
('', 'event', event, supported_events),
('timeout', 'timeout', 30000),
('url', 'url', None),
('log', 'loglines', 0),
("", "format", fmt, supported_formats),
("", "protocol", "HTTP"),
("", "event", event, supported_events),
("timeout", "timeout", 30000),
("url", "url", None),
("log", "loglines", 0),
]
helpers.convert_mapping_to_xml(
endpoint_element, data, mapping, fail_required=True)
helpers.convert_mapping_to_xml(endpoint_element, data, mapping, fail_required=True)
class Notifications(jenkins_jobs.modules.base.Base):
sequence = 22
component_type = 'notification'
component_list_type = 'notifications'
component_type = "notification"
component_list_type = "notifications"
def gen_xml(self, xml_parent, data):
properties = xml_parent.find('properties')
properties = xml_parent.find("properties")
if properties is None:
properties = XML.SubElement(xml_parent, 'properties')
properties = XML.SubElement(xml_parent, "properties")
notifications = data.get('notifications', [])
notifications = data.get("notifications", [])
if notifications:
notify_element = XML.SubElement(properties,
'com.tikal.hudson.plugins.'
'notification.'
'HudsonNotificationProperty')
endpoints_element = XML.SubElement(notify_element, 'endpoints')
notify_element = XML.SubElement(
properties,
"com.tikal.hudson.plugins."
"notification."
"HudsonNotificationProperty",
)
endpoints_element = XML.SubElement(notify_element, "endpoints")
for endpoint in notifications:
self.registry.dispatch('notification',
endpoints_element, endpoint)
self.registry.dispatch("notification", endpoints_element, endpoint)


@ -43,14 +43,14 @@ import jenkins_jobs.modules.helpers as helpers
def base_param(registry, xml_parent, data, do_default, ptype):
pdef = XML.SubElement(xml_parent, ptype)
XML.SubElement(pdef, 'name').text = data['name']
XML.SubElement(pdef, 'description').text = data.get('description', '')
XML.SubElement(pdef, "name").text = data["name"]
XML.SubElement(pdef, "description").text = data.get("description", "")
if do_default:
default = data.get('default', None)
default = data.get("default", None)
if default is not None:
XML.SubElement(pdef, 'defaultValue').text = str(default)
XML.SubElement(pdef, "defaultValue").text = str(default)
else:
XML.SubElement(pdef, 'defaultValue')
XML.SubElement(pdef, "defaultValue")
return pdef
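
For orientation, the XML shape base_param emits for a simple string parameter; this self-contained sketch repeats the element names from the function above rather than importing it:

import xml.etree.ElementTree as XML

pdefs = XML.Element("parameterDefinitions")
pdef = XML.SubElement(pdefs, "hudson.model.StringParameterDefinition")
XML.SubElement(pdef, "name").text = "FOO"
XML.SubElement(pdef, "description").text = "A parameter named FOO, defaults to 'bar'."
XML.SubElement(pdef, "defaultValue").text = "bar"  # the do_default=True path
print(XML.tostring(pdef).decode())
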
@ -70,8 +70,9 @@ def string_param(registry, xml_parent, data):
default: bar
description: "A parameter named FOO, defaults to 'bar'."
"""
base_param(registry, xml_parent, data, True,
'hudson.model.StringParameterDefinition')
base_param(
registry, xml_parent, data, True, "hudson.model.StringParameterDefinition"
)
def promoted_param(registry, xml_parent, data):
@ -92,16 +93,20 @@ def promoted_param(registry, xml_parent, data):
:language: yaml
"""
pdef = base_param(registry, xml_parent, data, False,
'hudson.plugins.promoted__builds.parameters.'
'PromotedBuildParameterDefinition')
pdef = base_param(
registry,
xml_parent,
data,
False,
"hudson.plugins.promoted__builds.parameters."
"PromotedBuildParameterDefinition",
)
try:
XML.SubElement(pdef, 'projectName').text = data['project-name']
XML.SubElement(pdef, "projectName").text = data["project-name"]
except KeyError:
raise MissingAttributeError('project-name')
raise MissingAttributeError("project-name")
XML.SubElement(pdef, 'promotionProcessName').text = data.get(
'promotion-name', None)
XML.SubElement(pdef, "promotionProcessName").text = data.get("promotion-name", None)
def password_param(registry, xml_parent, data):
@ -120,8 +125,9 @@ def password_param(registry, xml_parent, data):
default: 1HSC0Ts6E161FysGf+e1xasgsHkgleLh09JUTYnipPvw=
description: "A parameter named FOO."
"""
base_param(registry, xml_parent, data, True,
'hudson.model.PasswordParameterDefinition')
base_param(
registry, xml_parent, data, True, "hudson.model.PasswordParameterDefinition"
)
def bool_param(registry, xml_parent, data):
@ -140,9 +146,10 @@ def bool_param(registry, xml_parent, data):
default: false
description: "A parameter named FOO, defaults to 'false'."
"""
data['default'] = str(data.get('default', False)).lower()
base_param(registry, xml_parent, data, True,
'hudson.model.BooleanParameterDefinition')
data["default"] = str(data.get("default", False)).lower()
base_param(
registry, xml_parent, data, True, "hudson.model.BooleanParameterDefinition"
)
def file_param(registry, xml_parent, data):
@ -159,8 +166,9 @@ def file_param(registry, xml_parent, data):
name: test.txt
description: "Upload test.txt."
"""
base_param(registry, xml_parent, data, False,
'hudson.model.FileParameterDefinition')
base_param(
registry, xml_parent, data, False, "hudson.model.FileParameterDefinition"
)
def text_param(registry, xml_parent, data):
@ -179,8 +187,7 @@ def text_param(registry, xml_parent, data):
default: bar
description: "A parameter named FOO, defaults to 'bar'."
"""
base_param(registry, xml_parent, data, True,
'hudson.model.TextParameterDefinition')
base_param(registry, xml_parent, data, True, "hudson.model.TextParameterDefinition")
def label_param(registry, xml_parent, data):
@ -204,35 +211,41 @@ def label_param(registry, xml_parent, data):
"""
pdef = base_param(registry, xml_parent, data, True,
'org.jvnet.jenkins.plugins.nodelabelparameter.'
'LabelParameterDefinition')
pdef = base_param(
registry,
xml_parent,
data,
True,
"org.jvnet.jenkins.plugins.nodelabelparameter." "LabelParameterDefinition",
)
valid_types = ['allCases', 'success', 'unstable']
valid_types = ["allCases", "success", "unstable"]
mapping = [
('all-nodes', 'allNodesMatchingLabel', False),
('matching-label', 'triggerIfResult', 'allCases', valid_types),
("all-nodes", "allNodesMatchingLabel", False),
("matching-label", "triggerIfResult", "allCases", valid_types),
]
helpers.convert_mapping_to_xml(pdef, data, mapping, fail_required=True)
eligibility_label = data.get('node-eligibility', 'all').lower()
eligibility_label = data.get("node-eligibility", "all").lower()
eligibility_label_dict = {
'all': 'org.jvnet.jenkins.plugins.'
'nodelabelparameter.node.'
'AllNodeEligibility',
'ignore-offline': 'org.jvnet.jenkins.plugins.'
'nodelabelparameter.node.'
'IgnoreOfflineNodeEligibility',
'ignore-temp-offline': 'org.jvnet.jenkins.plugins.'
'nodelabelparameter.node.'
'IgnoreTempOfflineNodeEligibility',
"all": "org.jvnet.jenkins.plugins."
"nodelabelparameter.node."
"AllNodeEligibility",
"ignore-offline": "org.jvnet.jenkins.plugins."
"nodelabelparameter.node."
"IgnoreOfflineNodeEligibility",
"ignore-temp-offline": "org.jvnet.jenkins.plugins."
"nodelabelparameter.node."
"IgnoreTempOfflineNodeEligibility",
}
if eligibility_label not in eligibility_label_dict:
raise InvalidAttributeError(eligibility_label, eligibility_label,
eligibility_label_dict.keys())
raise InvalidAttributeError(
eligibility_label, eligibility_label, eligibility_label_dict.keys()
)
XML.SubElement(pdef, 'nodeEligibility').set(
"class", eligibility_label_dict[eligibility_label])
XML.SubElement(pdef, "nodeEligibility").set(
"class", eligibility_label_dict[eligibility_label]
)
def node_param(registry, xml_parent, data):
@ -263,30 +276,37 @@ def node_param(registry, xml_parent, data):
:language: yaml
"""
pdef = base_param(registry, xml_parent, data, False,
'org.jvnet.jenkins.plugins.nodelabelparameter.'
'NodeParameterDefinition')
default = XML.SubElement(pdef, 'defaultSlaves')
if 'default-slaves' in data:
for slave in data['default-slaves']:
XML.SubElement(default, 'string').text = slave
allowed = XML.SubElement(pdef, 'allowedSlaves')
if 'allowed-slaves' in data:
for slave in data['allowed-slaves']:
XML.SubElement(allowed, 'string').text = slave
XML.SubElement(pdef, 'ignoreOfflineNodes').text = str(
data.get('ignore-offline-nodes', False)).lower()
pdef = base_param(
registry,
xml_parent,
data,
False,
"org.jvnet.jenkins.plugins.nodelabelparameter." "NodeParameterDefinition",
)
default = XML.SubElement(pdef, "defaultSlaves")
if "default-slaves" in data:
for slave in data["default-slaves"]:
XML.SubElement(default, "string").text = slave
allowed = XML.SubElement(pdef, "allowedSlaves")
if "allowed-slaves" in data:
for slave in data["allowed-slaves"]:
XML.SubElement(allowed, "string").text = slave
XML.SubElement(pdef, "ignoreOfflineNodes").text = str(
data.get("ignore-offline-nodes", False)
).lower()
if data.get('allowed-multiselect', False):
XML.SubElement(pdef, 'triggerIfResult').text = \
'allowMultiSelectionForConcurrentBuilds'
if data.get("allowed-multiselect", False):
XML.SubElement(
pdef, "triggerIfResult"
).text = "allowMultiSelectionForConcurrentBuilds"
else:
XML.SubElement(pdef, 'triggerIfResult').text = \
'multiSelectionDisallowed'
XML.SubElement(pdef, 'allowMultiNodeSelection').text = str(
data.get('allowed-multiselect', False)).lower()
XML.SubElement(pdef, 'triggerConcurrentBuilds').text = str(
data.get('allowed-multiselect', False)).lower()
XML.SubElement(pdef, "triggerIfResult").text = "multiSelectionDisallowed"
XML.SubElement(pdef, "allowMultiNodeSelection").text = str(
data.get("allowed-multiselect", False)
).lower()
XML.SubElement(pdef, "triggerConcurrentBuilds").text = str(
data.get("allowed-multiselect", False)
).lower()
def choice_param(registry, xml_parent, data):
@ -307,13 +327,13 @@ def choice_param(registry, xml_parent, data):
- glance
description: "On which project to run?"
"""
pdef = base_param(registry, xml_parent, data, False,
'hudson.model.ChoiceParameterDefinition')
choices = XML.SubElement(pdef, 'choices',
{'class': 'java.util.Arrays$ArrayList'})
a = XML.SubElement(choices, 'a', {'class': 'string-array'})
for choice in data['choices']:
XML.SubElement(a, 'string').text = choice
pdef = base_param(
registry, xml_parent, data, False, "hudson.model.ChoiceParameterDefinition"
)
choices = XML.SubElement(pdef, "choices", {"class": "java.util.Arrays$ArrayList"})
a = XML.SubElement(choices, "a", {"class": "string-array"})
for choice in data["choices"]:
XML.SubElement(a, "string").text = choice
def credentials_param(registry, xml_parent, data):
@ -345,30 +365,33 @@ def credentials_param(registry, xml_parent, data):
"""
cred_impl_types = {
'any': 'com.cloudbees.plugins.credentials.common.StandardCredentials',
'usernamepassword': 'com.cloudbees.plugins.credentials.impl.' +
'UsernamePasswordCredentialsImpl',
'sshkey': 'com.cloudbees.jenkins.plugins.sshcredentials.impl.' +
'BasicSSHUserPrivateKey',
'secretfile': 'org.jenkinsci.plugins.plaincredentials.impl.' +
'FileCredentialsImpl',
'secrettext': 'org.jenkinsci.plugins.plaincredentials.impl.' +
'StringCredentialsImpl',
'certificate': 'com.cloudbees.plugins.credentials.impl.' +
'CertificateCredentialsImpl'
"any": "com.cloudbees.plugins.credentials.common.StandardCredentials",
"usernamepassword": "com.cloudbees.plugins.credentials.impl."
+ "UsernamePasswordCredentialsImpl",
"sshkey": "com.cloudbees.jenkins.plugins.sshcredentials.impl."
+ "BasicSSHUserPrivateKey",
"secretfile": "org.jenkinsci.plugins.plaincredentials.impl."
+ "FileCredentialsImpl",
"secrettext": "org.jenkinsci.plugins.plaincredentials.impl."
+ "StringCredentialsImpl",
"certificate": "com.cloudbees.plugins.credentials.impl."
+ "CertificateCredentialsImpl",
}
cred_type = data.get('type', 'any').lower()
cred_type = data.get("type", "any").lower()
if cred_type not in cred_impl_types:
raise InvalidAttributeError('type', cred_type, cred_impl_types.keys())
raise InvalidAttributeError("type", cred_type, cred_impl_types.keys())
pdef = base_param(registry, xml_parent, data, False,
'com.cloudbees.plugins.credentials.' +
'CredentialsParameterDefinition')
XML.SubElement(pdef, 'defaultValue').text = data.get('default', '')
XML.SubElement(pdef, 'credentialType').text = cred_impl_types[cred_type]
XML.SubElement(pdef, 'required').text = str(data.get('required',
False)).lower()
pdef = base_param(
registry,
xml_parent,
data,
False,
"com.cloudbees.plugins.credentials." + "CredentialsParameterDefinition",
)
XML.SubElement(pdef, "defaultValue").text = data.get("default", "")
XML.SubElement(pdef, "credentialType").text = cred_impl_types[cred_type]
XML.SubElement(pdef, "required").text = str(data.get("required", False)).lower()
def run_param(registry, xml_parent, data):
@ -385,11 +408,10 @@ def run_param(registry, xml_parent, data):
:language: yaml
"""
pdef = base_param(registry, xml_parent, data, False,
'hudson.model.RunParameterDefinition')
mapping = [
('project-name', 'projectName', None),
]
pdef = base_param(
registry, xml_parent, data, False, "hudson.model.RunParameterDefinition"
)
mapping = [("project-name", "projectName", None)]
helpers.convert_mapping_to_xml(pdef, data, mapping, fail_required=True)
@ -456,43 +478,50 @@ def extended_choice_param(registry, xml_parent, data):
/../../tests/parameters/fixtures/extended-choice-param-full.yaml
:language: yaml
"""
pdef = base_param(registry, xml_parent, data, False,
'com.cwctravel.hudson.plugins.'
'extended__choice__parameter.'
'ExtendedChoiceParameterDefinition')
pdef = base_param(
registry,
xml_parent,
data,
False,
"com.cwctravel.hudson.plugins."
"extended__choice__parameter."
"ExtendedChoiceParameterDefinition",
)
choicedict = {'single-select': 'PT_SINGLE_SELECT',
'multi-select': 'PT_MULTI_SELECT',
'radio': 'PT_RADIO',
'checkbox': 'PT_CHECKBOX',
'textbox': 'PT_TEXTBOX',
'PT_SINGLE_SELECT': 'PT_SINGLE_SELECT',
'PT_MULTI_SELECT': 'PT_MULTI_SELECT',
'PT_RADIO': 'PT_RADIO',
'PT_CHECKBOX': 'PT_CHECKBOX',
'PT_TEXTBOX': 'PT_TEXTBOX'}
choicedict = {
"single-select": "PT_SINGLE_SELECT",
"multi-select": "PT_MULTI_SELECT",
"radio": "PT_RADIO",
"checkbox": "PT_CHECKBOX",
"textbox": "PT_TEXTBOX",
"PT_SINGLE_SELECT": "PT_SINGLE_SELECT",
"PT_MULTI_SELECT": "PT_MULTI_SELECT",
"PT_RADIO": "PT_RADIO",
"PT_CHECKBOX": "PT_CHECKBOX",
"PT_TEXTBOX": "PT_TEXTBOX",
}
mapping = [
('value', 'value', ''),
('visible-items', 'visibleItemCount', 5),
('multi-select-delimiter', 'multiSelectDelimiter', ','),
('quote-value', 'quoteValue', False),
('default-value', 'defaultValue', ''),
('value-description', 'descriptionPropertyValue', ''),
('type', 'type', 'single-select', choicedict),
('property-file', 'propertyFile', ''),
('property-key', 'propertyKey', ''),
('default-property-file', 'defaultPropertyFile', ''),
('default-property-key', 'defaultPropertyKey', ''),
('description-property-file', 'descriptionPropertyFile', ''),
('description-property-key', 'descriptionPropertyKey', ''),
('bindings', 'bindings', ''),
('groovy-script', 'groovyScript', ''),
('groovy-script-file', 'groovyScriptFile', ''),
('classpath', 'groovyClasspath', ''),
('default-groovy-script', 'defaultGroovyScript', ''),
('default-groovy-classpath', 'defaultGroovyClasspath', ''),
('description-groovy-script', 'descriptionGroovyScript', ''),
('description-groovy-classpath', 'descriptionGroovyClasspath', ''),
("value", "value", ""),
("visible-items", "visibleItemCount", 5),
("multi-select-delimiter", "multiSelectDelimiter", ","),
("quote-value", "quoteValue", False),
("default-value", "defaultValue", ""),
("value-description", "descriptionPropertyValue", ""),
("type", "type", "single-select", choicedict),
("property-file", "propertyFile", ""),
("property-key", "propertyKey", ""),
("default-property-file", "defaultPropertyFile", ""),
("default-property-key", "defaultPropertyKey", ""),
("description-property-file", "descriptionPropertyFile", ""),
("description-property-key", "descriptionPropertyKey", ""),
("bindings", "bindings", ""),
("groovy-script", "groovyScript", ""),
("groovy-script-file", "groovyScriptFile", ""),
("classpath", "groovyClasspath", ""),
("default-groovy-script", "defaultGroovyScript", ""),
("default-groovy-classpath", "defaultGroovyClasspath", ""),
("description-groovy-script", "descriptionGroovyScript", ""),
("description-groovy-classpath", "descriptionGroovyClasspath", ""),
]
helpers.convert_mapping_to_xml(pdef, data, mapping, fail_required=True)
@ -519,13 +548,15 @@ def validating_string_param(registry, xml_parent, data):
regex: [A-Za-z]*
msg: Your entered value failed validation
"""
pdef = base_param(registry, xml_parent, data, True,
'hudson.plugins.validating__string__parameter.'
'ValidatingStringParameterDefinition')
mapping = [
('regex', 'regex', None),
('msg', 'failedValidationMessage', None),
]
pdef = base_param(
registry,
xml_parent,
data,
True,
"hudson.plugins.validating__string__parameter."
"ValidatingStringParameterDefinition",
)
mapping = [("regex", "regex", None), ("msg", "failedValidationMessage", None)]
helpers.convert_mapping_to_xml(pdef, data, mapping, fail_required=True)
@ -557,17 +588,21 @@ def svn_tags_param(registry, xml_parent, data):
url: http://svn.example.com/repo
filter: [A-za-z0-9]*
"""
pdef = base_param(registry, xml_parent, data, True,
'hudson.scm.listtagsparameter.'
'ListSubversionTagsParameterDefinition')
pdef = base_param(
registry,
xml_parent,
data,
True,
"hudson.scm.listtagsparameter." "ListSubversionTagsParameterDefinition",
)
mapping = [
('url', 'tagsDir', None),
('credentials-id', 'credentialsId', ''),
('filter', 'tagsFilter', ''),
('max-tags', 'maxTags', '100'),
('sort-newest-first', 'reverseByDate', True),
('sort-z-to-a', 'reverseByName', False),
('', 'uuid', "1-1-1-1-1"),
("url", "tagsDir", None),
("credentials-id", "credentialsId", ""),
("filter", "tagsFilter", ""),
("max-tags", "maxTags", "100"),
("sort-newest-first", "reverseByDate", True),
("sort-z-to-a", "reverseByName", False),
("", "uuid", "1-1-1-1-1"),
]
helpers.convert_mapping_to_xml(pdef, data, mapping, fail_required=True)
@ -597,8 +632,7 @@ def dynamic_choice_param(registry, xml_parent, data):
remote: false
read-only: false
"""
dynamic_param_common(registry, xml_parent, data,
'ChoiceParameterDefinition')
dynamic_param_common(registry, xml_parent, data, "ChoiceParameterDefinition")
def dynamic_string_param(registry, xml_parent, data):
@ -626,8 +660,7 @@ def dynamic_string_param(registry, xml_parent, data):
remote: false
read-only: false
"""
dynamic_param_common(registry, xml_parent, data,
'StringParameterDefinition')
dynamic_param_common(registry, xml_parent, data, "StringParameterDefinition")
def dynamic_choice_scriptler_param(registry, xml_parent, data):
@ -663,8 +696,9 @@ def dynamic_choice_scriptler_param(registry, xml_parent, data):
remote: false
read-only: false
"""
dynamic_scriptler_param_common(registry, xml_parent, data,
'ScriptlerChoiceParameterDefinition')
dynamic_scriptler_param_common(
registry, xml_parent, data, "ScriptlerChoiceParameterDefinition"
)
def dynamic_string_scriptler_param(registry, xml_parent, data):
@ -700,54 +734,64 @@ def dynamic_string_scriptler_param(registry, xml_parent, data):
remote: false
read-only: false
"""
dynamic_scriptler_param_common(registry, xml_parent, data,
'ScriptlerStringParameterDefinition')
dynamic_scriptler_param_common(
registry, xml_parent, data, "ScriptlerStringParameterDefinition"
)
def dynamic_param_common(registry, xml_parent, data, ptype):
pdef = base_param(registry, xml_parent, data, False,
'com.seitenbau.jenkins.plugins.dynamicparameter.' +
ptype)
XML.SubElement(pdef, '__remote').text = str(
data.get('remote', False)).lower()
XML.SubElement(pdef, '__script').text = data.get('script', None)
localBaseDir = XML.SubElement(pdef, '__localBaseDirectory',
{'serialization': 'custom'})
filePath = XML.SubElement(localBaseDir, 'hudson.FilePath')
default = XML.SubElement(filePath, 'default')
XML.SubElement(filePath, 'boolean').text = "true"
XML.SubElement(default, 'remote').text = \
"/var/lib/jenkins/dynamic_parameter/classpath"
XML.SubElement(pdef, '__remoteBaseDirectory').text = \
"dynamic_parameter_classpath"
XML.SubElement(pdef, '__classPath').text = data.get('classpath', None)
XML.SubElement(pdef, 'readonlyInputField').text = str(
data.get('read-only', False)).lower()
pdef = base_param(
registry,
xml_parent,
data,
False,
"com.seitenbau.jenkins.plugins.dynamicparameter." + ptype,
)
XML.SubElement(pdef, "__remote").text = str(data.get("remote", False)).lower()
XML.SubElement(pdef, "__script").text = data.get("script", None)
localBaseDir = XML.SubElement(
pdef, "__localBaseDirectory", {"serialization": "custom"}
)
filePath = XML.SubElement(localBaseDir, "hudson.FilePath")
default = XML.SubElement(filePath, "default")
XML.SubElement(filePath, "boolean").text = "true"
XML.SubElement(
default, "remote"
).text = "/var/lib/jenkins/dynamic_parameter/classpath"
XML.SubElement(pdef, "__remoteBaseDirectory").text = "dynamic_parameter_classpath"
XML.SubElement(pdef, "__classPath").text = data.get("classpath", None)
XML.SubElement(pdef, "readonlyInputField").text = str(
data.get("read-only", False)
).lower()
def dynamic_scriptler_param_common(registry, xml_parent, data, ptype):
pdef = base_param(registry, xml_parent, data, False,
'com.seitenbau.jenkins.plugins.dynamicparameter.'
'scriptler.' + ptype)
parametersXML = XML.SubElement(pdef, '__parameters')
parameters = data.get('parameters', [])
pdef = base_param(
registry,
xml_parent,
data,
False,
"com.seitenbau.jenkins.plugins.dynamicparameter." "scriptler." + ptype,
)
parametersXML = XML.SubElement(pdef, "__parameters")
parameters = data.get("parameters", [])
if parameters:
mapping = [
('name', 'name', None),
('value', 'value', None),
]
mapping = [("name", "name", None), ("value", "value", None)]
for parameter in parameters:
parameterXML = XML.SubElement(parametersXML,
'com.seitenbau.jenkins.plugins.'
'dynamicparameter.scriptler.'
'ScriptlerParameterDefinition_'
'-ScriptParameter')
parameterXML = XML.SubElement(
parametersXML,
"com.seitenbau.jenkins.plugins."
"dynamicparameter.scriptler."
"ScriptlerParameterDefinition_"
"-ScriptParameter",
)
helpers.convert_mapping_to_xml(
parameterXML, parameter, mapping, fail_required=True)
parameterXML, parameter, mapping, fail_required=True
)
mapping = [
('script-id', '__scriptlerScriptId', None),
('remote', '__remote', False),
('read-only', 'readonlyInputField', False),
("script-id", "__scriptlerScriptId", None),
("remote", "__remote", False),
("read-only", "readonlyInputField", False),
]
helpers.convert_mapping_to_xml(pdef, data, mapping, fail_required=True)
@ -770,14 +814,16 @@ def matrix_combinations_param(registry, xml_parent, data):
:language: yaml
"""
element_name = 'hudson.plugins.matrix__configuration__parameter.' \
'MatrixCombinationsParameterDefinition'
element_name = (
"hudson.plugins.matrix__configuration__parameter."
"MatrixCombinationsParameterDefinition"
)
pdef = XML.SubElement(xml_parent, element_name)
mapping = [
('name', 'name', None),
('description', 'description', ''),
('filter', 'defaultCombinationFilter', ''),
("name", "name", None),
("description", "description", ""),
("filter", "defaultCombinationFilter", ""),
]
helpers.convert_mapping_to_xml(pdef, data, mapping, fail_required=True)
@ -806,15 +852,13 @@ def copyartifact_build_selector_param(registry, xml_parent, data):
"""
t = XML.SubElement(xml_parent, 'hudson.plugins.copyartifact.'
'BuildSelectorParameter')
mapping = [
('name', 'name', None),
('description', 'description', ''),
]
t = XML.SubElement(
xml_parent, "hudson.plugins.copyartifact." "BuildSelectorParameter"
)
mapping = [("name", "name", None), ("description", "description", "")]
helpers.convert_mapping_to_xml(t, data, mapping, fail_required=True)
helpers.copyartifact_build_selector(t, data, 'defaultSelector')
helpers.copyartifact_build_selector(t, data, "defaultSelector")
def maven_metadata_param(registry, xml_parent, data):
@ -855,31 +899,34 @@ def maven_metadata_param(registry, xml_parent, data):
:language: yaml
"""
pdef = base_param(registry, xml_parent, data, False,
'eu.markov.jenkins.plugin.mvnmeta.'
'MavenMetadataParameterDefinition')
pdef = base_param(
registry,
xml_parent,
data,
False,
"eu.markov.jenkins.plugin.mvnmeta." "MavenMetadataParameterDefinition",
)
mapping = [
('repository-base-url', 'repoBaseUrl', ''),
('artifact-group-id', 'groupId', ''),
('artifact-id', 'artifactId', ''),
('packaging', 'packaging', ''),
('default-value', 'defaultValue', ''),
('versions-filter', 'versionFilter', ''),
("repository-base-url", "repoBaseUrl", ""),
("artifact-group-id", "groupId", ""),
("artifact-id", "artifactId", ""),
("packaging", "packaging", ""),
("default-value", "defaultValue", ""),
("versions-filter", "versionFilter", ""),
]
helpers.convert_mapping_to_xml(pdef, data, mapping, fail_required=True)
sort_order = data.get('sorting-order', 'descending').lower()
sort_dict = {'descending': 'DESC',
'ascending': 'ASC'}
sort_order = data.get("sorting-order", "descending").lower()
sort_dict = {"descending": "DESC", "ascending": "ASC"}
if sort_order not in sort_dict:
raise InvalidAttributeError(sort_order, sort_order, sort_dict.keys())
XML.SubElement(pdef, 'sortOrder').text = sort_dict[sort_order]
XML.SubElement(pdef, "sortOrder").text = sort_dict[sort_order]
mapping = [
('maximum-versions-to-display', 'maxVersions', 10),
('repository-username', 'username', ''),
('repository-password', 'password', ''),
("maximum-versions-to-display", "maxVersions", 10),
("repository-username", "username", ""),
("repository-password", "password", ""),
]
helpers.convert_mapping_to_xml(pdef, data, mapping, fail_required=True)
@ -901,8 +948,9 @@ def hidden_param(parser, xml_parent, data):
:language: yaml
"""
base_param(parser, xml_parent, data, True,
'com.wangyin.parameter.WHideParameterDefinition')
base_param(
parser, xml_parent, data, True, "com.wangyin.parameter.WHideParameterDefinition"
)
def random_string_param(registry, xml_parent, data):
@ -923,16 +971,17 @@ def random_string_param(registry, xml_parent, data):
/../../tests/parameters/fixtures/random-string-param001.yaml
:language: yaml
"""
pdef = XML.SubElement(xml_parent,
'hudson.plugins.random__string__parameter.'
'RandomStringParameterDefinition')
if 'name' not in data:
raise JenkinsJobsException('random-string must have a name parameter.')
pdef = XML.SubElement(
xml_parent,
"hudson.plugins.random__string__parameter." "RandomStringParameterDefinition",
)
if "name" not in data:
raise JenkinsJobsException("random-string must have a name parameter.")
mapping = [
('name', 'name', None),
('description', 'description', ''),
('failed-validation-message', 'failedValidationMessage', ''),
("name", "name", None),
("description", "description", ""),
("failed-validation-message", "failedValidationMessage", ""),
]
helpers.convert_mapping_to_xml(pdef, data, mapping, fail_required=True)
@ -1005,40 +1054,41 @@ def git_parameter_param(registry, xml_parent, data):
/../../tests/parameters/fixtures/git-parameter-param-full.yaml
:language: yaml
"""
pdef = XML.SubElement(xml_parent,
'net.uaznia.lukanus.hudson.plugins.gitparameter.'
'GitParameterDefinition')
pdef = XML.SubElement(
xml_parent,
"net.uaznia.lukanus.hudson.plugins.gitparameter." "GitParameterDefinition",
)
valid_types = [
'PT_TAG',
'PT_BRANCH',
'PT_BRANCH_TAG',
'PT_REVISION',
'PT_PULL_REQUEST',
"PT_TAG",
"PT_BRANCH",
"PT_BRANCH_TAG",
"PT_REVISION",
"PT_PULL_REQUEST",
]
valid_sort_modes = [
'NONE',
'ASCENDING',
'ASCENDING_SMART',
'DESCENDING',
'DESCENDING_SMART',
"NONE",
"ASCENDING",
"ASCENDING_SMART",
"DESCENDING",
"DESCENDING_SMART",
]
valid_selected_values = ['NONE', 'TOP', 'DEFAULT']
valid_selected_values = ["NONE", "TOP", "DEFAULT"]
mapping = [
('name', 'name', None),
('description', 'description', ''),
('type', 'type', 'PT_TAG', valid_types),
('branch', 'branch', ''),
('tagFilter', 'tagFilter', '*'),
('branchFilter', 'branchFilter', '.*'),
('sortMode', 'sortMode', 'NONE', valid_sort_modes),
('defaultValue', 'defaultValue', ''),
('selectedValue', 'selectedValue', 'NONE', valid_selected_values),
('useRepository', 'useRepository', ''),
('quickFilterEnabled', 'quickFilterEnabled', False),
("name", "name", None),
("description", "description", ""),
("type", "type", "PT_TAG", valid_types),
("branch", "branch", ""),
("tagFilter", "tagFilter", "*"),
("branchFilter", "branchFilter", ".*"),
("sortMode", "sortMode", "NONE", valid_sort_modes),
("defaultValue", "defaultValue", ""),
("selectedValue", "selectedValue", "NONE", valid_selected_values),
("useRepository", "useRepository", ""),
("quickFilterEnabled", "quickFilterEnabled", False),
]
helpers.convert_mapping_to_xml(pdef, data, mapping, fail_required=True)
@ -1046,28 +1096,29 @@ def git_parameter_param(registry, xml_parent, data):
class Parameters(jenkins_jobs.modules.base.Base):
sequence = 21
component_type = 'parameter'
component_list_type = 'parameters'
component_type = "parameter"
component_list_type = "parameters"
def gen_xml(self, xml_parent, data):
properties = xml_parent.find('properties')
properties = xml_parent.find("properties")
if properties is None:
properties = XML.SubElement(xml_parent, 'properties')
properties = XML.SubElement(xml_parent, "properties")
parameters = data.get('parameters', [])
hmodel = 'hudson.model.'
parameters = data.get("parameters", [])
hmodel = "hudson.model."
if parameters:
# The conditionals here are to work around the extended_choice
# parameter also being definable in the properties module. This
# usage has been deprecated but not removed. Because it may have
# added these elements before us, we need to check if they already
# exist, and only add them if they're missing.
pdefp = properties.find(hmodel + 'ParametersDefinitionProperty')
pdefp = properties.find(hmodel + "ParametersDefinitionProperty")
if pdefp is None:
pdefp = XML.SubElement(properties,
hmodel + 'ParametersDefinitionProperty')
pdefs = pdefp.find('parameterDefinitions')
pdefp = XML.SubElement(
properties, hmodel + "ParametersDefinitionProperty"
)
pdefs = pdefp.find("parameterDefinitions")
if pdefs is None:
pdefs = XML.SubElement(pdefp, 'parameterDefinitions')
pdefs = XML.SubElement(pdefp, "parameterDefinitions")
for param in parameters:
self.registry.dispatch('parameter', pdefs, param)
self.registry.dispatch("parameter", pdefs, param)
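
The comment in gen_xml above describes a find-or-create idiom that recurs across the modules in this patch; distilled into a hypothetical helper:

import xml.etree.ElementTree as XML

def find_or_create(parent, tag):
    # Reuse an element another module may have added earlier; only
    # create it when missing, so repeated dispatch stays idempotent.
    node = parent.find(tag)
    if node is None:
        node = XML.SubElement(parent, tag)
    return node

project = XML.Element("project")
first = find_or_create(project, "properties")
second = find_or_create(project, "properties")
assert first is second  # the second call did not add a duplicate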


@ -40,5 +40,5 @@ class ExternalJob(jenkins_jobs.modules.base.Base):
sequence = 0
def root_xml(self, data):
xml_parent = XML.Element('hudson.model.ExternalJob')
xml_parent = XML.Element("hudson.model.ExternalJob")
return xml_parent


@ -59,15 +59,15 @@ class Flow(jenkins_jobs.modules.base.Base):
sequence = 0
def root_xml(self, data):
xml_parent = XML.Element('com.cloudbees.plugins.flow.BuildFlow')
xml_parent = XML.Element("com.cloudbees.plugins.flow.BuildFlow")
needs_workspace = data.get('needs-workspace', False)
needs_workspace = data.get("needs-workspace", False)
mapping = [
('dsl', 'dsl', ''),
('needs-workspace', 'buildNeedsWorkspace', False),
("dsl", "dsl", ""),
("needs-workspace", "buildNeedsWorkspace", False),
]
convert_mapping_to_xml(xml_parent, data, mapping, fail_required=True)
if needs_workspace and 'dsl-file' in data:
XML.SubElement(xml_parent, 'dslFile').text = data['dsl-file']
if needs_workspace and "dsl-file" in data:
XML.SubElement(xml_parent, "dslFile").text = data["dsl-file"]
return xml_parent


@ -43,19 +43,18 @@ class Folder(jenkins_jobs.modules.base.Base):
sequence = 0
def root_xml(self, data):
xml_parent = XML.Element('com.cloudbees.hudson.plugins.folder.Folder',
plugin="cloudbees-folder")
attributes = {"class": "com.cloudbees.hudson.plugins.folder."
"icons.StockFolderIcon"}
XML.SubElement(xml_parent, 'icon', attrib=attributes)
XML.SubElement(xml_parent, 'views')
xml_parent = XML.Element(
"com.cloudbees.hudson.plugins.folder.Folder", plugin="cloudbees-folder"
)
attributes = {
"class": "com.cloudbees.hudson.plugins.folder." "icons.StockFolderIcon"
}
XML.SubElement(xml_parent, "icon", attrib=attributes)
XML.SubElement(xml_parent, "views")
attributes = {"class": "hudson.views.DefaultViewsTabBar"}
XML.SubElement(xml_parent, 'viewsTabBar', attrib=attributes)
XML.SubElement(xml_parent, "viewsTabBar", attrib=attributes)
mappings = [
('', 'primaryView', 'All'),
('', 'healthMetrics', ''),
]
mappings = [("", "primaryView", "All"), ("", "healthMetrics", "")]
convert_mapping_to_xml(xml_parent, data, mappings, True)
return xml_parent


@ -36,5 +36,5 @@ class Freestyle(jenkins_jobs.modules.base.Base):
sequence = 0
def root_xml(self, data):
xml_parent = XML.Element('project')
xml_parent = XML.Element("project")
return xml_parent


@ -113,139 +113,144 @@ class Matrix(jenkins_jobs.modules.base.Base):
# List the supported Axis names in our configuration
# and map them to the Jenkins XML element name.
supported_axis = {
'label-expression': 'hudson.matrix.LabelExpAxis',
'user-defined': 'hudson.matrix.TextAxis',
'slave': 'hudson.matrix.LabelAxis',
'jdk': 'hudson.matrix.JDKAxis',
'dynamic': 'ca.silvermaplesolutions.jenkins.plugins.daxis.DynamicAxis',
'python': 'jenkins.plugins.shiningpanda.matrix.PythonAxis',
'tox': 'jenkins.plugins.shiningpanda.matrix.ToxAxis',
'groovy': 'org.jenkinsci.plugins.GroovyAxis',
'yaml': 'org.jenkinsci.plugins.yamlaxis.YamlAxis',
"label-expression": "hudson.matrix.LabelExpAxis",
"user-defined": "hudson.matrix.TextAxis",
"slave": "hudson.matrix.LabelAxis",
"jdk": "hudson.matrix.JDKAxis",
"dynamic": "ca.silvermaplesolutions.jenkins.plugins.daxis.DynamicAxis",
"python": "jenkins.plugins.shiningpanda.matrix.PythonAxis",
"tox": "jenkins.plugins.shiningpanda.matrix.ToxAxis",
"groovy": "org.jenkinsci.plugins.GroovyAxis",
"yaml": "org.jenkinsci.plugins.yamlaxis.YamlAxis",
}
supported_strategies = {
# Jenkins built-in, default
'execution-strategy':
'hudson.matrix.DefaultMatrixExecutionStrategyImpl',
'yaml-strategy':
'org.jenkinsci.plugins.yamlaxis.YamlMatrixExecutionStrategy',
'p4-strategy':
'org.jenkinsci.plugins.p4.matrix.MatrixOptions'
"execution-strategy": "hudson.matrix.DefaultMatrixExecutionStrategyImpl",
"yaml-strategy": "org.jenkinsci.plugins.yamlaxis.YamlMatrixExecutionStrategy",
"p4-strategy": "org.jenkinsci.plugins.p4.matrix.MatrixOptions",
}
def root_xml(self, data):
root = XML.Element('matrix-project')
root = XML.Element("matrix-project")
# Default to 'execution-strategy'
strategies = ([s for s in data.keys() if s.endswith('-strategy')] or
['execution-strategy'])
strategies = [s for s in data.keys() if s.endswith("-strategy")] or [
"execution-strategy"
]
# Job can not have multiple strategies
if len(strategies) > 1:
raise ValueError(
'matrix-project does not support multiple strategies. '
'Given %s: %s' % (len(strategies), ', '.join(strategies)))
"matrix-project does not support multiple strategies. "
"Given %s: %s" % (len(strategies), ", ".join(strategies))
)
strategy_name = strategies[0]
if strategy_name not in self.supported_strategies:
raise ValueError(
'Given strategy %s. Only %s strategies are supported'
% (strategy_name, self.supported_strategies.keys()))
"Given strategy %s. Only %s strategies are supported"
% (strategy_name, self.supported_strategies.keys())
)
ex_r = XML.SubElement(
root, 'executionStrategy',
{'class': self.supported_strategies[strategy_name]})
root,
"executionStrategy",
{"class": self.supported_strategies[strategy_name]},
)
strategy = data.get(strategy_name, {})
if strategy_name == 'execution-strategy':
XML.SubElement(root, 'combinationFilter').text = (
str(strategy.get('combination-filter', '')).rstrip()
)
XML.SubElement(ex_r, 'runSequentially').text = (
str(strategy.get('sequential', False)).lower()
)
if 'touchstone' in strategy:
XML.SubElement(ex_r, 'touchStoneCombinationFilter').text = (
str(strategy['touchstone'].get('expr', ''))
if strategy_name == "execution-strategy":
XML.SubElement(root, "combinationFilter").text = str(
strategy.get("combination-filter", "")
).rstrip()
XML.SubElement(ex_r, "runSequentially").text = str(
strategy.get("sequential", False)
).lower()
if "touchstone" in strategy:
XML.SubElement(ex_r, "touchStoneCombinationFilter").text = str(
strategy["touchstone"].get("expr", "")
)
threshold = strategy['touchstone'].get(
'result', 'stable').upper()
supported_thresholds = ('STABLE', 'UNSTABLE')
threshold = strategy["touchstone"].get("result", "stable").upper()
supported_thresholds = ("STABLE", "UNSTABLE")
if threshold not in supported_thresholds:
raise InvalidAttributeError(
'touchstone', threshold, supported_thresholds)
"touchstone", threshold, supported_thresholds
)
# Web ui uses Stable but hudson.model.Result has Success
if threshold == 'STABLE':
threshold = 'SUCCESS'
if threshold == "STABLE":
threshold = "SUCCESS"
t_r = XML.SubElement(ex_r, 'touchStoneResultCondition')
for sub_elem in ('name', 'ordinal', 'color'):
XML.SubElement(t_r, sub_elem).text = (
hudson_model.THRESHOLDS[threshold][sub_elem])
t_r = XML.SubElement(ex_r, "touchStoneResultCondition")
for sub_elem in ("name", "ordinal", "color"):
XML.SubElement(t_r, sub_elem).text = hudson_model.THRESHOLDS[
threshold
][sub_elem]
elif strategy_name == 'yaml-strategy':
filename = str(strategy.get('filename', ''))
text = str(strategy.get('text', ''))
exclude_key = str(strategy.get('exclude-key', ''))
elif strategy_name == "yaml-strategy":
filename = str(strategy.get("filename", ""))
text = str(strategy.get("text", ""))
exclude_key = str(strategy.get("exclude-key", ""))
if bool(filename) == bool(text): # xor with str
raise ValueError('yaml-strategy must be given '
'either "filename" or "text"')
raise ValueError(
"yaml-strategy must be given " 'either "filename" or "text"'
)
yamlType = (filename and 'file') or (text and 'text')
XML.SubElement(ex_r, 'yamlType').text = yamlType
yamlType = (filename and "file") or (text and "text")
XML.SubElement(ex_r, "yamlType").text = yamlType
XML.SubElement(ex_r, 'yamlFile').text = filename
XML.SubElement(ex_r, 'yamlText').text = text
XML.SubElement(ex_r, "yamlFile").text = filename
XML.SubElement(ex_r, "yamlText").text = text
XML.SubElement(ex_r, 'excludeKey').text = exclude_key
XML.SubElement(ex_r, "excludeKey").text = exclude_key
elif strategy_name == 'p4-strategy':
XML.SubElement(ex_r, 'runSequentially').text = (
str(strategy.get('sequential', False)).lower()
)
elif strategy_name == "p4-strategy":
XML.SubElement(ex_r, "runSequentially").text = str(
strategy.get("sequential", False)
).lower()
XML.SubElement(ex_r, 'buildParent').text = (
str(strategy.get('build-parent', False)).lower()
)
XML.SubElement(ex_r, "buildParent").text = str(
strategy.get("build-parent", False)
).lower()
ax_root = XML.SubElement(root, 'axes')
for axis_ in data.get('axes', []):
axis = axis_['axis']
axis_type = axis['type']
ax_root = XML.SubElement(root, "axes")
for axis_ in data.get("axes", []):
axis = axis_["axis"]
axis_type = axis["type"]
if axis_type not in self.supported_axis:
raise ValueError('Only %s axes types are supported'
% self.supported_axis.keys())
raise ValueError(
"Only %s axes types are supported" % self.supported_axis.keys()
)
axis_name = self.supported_axis.get(axis_type)
lbl_root = XML.SubElement(ax_root, axis_name)
name, values = axis.get('name', ''), axis.get('values', [''])
if axis_type == 'jdk':
XML.SubElement(lbl_root, 'name').text = 'jdk'
elif axis_type == 'python':
XML.SubElement(lbl_root, 'name').text = 'PYTHON'
elif axis_type == 'tox':
XML.SubElement(lbl_root, 'name').text = 'TOXENV'
name, values = axis.get("name", ""), axis.get("values", [""])
if axis_type == "jdk":
XML.SubElement(lbl_root, "name").text = "jdk"
elif axis_type == "python":
XML.SubElement(lbl_root, "name").text = "PYTHON"
elif axis_type == "tox":
XML.SubElement(lbl_root, "name").text = "TOXENV"
else:
XML.SubElement(lbl_root, 'name').text = str(name)
XML.SubElement(lbl_root, "name").text = str(name)
if axis_type != "groovy":
v_root = XML.SubElement(lbl_root, 'values')
v_root = XML.SubElement(lbl_root, "values")
if axis_type == "dynamic":
XML.SubElement(v_root, 'string').text = str(values[0])
XML.SubElement(lbl_root, 'varName').text = str(values[0])
v_root = XML.SubElement(lbl_root, 'axisValues')
XML.SubElement(v_root, 'string').text = 'default'
XML.SubElement(v_root, "string").text = str(values[0])
XML.SubElement(lbl_root, "varName").text = str(values[0])
v_root = XML.SubElement(lbl_root, "axisValues")
XML.SubElement(v_root, "string").text = "default"
elif axis_type == "groovy":
command = XML.SubElement(lbl_root, 'groovyString')
command.text = axis.get('command')
XML.SubElement(lbl_root, 'computedValues').text = ''
command = XML.SubElement(lbl_root, "groovyString")
command.text = axis.get("command")
XML.SubElement(lbl_root, "computedValues").text = ""
elif axis_type == "yaml":
XML.SubElement(v_root, 'string').text = axis.get('filename')
XML.SubElement(v_root, "string").text = axis.get("filename")
else:
for v in values:
XML.SubElement(v_root, 'string').text = str(v)
XML.SubElement(v_root, "string").text = str(v)
return root
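
A compact illustration of the strategy-selection rule coded above, under the assumption that the job definition dict is the same data passed to root_xml:

def pick_strategy(data):
    # Any top-level key ending in '-strategy' selects a strategy;
    # with none present, Jenkins' built-in default is used.
    strategies = [s for s in data if s.endswith("-strategy")] or ["execution-strategy"]
    if len(strategies) > 1:
        raise ValueError("matrix-project does not support multiple strategies")
    return strategies[0]

assert pick_strategy({"axes": []}) == "execution-strategy"
assert pick_strategy({"yaml-strategy": {"filename": "matrix.yaml"}}) == "yaml-strategy"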

View File

@ -95,93 +95,109 @@ class Maven(jenkins_jobs.modules.base.Base):
sequence = 0
choices_private_repo = {
'default':
'hudson.maven.local_repo.DefaultLocalRepositoryLocator',
'local-to-workspace':
'hudson.maven.local_repo.PerJobLocalRepositoryLocator',
'local-to-executor':
'hudson.maven.local_repo.PerExecutorLocalRepositoryLocator',
"default": "hudson.maven.local_repo.DefaultLocalRepositoryLocator",
"local-to-workspace": "hudson.maven.local_repo.PerJobLocalRepositoryLocator",
"local-to-executor": "hudson.maven.local_repo.PerExecutorLocalRepositoryLocator",
}
def root_xml(self, data):
xml_parent = XML.Element('maven2-moduleset')
if 'maven' not in data:
xml_parent = XML.Element("maven2-moduleset")
if "maven" not in data:
return xml_parent
# determine version of plugin
plugin_info = self.registry.get_plugin_info("Maven Integration plugin")
version = pkg_resources.parse_version(plugin_info.get('version', '0'))
version = pkg_resources.parse_version(plugin_info.get("version", "0"))
if 'root-module' in data['maven']:
root_module = XML.SubElement(xml_parent, 'rootModule')
XML.SubElement(root_module, 'groupId').text = \
data['maven']['root-module']['group-id']
XML.SubElement(root_module, 'artifactId').text = \
data['maven']['root-module']['artifact-id']
XML.SubElement(xml_parent, 'goals').text = data['maven']['goals']
if "root-module" in data["maven"]:
root_module = XML.SubElement(xml_parent, "rootModule")
XML.SubElement(root_module, "groupId").text = data["maven"]["root-module"][
"group-id"
]
XML.SubElement(root_module, "artifactId").text = data["maven"][
"root-module"
]["artifact-id"]
XML.SubElement(xml_parent, "goals").text = data["maven"]["goals"]
maven_opts = data['maven'].get('maven-opts')
maven_opts = data["maven"].get("maven-opts")
if maven_opts:
XML.SubElement(xml_parent, 'mavenOpts').text = maven_opts
XML.SubElement(xml_parent, "mavenOpts").text = maven_opts
maven_name = data['maven'].get('maven-name')
maven_name = data["maven"].get("maven-name")
if maven_name:
XML.SubElement(xml_parent, 'mavenName').text = maven_name
XML.SubElement(xml_parent, "mavenName").text = maven_name
private_repo = data['maven'].get('private-repository')
private_repo = data["maven"].get("private-repository")
if private_repo:
if private_repo not in self.choices_private_repo.keys():
raise ValueError('Not a valid private-repository "%s", '
'must be one of "%s"' %
(private_repo,
", ".join(self.choices_private_repo.keys())))
XML.SubElement(xml_parent,
'localRepository',
attrib={'class':
self.choices_private_repo[private_repo]})
raise ValueError(
'Not a valid private-repository "%s", '
'must be one of "%s"'
% (private_repo, ", ".join(self.choices_private_repo.keys()))
)
XML.SubElement(
xml_parent,
"localRepository",
attrib={"class": self.choices_private_repo[private_repo]},
)
XML.SubElement(xml_parent, 'ignoreUpstremChanges').text = str(
data['maven'].get('ignore-upstream-changes', True)).lower()
XML.SubElement(xml_parent, "ignoreUpstremChanges").text = str(
data["maven"].get("ignore-upstream-changes", True)
).lower()
XML.SubElement(xml_parent, 'rootPOM').text = \
data['maven'].get('root-pom', 'pom.xml')
XML.SubElement(xml_parent, 'aggregatorStyleBuild').text = str(
not data['maven'].get('parallel-build-modules', False)).lower()
XML.SubElement(xml_parent, 'incrementalBuild').text = str(
data['maven'].get('incremental-build', False)).lower()
XML.SubElement(xml_parent, 'siteArchivingDisabled').text = str(
not data['maven'].get('automatic-site-archiving', True)).lower()
XML.SubElement(xml_parent, 'fingerprintingDisabled').text = str(
not data['maven'].get('automatic-fingerprinting', True)).lower()
if (version > pkg_resources.parse_version('0') and
version < pkg_resources.parse_version('2.0.1')):
XML.SubElement(xml_parent, 'perModuleEmail').text = str(
data.get('per-module-email', True)).lower()
XML.SubElement(xml_parent, 'archivingDisabled').text = str(
not data['maven'].get('automatic-archiving', True)).lower()
XML.SubElement(xml_parent, 'resolveDependencies').text = str(
data['maven'].get('resolve-dependencies', False)).lower()
XML.SubElement(xml_parent, 'processPlugins').text = str(
data['maven'].get('process-plugins', False)).lower()
XML.SubElement(xml_parent, 'mavenValidationLevel').text = '-1'
XML.SubElement(xml_parent, 'runHeadless').text = str(
data['maven'].get('run-headless', False)).lower()
XML.SubElement(xml_parent, 'disableTriggerDownstreamProjects').text = \
str(data['maven'].get('disable-downstream', False)).lower()
if 'custom-workspace' in data['maven']:
XML.SubElement(xml_parent, 'customWorkspace').text = str(
data['maven'].get('custom-workspace'))
helpers.config_file_provider_settings(xml_parent, data['maven'])
XML.SubElement(xml_parent, "rootPOM").text = data["maven"].get(
"root-pom", "pom.xml"
)
XML.SubElement(xml_parent, "aggregatorStyleBuild").text = str(
not data["maven"].get("parallel-build-modules", False)
).lower()
XML.SubElement(xml_parent, "incrementalBuild").text = str(
data["maven"].get("incremental-build", False)
).lower()
XML.SubElement(xml_parent, "siteArchivingDisabled").text = str(
not data["maven"].get("automatic-site-archiving", True)
).lower()
XML.SubElement(xml_parent, "fingerprintingDisabled").text = str(
not data["maven"].get("automatic-fingerprinting", True)
).lower()
if version > pkg_resources.parse_version(
"0"
) and version < pkg_resources.parse_version("2.0.1"):
XML.SubElement(xml_parent, "perModuleEmail").text = str(
data.get("per-module-email", True)
).lower()
XML.SubElement(xml_parent, "archivingDisabled").text = str(
not data["maven"].get("automatic-archiving", True)
).lower()
XML.SubElement(xml_parent, "resolveDependencies").text = str(
data["maven"].get("resolve-dependencies", False)
).lower()
XML.SubElement(xml_parent, "processPlugins").text = str(
data["maven"].get("process-plugins", False)
).lower()
XML.SubElement(xml_parent, "mavenValidationLevel").text = "-1"
XML.SubElement(xml_parent, "runHeadless").text = str(
data["maven"].get("run-headless", False)
).lower()
XML.SubElement(xml_parent, "disableTriggerDownstreamProjects").text = str(
data["maven"].get("disable-downstream", False)
).lower()
if "custom-workspace" in data["maven"]:
XML.SubElement(xml_parent, "customWorkspace").text = str(
data["maven"].get("custom-workspace")
)
helpers.config_file_provider_settings(xml_parent, data["maven"])
run_post_steps = XML.SubElement(xml_parent, 'runPostStepsIfResult')
run_conditions = ['SUCCESS', 'UNSTABLE', 'FAILURE']
run_condition = data['maven'].get('post-step-run-condition', 'FAILURE')
run_post_steps = XML.SubElement(xml_parent, "runPostStepsIfResult")
run_conditions = ["SUCCESS", "UNSTABLE", "FAILURE"]
run_condition = data["maven"].get("post-step-run-condition", "FAILURE")
if run_condition not in run_conditions:
raise InvalidAttributeError('post-step-run-condition',
run_condition, run_conditions)
raise InvalidAttributeError(
"post-step-run-condition", run_condition, run_conditions
)
cond_dict = hudson_model.THRESHOLDS[run_condition]
XML.SubElement(run_post_steps, 'name').text = cond_dict['name']
XML.SubElement(run_post_steps, 'ordinal').text = cond_dict['ordinal']
XML.SubElement(run_post_steps, 'color').text = cond_dict['color']
XML.SubElement(run_post_steps, "name").text = cond_dict["name"]
XML.SubElement(run_post_steps, "ordinal").text = cond_dict["ordinal"]
XML.SubElement(run_post_steps, "color").text = cond_dict["color"]
return xml_parent
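Reviewer note: the str(...).lower() pattern that black reflows throughout this hunk exists because Jenkins config XML stores booleans as the literal strings "true"/"false". A minimal stdlib sketch of one of the options above (job data is a placeholder):

import xml.etree.ElementTree as XML

xml_parent = XML.Element("maven2-moduleset")
data = {"maven": {"incremental-build": True}}  # placeholder job data
# Python's str(True) is "True"; Jenkins XML wants "true".
XML.SubElement(xml_parent, "incrementalBuild").text = str(
    data["maven"].get("incremental-build", False)
).lower()
print(XML.tostring(xml_parent).decode())
# -> <maven2-moduleset><incrementalBuild>true</incrementalBuild></maven2-moduleset>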

File diff suppressed because it is too large


@ -59,6 +59,7 @@ class MultiJob(jenkins_jobs.modules.base.Base):
sequence = 0
def root_xml(self, data):
xml_parent = XML.Element('com.tikal.jenkins.plugins.multijob.'
'MultiJobProject')
xml_parent = XML.Element(
"com.tikal.jenkins.plugins.multijob." "MultiJobProject"
)
return xml_parent
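Reviewer note: a recurring artifact of this transition patch is visible above. black preserves implicit string concatenation, so adjacent literals that used to span wrapped lines stay split even when they would now fit on one line. They are still a single compile-time string:

# Adjacent string literals concatenate at compile time, so the split
# form black emits above is equivalent to the joined form.
tag = "com.tikal.jenkins.plugins.multijob." "MultiJobProject"
assert tag == "com.tikal.jenkins.plugins.multijob.MultiJobProject"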


@ -82,31 +82,38 @@ import jenkins_jobs.modules.base
class Pipeline(jenkins_jobs.modules.base.Base):
sequence = 0
error_msg = ("You cannot declare both 'dsl' and 'pipeline-scm' on a "
"pipeline job")
error_msg = "You cannot declare both 'dsl' and 'pipeline-scm' on a " "pipeline job"
def root_xml(self, data):
xml_parent = XML.Element('flow-definition',
{'plugin': 'workflow-job'})
if 'dsl' in data and 'pipeline-scm' in data:
xml_parent = XML.Element("flow-definition", {"plugin": "workflow-job"})
if "dsl" in data and "pipeline-scm" in data:
raise JenkinsJobsException(self.error_msg)
if 'dsl' in data:
xml_definition = XML.SubElement(xml_parent, 'definition',
{'plugin': 'workflow-cps',
'class': 'org.jenkinsci.plugins.'
'workflow.cps.CpsFlowDefinition'})
XML.SubElement(xml_definition, 'script').text = data['dsl']
elif 'pipeline-scm' in data:
xml_definition = XML.SubElement(xml_parent, 'definition', {
'plugin': 'workflow-cps',
'class': 'org.jenkinsci.plugins.workflow.cps.'
'CpsScmFlowDefinition'})
if "dsl" in data:
xml_definition = XML.SubElement(
xml_parent,
"definition",
{
"plugin": "workflow-cps",
"class": "org.jenkinsci.plugins." "workflow.cps.CpsFlowDefinition",
},
)
XML.SubElement(xml_definition, "script").text = data["dsl"]
elif "pipeline-scm" in data:
xml_definition = XML.SubElement(
xml_parent,
"definition",
{
"plugin": "workflow-cps",
"class": "org.jenkinsci.plugins.workflow.cps."
"CpsScmFlowDefinition",
},
)
else:
raise JenkinsJobsException("Either 'dsl' or 'pipeline-scm' "
"is required for pipeline job")
raise JenkinsJobsException(
"Either 'dsl' or 'pipeline-scm' " "is required for pipeline job"
)
needs_workspace = data.get('sandbox', False)
XML.SubElement(xml_definition, 'sandbox').text = str(
needs_workspace).lower()
needs_workspace = data.get("sandbox", False)
XML.SubElement(xml_definition, "sandbox").text = str(needs_workspace).lower()
return xml_parent
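Reviewer note: to see what the reformatted 'dsl' branch builds, here is a self-contained sketch with stdlib ElementTree; the pipeline script and sandbox flag are placeholder job data:

import xml.etree.ElementTree as XML

data = {"dsl": "node { echo 'hello' }", "sandbox": True}  # placeholder job data
root = XML.Element("flow-definition", {"plugin": "workflow-job"})
definition = XML.SubElement(
    root,
    "definition",
    {
        "plugin": "workflow-cps",
        "class": "org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition",
    },
)
XML.SubElement(definition, "script").text = data["dsl"]
XML.SubElement(definition, "sandbox").text = str(data.get("sandbox", False)).lower()
print(XML.tostring(root).decode())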


@ -59,22 +59,21 @@ class Workflow(jenkins_jobs.modules.base.Base):
def root_xml(self, data):
logger = logging.getLogger(__name__)
logger.warning(
"Workflow job type is deprecated, please use Pipeline job type"
logger.warning("Workflow job type is deprecated, please use Pipeline job type")
xml_parent = XML.Element("flow-definition", {"plugin": "workflow-job"})
xml_definition = XML.SubElement(
xml_parent,
"definition",
{
"plugin": "workflow-cps",
"class": "org.jenkinsci.plugins." "workflow.cps.CpsFlowDefinition",
},
)
xml_parent = XML.Element('flow-definition',
{'plugin': 'workflow-job'})
xml_definition = XML.SubElement(xml_parent, 'definition',
{'plugin': 'workflow-cps',
'class': 'org.jenkinsci.plugins.'
'workflow.cps.CpsFlowDefinition'})
mapping = [
('dsl', 'script', None),
('sandbox', 'sandbox', False),
]
mapping = [("dsl", "script", None), ("sandbox", "sandbox", False)]
helpers.convert_mapping_to_xml(
xml_definition, data, mapping, fail_required=True)
xml_definition, data, mapping, fail_required=True
)
return xml_parent
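Reviewer note: much of this patch reflows calls to helpers.convert_mapping_to_xml. As a rough mental model only (the real helper also validates allowed-value lists and honors fail_required), each (yaml-key, xml-tag, default) triple becomes one child element:

import xml.etree.ElementTree as XML

def convert_mapping_sketch(parent, data, mapping):
    # Simplified: the real helper raises on missing required values and
    # checks optional allowed-value entries in the tuples.
    for yaml_key, xml_tag, default in mapping:
        value = data.get(yaml_key, default)
        if isinstance(value, bool):
            value = str(value).lower()
        XML.SubElement(parent, xml_tag).text = str(value)

definition = XML.Element("definition")
mapping = [("dsl", "script", None), ("sandbox", "sandbox", False)]
convert_mapping_sketch(definition, {"dsl": "node {}"}, mapping)
print(XML.tostring(definition).decode())
# -> <definition><script>node {}</script><sandbox>false</sandbox></definition>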


@ -59,16 +59,17 @@ def builds_chain_fingerprinter(registry, xml_parent, data):
.. literalinclude:: /../../tests/properties/fixtures/fingerprinter.yaml
:language: yaml
"""
fingerprinter = XML.SubElement(xml_parent,
'org.jenkinsci.plugins.'
'buildschainfingerprinter.'
'AutomaticFingerprintJobProperty')
fingerprinter = XML.SubElement(
xml_parent,
"org.jenkinsci.plugins."
"buildschainfingerprinter."
"AutomaticFingerprintJobProperty",
)
mapping = [
('per-builds-chain', 'isPerBuildsChainEnabled', False),
('per-job-chain', 'isPerJobsChainEnabled', False),
("per-builds-chain", "isPerBuildsChainEnabled", False),
("per-job-chain", "isPerJobsChainEnabled", False),
]
helpers.convert_mapping_to_xml(
fingerprinter, data, mapping, fail_required=True)
helpers.convert_mapping_to_xml(fingerprinter, data, mapping, fail_required=True)
def ownership(registry, xml_parent, data):
@ -87,16 +88,17 @@ def ownership(registry, xml_parent, data):
"""
ownership_plugin = XML.SubElement(
xml_parent,
'com.synopsys.arc.jenkins.plugins.ownership.jobs.JobOwnerJobProperty')
ownership = XML.SubElement(ownership_plugin, 'ownership')
owner = str(data.get('enabled', True)).lower()
XML.SubElement(ownership, 'ownershipEnabled').text = owner
"com.synopsys.arc.jenkins.plugins.ownership.jobs.JobOwnerJobProperty",
)
ownership = XML.SubElement(ownership_plugin, "ownership")
owner = str(data.get("enabled", True)).lower()
XML.SubElement(ownership, "ownershipEnabled").text = owner
XML.SubElement(ownership, 'primaryOwnerId').text = data.get('owner')
XML.SubElement(ownership, "primaryOwnerId").text = data.get("owner")
coownersIds = XML.SubElement(ownership, 'coownersIds')
for coowner in data.get('co-owners', []):
XML.SubElement(coownersIds, 'string').text = coowner
coownersIds = XML.SubElement(ownership, "coownersIds")
for coowner in data.get("co-owners", []):
XML.SubElement(coownersIds, "string").text = coowner
def promoted_build(registry, xml_parent, data):
@ -115,13 +117,14 @@ def promoted_build(registry, xml_parent, data):
.. literalinclude:: /../../tests/properties/fixtures/promoted_build.yaml
:language: yaml
"""
promoted = XML.SubElement(xml_parent, 'hudson.plugins.promoted__builds.'
'JobPropertyImpl')
names = data.get('names', [])
promoted = XML.SubElement(
xml_parent, "hudson.plugins.promoted__builds." "JobPropertyImpl"
)
names = data.get("names", [])
if names:
active_processes = XML.SubElement(promoted, 'activeProcessNames')
active_processes = XML.SubElement(promoted, "activeProcessNames")
for n in names:
XML.SubElement(active_processes, 'string').text = str(n)
XML.SubElement(active_processes, "string").text = str(n)
def gitbucket(parser, xml_parent, data):
@ -143,15 +146,12 @@ def gitbucket(parser, xml_parent, data):
:language: yaml
"""
gitbucket = XML.SubElement(
xml_parent, 'org.jenkinsci.plugins.gitbucket.GitBucketProjectProperty')
gitbucket.set('plugin', 'gitbucket')
xml_parent, "org.jenkinsci.plugins.gitbucket.GitBucketProjectProperty"
)
gitbucket.set("plugin", "gitbucket")
mapping = [
('url', 'url', None),
('link-enabled', 'linkEnabled', False),
]
helpers.convert_mapping_to_xml(
gitbucket, data, mapping, fail_required=True)
mapping = [("url", "url", None), ("link-enabled", "linkEnabled", False)]
helpers.convert_mapping_to_xml(gitbucket, data, mapping, fail_required=True)
def github(registry, xml_parent, data):
@ -174,13 +174,11 @@ def github(registry, xml_parent, data):
:language: yaml
"""
github = XML.SubElement(
xml_parent, 'com.coravy.hudson.plugins.github.GithubProjectProperty')
github.set('plugin', 'github')
xml_parent, "com.coravy.hudson.plugins.github.GithubProjectProperty"
)
github.set("plugin", "github")
mapping = [
('url', 'projectUrl', None),
('display-name', 'displayName', ''),
]
mapping = [("url", "projectUrl", None), ("display-name", "displayName", "")]
helpers.convert_mapping_to_xml(github, data, mapping, fail_required=True)
@ -197,12 +195,11 @@ def gitlab(registry, xml_parent, data):
.. literalinclude:: /../../tests/properties/fixtures/gitlab.yaml
:language: yaml
"""
gitlab = XML.SubElement(xml_parent,
'com.dabsquared.gitlabjenkins.connection.'
'GitLabConnectionProperty')
mapping = [
('connection', 'gitLabConnection', None),
]
gitlab = XML.SubElement(
xml_parent,
"com.dabsquared.gitlabjenkins.connection." "GitLabConnectionProperty",
)
mapping = [("connection", "gitLabConnection", None)]
helpers.convert_mapping_to_xml(gitlab, data, mapping, fail_required=True)
@ -219,12 +216,10 @@ def gitlab_logo(registry, xml_parent, data):
.. literalinclude:: /../../tests/properties/fixtures/gitlab-logo.yaml
:language: yaml
"""
logo = XML.SubElement(xml_parent,
'org.jenkinsci.plugins.gitlablogo.'
'GitlabLogoProperty')
mapping = [
('repository-name', 'repositoryName', None)
]
logo = XML.SubElement(
xml_parent, "org.jenkinsci.plugins.gitlablogo." "GitlabLogoProperty"
)
mapping = [("repository-name", "repositoryName", None)]
helpers.convert_mapping_to_xml(logo, data, mapping, fail_required=True)
@ -238,9 +233,7 @@ def disk_usage(registry, xml_parent, data):
.. literalinclude:: /../../tests/properties/fixtures/disk-usage.yaml
:language: yaml
"""
XML.SubElement(xml_parent,
'hudson.plugins.disk__usage.'
'DiskUsageProperty')
XML.SubElement(xml_parent, "hudson.plugins.disk__usage." "DiskUsageProperty")
def least_load(registry, xml_parent, data):
@ -255,12 +248,11 @@ def least_load(registry, xml_parent, data):
.. literalinclude:: /../../tests/properties/fixtures/least-load002.yaml
:language: yaml
"""
least = XML.SubElement(xml_parent,
'org.bstick12.jenkinsci.plugins.leastload.'
'LeastLoadDisabledProperty')
mapping = [
('disabled', 'leastLoadDisabled', True),
]
least = XML.SubElement(
xml_parent,
"org.bstick12.jenkinsci.plugins.leastload." "LeastLoadDisabledProperty",
)
mapping = [("disabled", "leastLoadDisabled", True)]
helpers.convert_mapping_to_xml(least, data, mapping, fail_required=True)
@ -288,43 +280,41 @@ def throttle(registry, xml_parent, data):
.. literalinclude:: /../../tests/properties/fixtures/throttle001.yaml
:language: yaml
"""
throttle = XML.SubElement(xml_parent,
'hudson.plugins.throttleconcurrents.'
'ThrottleJobProperty')
throttle = XML.SubElement(
xml_parent, "hudson.plugins.throttleconcurrents." "ThrottleJobProperty"
)
mapping = [
('max-per-node', 'maxConcurrentPerNode', '0'),
('max-total', 'maxConcurrentTotal', '0'),
('enabled', 'throttleEnabled', True),
("max-per-node", "maxConcurrentPerNode", "0"),
("max-total", "maxConcurrentTotal", "0"),
("enabled", "throttleEnabled", True),
]
helpers.convert_mapping_to_xml(throttle, data, mapping, fail_required=True)
cat = data.get('categories', [])
cat = data.get("categories", [])
if cat:
cn = XML.SubElement(throttle, 'categories')
cn = XML.SubElement(throttle, "categories")
for c in cat:
XML.SubElement(cn, 'string').text = str(c)
XML.SubElement(cn, "string").text = str(c)
options_list = ('category', 'project')
option = data.get('option')
options_list = ("category", "project")
option = data.get("option")
if option not in options_list:
raise InvalidAttributeError('option', option, options_list)
raise InvalidAttributeError("option", option, options_list)
mapping = [
('', 'throttleOption', option),
('', 'configVersion', '1'),
('parameters-limit', 'limitOneJobWithMatchingParams', False),
("", "throttleOption", option),
("", "configVersion", "1"),
("parameters-limit", "limitOneJobWithMatchingParams", False),
]
helpers.convert_mapping_to_xml(throttle, data, mapping, fail_required=True)
matrixopt = XML.SubElement(throttle, 'matrixOptions')
matrixopt = XML.SubElement(throttle, "matrixOptions")
mapping = [
('matrix-builds', 'throttleMatrixBuilds', True),
('matrix-configs', 'throttleMatrixConfigurations', False),
("matrix-builds", "throttleMatrixBuilds", True),
("matrix-configs", "throttleMatrixConfigurations", False),
]
helpers.convert_mapping_to_xml(
matrixopt, data, mapping, fail_required=True)
helpers.convert_mapping_to_xml(matrixopt, data, mapping, fail_required=True)
params_to_use = data.get('parameters-check-list', [])
XML.SubElement(throttle, 'paramsToUseForLimit').text = ",".join(
params_to_use)
params_to_use = data.get("parameters-check-list", [])
XML.SubElement(throttle, "paramsToUseForLimit").text = ",".join(params_to_use)
def branch_api(registry, xml_parent, data):
@ -354,16 +344,17 @@ def branch_api(registry, xml_parent, data):
/../../tests/properties/fixtures/branch-api-full.yaml
:language: yaml
"""
branch = XML.SubElement(xml_parent, 'jenkins.branch.'
'RateLimitBranchProperty_-JobPropertyImpl')
branch.set('plugin', 'branch-api')
branch = XML.SubElement(
xml_parent, "jenkins.branch." "RateLimitBranchProperty_-JobPropertyImpl"
)
branch.set("plugin", "branch-api")
valid_time_periods = ['Hour', 'Day', 'Week', 'Month', 'Year']
valid_time_periods = ["Hour", "Day", "Week", "Month", "Year"]
mapping = [
('time-period', 'durationName', 'Hour', valid_time_periods),
('number-of-builds', 'count', 1),
('skip-rate-limit', 'userBoost', False),
("time-period", "durationName", "Hour", valid_time_periods),
("number-of-builds", "count", 1),
("skip-rate-limit", "userBoost", False),
]
helpers.convert_mapping_to_xml(branch, data, mapping, fail_required=True)
@ -383,19 +374,16 @@ def sidebar(registry, xml_parent, data):
.. literalinclude:: /../../tests/properties/fixtures/sidebar02.yaml
:language: yaml
"""
sidebar = xml_parent.find('hudson.plugins.sidebar__link.ProjectLinks')
sidebar = xml_parent.find("hudson.plugins.sidebar__link.ProjectLinks")
if sidebar is None:
sidebar = XML.SubElement(xml_parent,
'hudson.plugins.sidebar__link.ProjectLinks')
links = XML.SubElement(sidebar, 'links')
sidebar = XML.SubElement(
xml_parent, "hudson.plugins.sidebar__link.ProjectLinks"
)
links = XML.SubElement(sidebar, "links")
else:
links = sidebar.find('links')
action = XML.SubElement(links, 'hudson.plugins.sidebar__link.LinkAction')
mapping = [
('url', 'url', ''),
('text', 'text', ''),
('icon', 'icon', ''),
]
links = sidebar.find("links")
action = XML.SubElement(links, "hudson.plugins.sidebar__link.LinkAction")
mapping = [("url", "url", ""), ("text", "text", ""), ("icon", "icon", "")]
helpers.convert_mapping_to_xml(action, data, mapping, fail_required=True)
@ -423,43 +411,40 @@ def inject(registry, xml_parent, data):
:language: yaml
"""
inject = XML.SubElement(xml_parent,
'EnvInjectJobProperty')
info = XML.SubElement(inject, 'info')
inject = XML.SubElement(xml_parent, "EnvInjectJobProperty")
info = XML.SubElement(inject, "info")
mapping = [
('properties-file', 'propertiesFilePath', None),
('properties-content', 'propertiesContent', None),
('script-file', 'scriptFilePath', None),
('script-content', 'scriptContent', None),
('load-from-master', 'loadFilesFromMaster', False),
("properties-file", "propertiesFilePath", None),
("properties-content", "propertiesContent", None),
("script-file", "scriptFilePath", None),
("script-content", "scriptContent", None),
("load-from-master", "loadFilesFromMaster", False),
]
helpers.convert_mapping_to_xml(info, data, mapping, fail_required=False)
# determine version of plugin
plugin_info = registry.get_plugin_info("Groovy")
version = pkg_resources.parse_version(plugin_info.get('version', '0'))
version = pkg_resources.parse_version(plugin_info.get("version", "0"))
if version >= pkg_resources.parse_version("2.0.0"):
secure_groovy_script = XML.SubElement(info, 'secureGroovyScript')
secure_groovy_script = XML.SubElement(info, "secureGroovyScript")
mapping = [
('groovy-content', 'script', None),
('groovy-sandbox', 'sandbox', False),
("groovy-content", "script", None),
("groovy-sandbox", "sandbox", False),
]
helpers.convert_mapping_to_xml(secure_groovy_script, data, mapping,
fail_required=False)
helpers.convert_mapping_to_xml(
secure_groovy_script, data, mapping, fail_required=False
)
else:
mapping = [
('groovy-content', 'groovyScriptContent', None),
]
helpers.convert_mapping_to_xml(info, data, mapping,
fail_required=False)
mapping = [("groovy-content", "groovyScriptContent", None)]
helpers.convert_mapping_to_xml(info, data, mapping, fail_required=False)
mapping = [
('enabled', 'on', True),
('keep-system-variables', 'keepJenkinsSystemVariables', True),
('keep-build-variables', 'keepBuildVariables', True),
('override-build-parameters', 'overrideBuildParameters', False),
("enabled", "on", True),
("keep-system-variables", "keepJenkinsSystemVariables", True),
("keep-build-variables", "keepBuildVariables", True),
("override-build-parameters", "overrideBuildParameters", False),
]
helpers.convert_mapping_to_xml(inject, data, mapping, fail_required=True)
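Reviewer note: the Groovy branch above is one of several plugin-version gates in this file; they all rely on the PEP 440 ordering that pkg_resources provides, e.g.:

import pkg_resources

# parse_version gives proper version ordering, so "2.0.0" sorts after
# "0" (the fallback used when the plugin version is unknown).
version = pkg_resources.parse_version("2.0.0")
assert version >= pkg_resources.parse_version("2.0.0")
assert pkg_resources.parse_version("0") < version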
@ -479,11 +464,12 @@ def authenticated_build(registry, xml_parent, data):
"""
# TODO: generalize this
security = XML.SubElement(xml_parent,
'hudson.security.'
'AuthorizationMatrixProperty')
XML.SubElement(security, 'permission').text = (
'hudson.model.Item.Build:authenticated')
security = XML.SubElement(
xml_parent, "hudson.security." "AuthorizationMatrixProperty"
)
XML.SubElement(
security, "permission"
).text = "hudson.model.Item.Build:authenticated"
def authorization(registry, xml_parent, data):
@ -523,38 +509,39 @@ def authorization(registry, xml_parent, data):
:language: yaml
"""
credentials = 'com.cloudbees.plugins.credentials.CredentialsProvider.'
ownership = 'com.synopsys.arc.jenkins.plugins.ownership.OwnershipPlugin.'
credentials = "com.cloudbees.plugins.credentials.CredentialsProvider."
ownership = "com.synopsys.arc.jenkins.plugins.ownership.OwnershipPlugin."
mapping = {
'credentials-create': ''.join((credentials, 'Create')),
'credentials-delete': ''.join((credentials, 'Delete')),
'credentials-manage-domains': ''.join((credentials, 'ManageDomains')),
'credentials-update': ''.join((credentials, 'Update')),
'credentials-view': ''.join((credentials, 'View')),
'job-build': 'hudson.model.Item.Build',
'job-cancel': 'hudson.model.Item.Cancel',
'job-configure': 'hudson.model.Item.Configure',
'job-delete': 'hudson.model.Item.Delete',
'job-discover': 'hudson.model.Item.Discover',
'job-extended-read': 'hudson.model.Item.ExtendedRead',
'job-move': 'hudson.model.Item.Move',
'job-read': 'hudson.model.Item.Read',
'job-status': 'hudson.model.Item.ViewStatus',
'job-workspace': 'hudson.model.Item.Workspace',
'ownership-jobs': ''.join((ownership, 'Jobs')),
'run-delete': 'hudson.model.Run.Delete',
'run-replay': 'hudson.model.Run.Replay',
'run-update': 'hudson.model.Run.Update',
'scm-tag': 'hudson.scm.SCM.Tag',
"credentials-create": "".join((credentials, "Create")),
"credentials-delete": "".join((credentials, "Delete")),
"credentials-manage-domains": "".join((credentials, "ManageDomains")),
"credentials-update": "".join((credentials, "Update")),
"credentials-view": "".join((credentials, "View")),
"job-build": "hudson.model.Item.Build",
"job-cancel": "hudson.model.Item.Cancel",
"job-configure": "hudson.model.Item.Configure",
"job-delete": "hudson.model.Item.Delete",
"job-discover": "hudson.model.Item.Discover",
"job-extended-read": "hudson.model.Item.ExtendedRead",
"job-move": "hudson.model.Item.Move",
"job-read": "hudson.model.Item.Read",
"job-status": "hudson.model.Item.ViewStatus",
"job-workspace": "hudson.model.Item.Workspace",
"ownership-jobs": "".join((ownership, "Jobs")),
"run-delete": "hudson.model.Run.Delete",
"run-replay": "hudson.model.Run.Replay",
"run-update": "hudson.model.Run.Update",
"scm-tag": "hudson.scm.SCM.Tag",
}
if data:
matrix = XML.SubElement(xml_parent,
'hudson.security.AuthorizationMatrixProperty')
matrix = XML.SubElement(
xml_parent, "hudson.security.AuthorizationMatrixProperty"
)
for (username, perms) in data.items():
for perm in perms:
pe = XML.SubElement(matrix, 'permission')
pe = XML.SubElement(matrix, "permission")
try:
pe.text = "{0}:{1}".format(mapping[perm], username)
except KeyError:
@ -577,39 +564,32 @@ def priority_sorter(registry, xml_parent, data):
/../../tests/properties/fixtures/priority_sorter002.yaml
:language: yaml
"""
plugin_info = registry.get_plugin_info('PrioritySorter')
version = pkg_resources.parse_version(plugin_info.get('version', '0'))
plugin_info = registry.get_plugin_info("PrioritySorter")
version = pkg_resources.parse_version(plugin_info.get("version", "0"))
if version >= pkg_resources.parse_version("3.0"):
priority_sorter_tag = XML.SubElement(
xml_parent,
'jenkins.advancedqueue.jobinclusion.'
'strategy.JobInclusionJobProperty')
"jenkins.advancedqueue.jobinclusion." "strategy.JobInclusionJobProperty",
)
mapping = [
('use', 'useJobGroup', True),
('priority', 'jobGroupName', None)
]
mapping = [("use", "useJobGroup", True), ("priority", "jobGroupName", None)]
elif version >= pkg_resources.parse_version("2.0"):
priority_sorter_tag = XML.SubElement(xml_parent,
'jenkins.advancedqueue.priority.'
'strategy.PriorityJobProperty')
priority_sorter_tag = XML.SubElement(
xml_parent, "jenkins.advancedqueue.priority." "strategy.PriorityJobProperty"
)
mapping = [
('use', 'useJobPriority', True),
('priority', 'priority', None)
]
mapping = [("use", "useJobPriority", True), ("priority", "priority", None)]
else:
priority_sorter_tag = XML.SubElement(xml_parent,
'hudson.queueSorter.'
'PrioritySorterJobProperty')
priority_sorter_tag = XML.SubElement(
xml_parent, "hudson.queueSorter." "PrioritySorterJobProperty"
)
mapping = [
('priority', 'priority', None),
]
mapping = [("priority", "priority", None)]
helpers.convert_mapping_to_xml(
priority_sorter_tag, data, mapping, fail_required=True)
priority_sorter_tag, data, mapping, fail_required=True
)
def build_blocker(registry, xml_parent, data):
@ -643,25 +623,25 @@ def build_blocker(registry, xml_parent, data):
/../../tests/properties/fixtures/build-blocker-full.yaml
:language: yaml
"""
blocker = XML.SubElement(xml_parent,
'hudson.plugins.'
'buildblocker.BuildBlockerProperty')
if data is None or 'blocking-jobs' not in data:
raise JenkinsJobsException('blocking-jobs field is missing')
elif data.get('blocking-jobs', None) is None:
raise JenkinsJobsException('blocking-jobs list must not be empty')
blocker = XML.SubElement(
xml_parent, "hudson.plugins." "buildblocker.BuildBlockerProperty"
)
if data is None or "blocking-jobs" not in data:
raise JenkinsJobsException("blocking-jobs field is missing")
elif data.get("blocking-jobs", None) is None:
raise JenkinsJobsException("blocking-jobs list must not be empty")
jobs = ''
jobs = ""
for setting, value in data.items():
if setting == 'blocking-jobs':
jobs = '\n'.join(value)
block_level_types = ['GLOBAL', 'NODE']
queue_scan_types = ['DISABLED', 'ALL', 'BUILDABLE']
if setting == "blocking-jobs":
jobs = "\n".join(value)
block_level_types = ["GLOBAL", "NODE"]
queue_scan_types = ["DISABLED", "ALL", "BUILDABLE"]
mapping = [
('use-build-blocker', 'useBuildBlocker', True),
('', 'blockingJobs', jobs),
('block-level', 'blockLevel', 'GLOBAL', block_level_types),
('queue-scanning', 'scanQueueFor', 'DISABLED', queue_scan_types),
("use-build-blocker", "useBuildBlocker", True),
("", "blockingJobs", jobs),
("block-level", "blockLevel", "GLOBAL", block_level_types),
("queue-scanning", "scanQueueFor", "DISABLED", queue_scan_types),
]
helpers.convert_mapping_to_xml(blocker, data, mapping, fail_required=True)
@ -684,17 +664,16 @@ def copyartifact(registry, xml_parent, data):
:language: yaml
"""
copyartifact = XML.SubElement(xml_parent,
'hudson.plugins.'
'copyartifact.'
'CopyArtifactPermissionProperty',
plugin='copyartifact')
if not data or not data.get('projects', None):
raise JenkinsJobsException("projects string must exist and "
"not be empty")
projectlist = XML.SubElement(copyartifact, 'projectNameList')
for project in str(data.get('projects')).split(','):
XML.SubElement(projectlist, 'string').text = project
copyartifact = XML.SubElement(
xml_parent,
"hudson.plugins." "copyartifact." "CopyArtifactPermissionProperty",
plugin="copyartifact",
)
if not data or not data.get("projects", None):
raise JenkinsJobsException("projects string must exist and " "not be empty")
projectlist = XML.SubElement(copyartifact, "projectNameList")
for project in str(data.get("projects")).split(","):
XML.SubElement(projectlist, "string").text = project
def batch_tasks(registry, xml_parent, data):
@ -722,18 +701,12 @@ def batch_tasks(registry, xml_parent, data):
:language: yaml
"""
pdef = XML.SubElement(xml_parent,
'hudson.plugins.batch__task.BatchTaskProperty')
tasks = XML.SubElement(pdef, 'tasks')
pdef = XML.SubElement(xml_parent, "hudson.plugins.batch__task.BatchTaskProperty")
tasks = XML.SubElement(pdef, "tasks")
for task in data:
batch_task = XML.SubElement(tasks,
'hudson.plugins.batch__task.BatchTask')
mapping = [
('name', 'name', None),
('script', 'script', None),
]
helpers.convert_mapping_to_xml(
batch_task, task, mapping, fail_required=True)
batch_task = XML.SubElement(tasks, "hudson.plugins.batch__task.BatchTask")
mapping = [("name", "name", None), ("script", "script", None)]
helpers.convert_mapping_to_xml(batch_task, task, mapping, fail_required=True)
def heavy_job(registry, xml_parent, data):
@ -752,12 +725,10 @@ def heavy_job(registry, xml_parent, data):
:language: yaml
"""
heavyjob = XML.SubElement(xml_parent,
'hudson.plugins.'
'heavy__job.HeavyJobProperty')
mapping = [
('weight', 'weight', 1),
]
heavyjob = XML.SubElement(
xml_parent, "hudson.plugins." "heavy__job.HeavyJobProperty"
)
mapping = [("weight", "weight", 1)]
helpers.convert_mapping_to_xml(heavyjob, data, mapping, fail_required=True)
@ -782,18 +753,18 @@ def slave_utilization(registry, xml_parent, data):
:language: yaml
"""
utilization = XML.SubElement(
xml_parent, 'com.suryagaddipati.jenkins.SlaveUtilizationProperty')
xml_parent, "com.suryagaddipati.jenkins.SlaveUtilizationProperty"
)
percent = int(data.get('slave-percentage', 0))
percent = int(data.get("slave-percentage", 0))
exclusive_node_access = True if percent else False
mapping = [
('', 'needsExclusiveAccessToNode', exclusive_node_access),
('', 'slaveUtilizationPercentage', percent),
('single-instance-per-slave', 'singleInstancePerSlave', False),
("", "needsExclusiveAccessToNode", exclusive_node_access),
("", "slaveUtilizationPercentage", percent),
("single-instance-per-slave", "singleInstancePerSlave", False),
]
helpers.convert_mapping_to_xml(
utilization, data, mapping, fail_required=True)
helpers.convert_mapping_to_xml(utilization, data, mapping, fail_required=True)
def delivery_pipeline(registry, xml_parent, data):
@ -818,14 +789,13 @@ def delivery_pipeline(registry, xml_parent, data):
/../../tests/properties/fixtures/delivery-pipeline-full.yaml
:language: yaml
"""
pipeline = XML.SubElement(
xml_parent, 'se.diabol.jenkins.pipeline.PipelineProperty')
pipeline.set('plugin', 'delivery-pipeline-plugin')
pipeline = XML.SubElement(xml_parent, "se.diabol.jenkins.pipeline.PipelineProperty")
pipeline.set("plugin", "delivery-pipeline-plugin")
mapping = [
('stage', 'stageName', ''),
('task', 'taskName', ''),
('description', 'descriptionTemplate', ''),
("stage", "stageName", ""),
("task", "taskName", ""),
("description", "descriptionTemplate", ""),
]
helpers.convert_mapping_to_xml(pipeline, data, mapping, fail_required=True)
@ -846,14 +816,12 @@ def zeromq_event(registry, xml_parent, data):
"""
zmq_event = XML.SubElement(xml_parent,
'org.jenkinsci.plugins.'
'ZMQEventPublisher.HudsonNotificationProperty')
mapping = [
('', 'enabled', True),
]
helpers.convert_mapping_to_xml(
zmq_event, data, mapping, fail_required=True)
zmq_event = XML.SubElement(
xml_parent,
"org.jenkinsci.plugins." "ZMQEventPublisher.HudsonNotificationProperty",
)
mapping = [("", "enabled", True)]
helpers.convert_mapping_to_xml(zmq_event, data, mapping, fail_required=True)
def slack(registry, xml_parent, data):
@ -896,39 +864,36 @@ def slack(registry, xml_parent, data):
"""
logger = logging.getLogger(__name__)
plugin_info = registry.get_plugin_info('Slack Notification Plugin')
plugin_ver = pkg_resources.parse_version(plugin_info.get('version', "0"))
plugin_info = registry.get_plugin_info("Slack Notification Plugin")
plugin_ver = pkg_resources.parse_version(plugin_info.get("version", "0"))
if plugin_ver >= pkg_resources.parse_version("2.0"):
logger.warning(
"properties section is not used with plugin version >= 2.0",
)
logger.warning("properties section is not used with plugin version >= 2.0")
mapping = (
('notify-start', 'startNotification', False),
('notify-success', 'notifySuccess', False),
('notify-aborted', 'notifyAborted', False),
('notify-not-built', 'notifyNotBuilt', False),
('notify-unstable', 'notifyUnstable', False),
('notify-failure', 'notifyFailure', False),
('notify-back-to-normal', 'notifyBackToNormal', False),
('notify-repeated-failure', 'notifyRepeatedFailure', False),
('include-test-summary', 'includeTestSummary', False),
('include-custom-message', 'includeCustomMessage', False),
('custom-message', 'customMessage', ''),
('room', 'room', ''),
("notify-start", "startNotification", False),
("notify-success", "notifySuccess", False),
("notify-aborted", "notifyAborted", False),
("notify-not-built", "notifyNotBuilt", False),
("notify-unstable", "notifyUnstable", False),
("notify-failure", "notifyFailure", False),
("notify-back-to-normal", "notifyBackToNormal", False),
("notify-repeated-failure", "notifyRepeatedFailure", False),
("include-test-summary", "includeTestSummary", False),
("include-custom-message", "includeCustomMessage", False),
("custom-message", "customMessage", ""),
("room", "room", ""),
)
slack = XML.SubElement(
xml_parent,
'jenkins.plugins.slack.SlackNotifier_-SlackJobProperty',
xml_parent, "jenkins.plugins.slack.SlackNotifier_-SlackJobProperty"
)
# Ensure that custom-message is set when include-custom-message is set
# to true.
if data.get('include-custom-message', False):
if not data.get('custom-message', ''):
raise MissingAttributeError('custom-message')
if data.get("include-custom-message", False):
if not data.get("custom-message", ""):
raise MissingAttributeError("custom-message")
helpers.convert_mapping_to_xml(slack, data, mapping, fail_required=True)
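Reviewer note: the custom-message guard above is behavior-preserving across the reformat; a minimal reproduction of the check, with ValueError standing in for MissingAttributeError:

def check_slack(data):
    # include-custom-message: true requires a non-empty custom-message.
    if data.get("include-custom-message", False):
        if not data.get("custom-message", ""):
            raise ValueError("custom-message")

check_slack({"include-custom-message": True, "custom-message": "build done"})  # ok
try:
    check_slack({"include-custom-message": True})
except ValueError as exc:
    print("missing attribute:", exc)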
@ -955,16 +920,14 @@ def rebuild(registry, xml_parent, data):
.. literalinclude:: /../../tests/properties/fixtures/rebuild-full.yaml
:language: yaml
"""
sub_element = XML.SubElement(xml_parent,
'com.sonyericsson.rebuild.RebuildSettings')
sub_element.set('plugin', 'rebuild')
sub_element = XML.SubElement(xml_parent, "com.sonyericsson.rebuild.RebuildSettings")
sub_element.set("plugin", "rebuild")
mapping = [
('auto-rebuild', 'autoRebuild', False),
('rebuild-disabled', 'rebuildDisabled', False),
("auto-rebuild", "autoRebuild", False),
("rebuild-disabled", "rebuildDisabled", False),
]
helpers.convert_mapping_to_xml(
sub_element, data, mapping, fail_required=True)
helpers.convert_mapping_to_xml(sub_element, data, mapping, fail_required=True)
def build_discarder(registry, xml_parent, data):
@ -987,19 +950,17 @@ def build_discarder(registry, xml_parent, data):
/../../tests/properties/fixtures/build-discarder-002.yaml
:language: yaml
"""
base_sub = XML.SubElement(xml_parent,
'jenkins.model.BuildDiscarderProperty')
strategy = XML.SubElement(base_sub, 'strategy')
strategy.set('class', 'hudson.tasks.LogRotator')
base_sub = XML.SubElement(xml_parent, "jenkins.model.BuildDiscarderProperty")
strategy = XML.SubElement(base_sub, "strategy")
strategy.set("class", "hudson.tasks.LogRotator")
mappings = [
('days-to-keep', 'daysToKeep', -1),
('num-to-keep', 'numToKeep', -1),
('artifact-days-to-keep', 'artifactDaysToKeep', -1),
('artifact-num-to-keep', 'artifactNumToKeep', -1),
("days-to-keep", "daysToKeep", -1),
("num-to-keep", "numToKeep", -1),
("artifact-days-to-keep", "artifactDaysToKeep", -1),
("artifact-num-to-keep", "artifactNumToKeep", -1),
]
helpers.convert_mapping_to_xml(
strategy, data, mappings, fail_required=True)
helpers.convert_mapping_to_xml(strategy, data, mappings, fail_required=True)
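Reviewer note: the strategy element above carries its Java type as a class attribute, and -1 is the "no limit" sentinel for LogRotator fields:

import xml.etree.ElementTree as XML

base = XML.Element("jenkins.model.BuildDiscarderProperty")
strategy = XML.SubElement(base, "strategy")
strategy.set("class", "hudson.tasks.LogRotator")
XML.SubElement(strategy, "daysToKeep").text = "-1"  # -1 == keep forever
print(XML.tostring(base).decode())
# -> <jenkins.model.BuildDiscarderProperty><strategy class="hudson.tasks.LogRotator">...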
def slave_prerequisites(registry, xml_parent, data):
@ -1027,14 +988,16 @@ def slave_prerequisites(registry, xml_parent, data):
/../../tests/properties/fixtures/slave-prerequisites-full.yaml
:language: yaml
"""
prereqs = XML.SubElement(xml_parent,
'com.cloudbees.plugins.JobPrerequisites')
prereqs = XML.SubElement(xml_parent, "com.cloudbees.plugins.JobPrerequisites")
mappings = [
('script', 'script', None),
('interpreter', 'interpreter', 'shell', {
'cmd': 'windows batch command',
'shell': 'shell script'}),
("script", "script", None),
(
"interpreter",
"interpreter",
"shell",
{"cmd": "windows batch command", "shell": "shell script"},
),
]
helpers.convert_mapping_to_xml(prereqs, data, mappings, fail_required=True)
@ -1077,24 +1040,23 @@ def groovy_label(registry, xml_parent, data):
/../../tests/properties/fixtures/groovy-label-full.yaml
:language: yaml
"""
sub_element = XML.SubElement(xml_parent,
'jp.ikedam.jenkins.plugins.'
'groovy__label__assignment.'
'GroovyLabelAssignmentProperty')
sub_element.set('plugin', 'groovy-label-assignment')
security = XML.SubElement(sub_element, 'secureGroovyScript')
security.set('plugin', 'script-security')
mapping = [
('script', 'script', ''),
('sandbox', 'sandbox', False),
]
sub_element = XML.SubElement(
xml_parent,
"jp.ikedam.jenkins.plugins."
"groovy__label__assignment."
"GroovyLabelAssignmentProperty",
)
sub_element.set("plugin", "groovy-label-assignment")
security = XML.SubElement(sub_element, "secureGroovyScript")
security.set("plugin", "script-security")
mapping = [("script", "script", ""), ("sandbox", "sandbox", False)]
helpers.convert_mapping_to_xml(security, data, mapping, fail_required=True)
if data and 'classpath' in data:
classpath = XML.SubElement(security, 'classpath')
for value in data['classpath']:
entry = XML.SubElement(classpath, 'entry')
XML.SubElement(entry, 'url').text = value
if data and "classpath" in data:
classpath = XML.SubElement(security, "classpath")
for value in data["classpath"]:
entry = XML.SubElement(classpath, "entry")
XML.SubElement(entry, "url").text = value
def lockable_resources(registry, xml_parent, data):
@ -1137,26 +1099,24 @@ def lockable_resources(registry, xml_parent, data):
:language: yaml
"""
lockable_resources = XML.SubElement(
xml_parent,
'org.jenkins.plugins.lockableresources.RequiredResourcesProperty')
if data.get('resources') and data.get('label'):
raise AttributeConflictError('resources', ('label',))
xml_parent, "org.jenkins.plugins.lockableresources.RequiredResourcesProperty"
)
if data.get("resources") and data.get("label"):
raise AttributeConflictError("resources", ("label",))
mapping = [
('resources', 'resourceNames', ''),
('var-name', 'resourceNamesVar', ''),
('number', 'resourceNumber', 0),
('label', 'labelName', ''),
("resources", "resourceNames", ""),
("var-name", "resourceNamesVar", ""),
("number", "resourceNumber", 0),
("label", "labelName", ""),
]
helpers.convert_mapping_to_xml(
lockable_resources, data, mapping, fail_required=True)
secure_groovy_script = XML.SubElement(lockable_resources,
'resourceMatchScript')
mapping = [
('match-script', 'script', None),
('groovy-sandbox', 'sandbox', False),
]
helpers.convert_mapping_to_xml(secure_groovy_script, data, mapping,
fail_required=False)
lockable_resources, data, mapping, fail_required=True
)
secure_groovy_script = XML.SubElement(lockable_resources, "resourceMatchScript")
mapping = [("match-script", "script", None), ("groovy-sandbox", "sandbox", False)]
helpers.convert_mapping_to_xml(
secure_groovy_script, data, mapping, fail_required=False
)
def docker_container(registry, xml_parent, data):
@ -1189,24 +1149,25 @@ def docker_container(registry, xml_parent, data):
:language: yaml
"""
xml_docker = XML.SubElement(
xml_parent, 'com.nirima.jenkins.plugins.docker.DockerJobProperty')
xml_parent, "com.nirima.jenkins.plugins.docker.DockerJobProperty"
)
registry = XML.SubElement(xml_docker, 'registry')
registry.set('plugin', 'docker-commons')
registry = XML.SubElement(xml_docker, "registry")
registry.set("plugin", "docker-commons")
registry_mapping = [
('docker-registry-url', 'url', ''),
('credentials-id', 'credentialsId', ''),
("docker-registry-url", "url", ""),
("credentials-id", "credentialsId", ""),
]
helpers.convert_mapping_to_xml(
registry, data, registry_mapping, fail_required=False)
registry, data, registry_mapping, fail_required=False
)
mapping = [
('commit-on-success', 'tagOnCompletion', False),
('additional-tag', 'additionalTag', ''),
('push-on-success', 'pushOnSuccess', False),
('clean-local-images', 'cleanImages', True),
("commit-on-success", "tagOnCompletion", False),
("additional-tag", "additionalTag", ""),
("push-on-success", "pushOnSuccess", False),
("clean-local-images", "cleanImages", True),
]
helpers.convert_mapping_to_xml(
xml_docker, data, mapping, fail_required=True)
helpers.convert_mapping_to_xml(xml_docker, data, mapping, fail_required=True)
def disable_resume(registry, xml_parent, data):
@ -1222,9 +1183,10 @@ def disable_resume(registry, xml_parent, data):
:language: yaml
"""
XML.SubElement(xml_parent,
'org.jenkinsci.plugins.workflow.job.properties.'
'DisableResumeJobProperty')
XML.SubElement(
xml_parent,
"org.jenkinsci.plugins.workflow.job.properties." "DisableResumeJobProperty",
)
def cachet_gating(registry, xml_parent, data):
@ -1246,32 +1208,30 @@ def cachet_gating(registry, xml_parent, data):
:language: yaml
"""
cachet = XML.SubElement(
xml_parent, 'com.redhat.jenkins.plugins.cachet.CachetJobProperty')
cachet.set('plugin', 'cachet-gating')
xml_parent, "com.redhat.jenkins.plugins.cachet.CachetJobProperty"
)
cachet.set("plugin", "cachet-gating")
mapping = [
('required-resources', 'requiredResources', True),
]
helpers.convert_mapping_to_xml(
cachet, data, mapping, fail_required=True)
mapping = [("required-resources", "requiredResources", True)]
helpers.convert_mapping_to_xml(cachet, data, mapping, fail_required=True)
resources_data = data.get('resources', [])
resources_data = data.get("resources", [])
if resources_data:
resources = XML.SubElement(cachet, 'resources')
resources = XML.SubElement(cachet, "resources")
for resource in resources_data:
XML.SubElement(resources, 'string').text = str(resource)
XML.SubElement(resources, "string").text = str(resource)
class Properties(jenkins_jobs.modules.base.Base):
sequence = 20
component_type = 'property'
component_list_type = 'properties'
component_type = "property"
component_list_type = "properties"
def gen_xml(self, xml_parent, data):
properties = xml_parent.find('properties')
properties = xml_parent.find("properties")
if properties is None:
properties = XML.SubElement(xml_parent, 'properties')
properties = XML.SubElement(xml_parent, "properties")
for prop in data.get('properties', []):
self.registry.dispatch('property', properties, prop)
for prop in data.get("properties", []):
self.registry.dispatch("property", properties, prop)

File diff suppressed because it is too large


@ -57,21 +57,19 @@ def email(registry, xml_parent, data):
recipients: breakage@example.com
"""
mailer = XML.SubElement(xml_parent,
'hudson.maven.reporters.MavenMailer')
XML.SubElement(mailer, 'recipients').text = data['recipients']
mailer = XML.SubElement(xml_parent, "hudson.maven.reporters.MavenMailer")
XML.SubElement(mailer, "recipients").text = data["recipients"]
# Note the logic reversal (included here to match the GUI)
if data.get('notify-every-unstable-build', True):
XML.SubElement(mailer, 'dontNotifyEveryUnstableBuild').text = 'false'
if data.get("notify-every-unstable-build", True):
XML.SubElement(mailer, "dontNotifyEveryUnstableBuild").text = "false"
else:
XML.SubElement(mailer, 'dontNotifyEveryUnstableBuild').text = 'true'
XML.SubElement(mailer, "dontNotifyEveryUnstableBuild").text = "true"
mapping = [
('send-to-individuals', 'sendToIndividuals', False),
('notify-for-each-module', 'perModuleEmail', True),
("send-to-individuals", "sendToIndividuals", False),
("notify-for-each-module", "perModuleEmail", True),
]
helpers.convert_mapping_to_xml(
mailer, data, mapping, fail_required=False)
helpers.convert_mapping_to_xml(mailer, data, mapping, fail_required=False)
def findbugs(registry, xml_parent, data):
@ -131,29 +129,29 @@ def findbugs(registry, xml_parent, data):
.. literalinclude:: /../../tests/reporters/fixtures/findbugs01.yaml
"""
findbugs = XML.SubElement(xml_parent,
'hudson.plugins.findbugs.FindBugsReporter')
findbugs.set('plugin', 'findbugs')
findbugs = XML.SubElement(xml_parent, "hudson.plugins.findbugs.FindBugsReporter")
findbugs.set("plugin", "findbugs")
helpers.findbugs_settings(findbugs, data)
helpers.build_trends_publisher('[FINDBUGS] ', findbugs, data)
helpers.build_trends_publisher("[FINDBUGS] ", findbugs, data)
class Reporters(jenkins_jobs.modules.base.Base):
sequence = 55
component_type = 'reporter'
component_list_type = 'reporters'
component_type = "reporter"
component_list_type = "reporters"
def gen_xml(self, xml_parent, data):
if 'reporters' not in data:
if "reporters" not in data:
return
if xml_parent.tag != 'maven2-moduleset':
raise JenkinsJobsException("Reporters may only be used for Maven "
"modules.")
if xml_parent.tag != "maven2-moduleset":
raise JenkinsJobsException(
"Reporters may only be used for Maven " "modules."
)
reporters = XML.SubElement(xml_parent, 'reporters')
reporters = XML.SubElement(xml_parent, "reporters")
for action in data.get('reporters', []):
self.registry.dispatch('reporter', reporters, action)
for action in data.get("reporters", []):
self.registry.dispatch("reporter", reporters, action)

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -33,17 +33,16 @@ class All(jenkins_jobs.modules.base.Base):
sequence = 0
def root_xml(self, data):
root = XML.Element('hudson.model.AllView')
root = XML.Element("hudson.model.AllView")
mapping = [
('name', 'name', None),
('description', 'description', ''),
('filter-executors', 'filterExecutors', False),
('filter-queue', 'filterQueue', False),
("name", "name", None),
("description", "description", ""),
("filter-executors", "filterExecutors", False),
("filter-queue", "filterQueue", False),
]
helpers.convert_mapping_to_xml(root, data, mapping, fail_required=True)
XML.SubElement(root, 'properties',
{'class': 'hudson.model.View$PropertyList'})
XML.SubElement(root, "properties", {"class": "hudson.model.View$PropertyList"})
return root


@ -17,195 +17,189 @@ import jenkins_jobs.modules.helpers as helpers
def build_duration(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.BuildDurationFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.BuildDurationFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('match-type', 'includeExcludeTypeString', 'includeMatched'),
('build-duration-type', 'buildCountTypeString', 'Latest'),
('amount-type', 'amountTypeString', 'Hours'),
('amount', 'amount', '0'),
('less-than', 'lessThan', True),
('build-duration-minutes', 'buildDurationMinutes', '0'),
("match-type", "includeExcludeTypeString", "includeMatched"),
("build-duration-type", "buildCountTypeString", "Latest"),
("amount-type", "amountTypeString", "Hours"),
("amount", "amount", "0"),
("less-than", "lessThan", True),
("build-duration-minutes", "buildDurationMinutes", "0"),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def build_status(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.BuildStatusFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.BuildStatusFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('match-type', 'includeExcludeTypeString', 'includeMatched'),
('never-built', 'neverBuilt', False),
('building', 'building', False),
('in-build-queue', 'inBuildQueue', False),
("match-type", "includeExcludeTypeString", "includeMatched"),
("never-built", "neverBuilt", False),
("building", "building", False),
("in-build-queue", "inBuildQueue", False),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def build_trend(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.BuildTrendFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.BuildTrendFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('match-type', 'includeExcludeTypeString', 'includeMatched'),
('build-trend-type', 'buildCountTypeString', 'Latest'),
('amount-type', 'amountTypeString', 'Hours'),
('amount', 'amount', '0'),
('status', 'statusTypeString', 'Completed'),
("match-type", "includeExcludeTypeString", "includeMatched"),
("build-trend-type", "buildCountTypeString", "Latest"),
("amount-type", "amountTypeString", "Hours"),
("amount", "amount", "0"),
("status", "statusTypeString", "Completed"),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def fallback(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.AddRemoveFallbackFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.AddRemoveFallbackFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('fallback-type', 'fallbackTypeString', 'REMOVE_ALL_IF_ALL_INCLUDED'),
('fallback-type', 'fallbackType', 'REMOVE_ALL_IF_ALL_INCLUDED'),
("fallback-type", "fallbackTypeString", "REMOVE_ALL_IF_ALL_INCLUDED"),
("fallback-type", "fallbackType", "REMOVE_ALL_IF_ALL_INCLUDED"),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def job_status(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.JobStatusFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.JobStatusFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('match-type', 'includeExcludeTypeString', 'includeMatched'),
('unstable', 'unstable', False),
('failed', 'failed', False),
('aborted', 'aborted', False),
('disabled', 'disabled', False),
('stable', 'stable', False),
("match-type", "includeExcludeTypeString", "includeMatched"),
("unstable", "unstable", False),
("failed", "failed", False),
("aborted", "aborted", False),
("disabled", "disabled", False),
("stable", "stable", False),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def job_type(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.JobTypeFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.JobTypeFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('match-type', 'includeExcludeTypeString', 'includeMatched'),
('job-type', 'jobType', 'hudson.model.FreeStyleProject'),
("match-type", "includeExcludeTypeString", "includeMatched"),
("job-type", "jobType", "hudson.model.FreeStyleProject"),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def most_recent(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.MostRecentJobsFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.MostRecentJobsFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('max-to-include', 'maxToInclude', '0'),
('check-start-time', 'checkStartTime', False),
("max-to-include", "maxToInclude", "0"),
("check-start-time", "checkStartTime", False),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def other_views(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.OtherViewsFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.OtherViewsFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('match-type', 'includeExcludeTypeString', 'includeMatched'),
('view-name', 'otherViewName',
'&lt;select a view other than this one&gt;'),
("match-type", "includeExcludeTypeString", "includeMatched"),
("view-name", "otherViewName", "&lt;select a view other than this one&gt;"),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def parameter(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.ParameterFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.ParameterFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('match-type', 'includeExcludeTypeString', 'includeMatched'),
('name', 'nameRegex', ''),
('value', 'valueRegex', ''),
('description', 'descriptionRegex', ''),
('use-default', 'useDefaultValue', False),
('match-builds-in-progress', 'matchBuildsInProgress', False),
('match-all-builds', 'matchAllBuilds', False),
('max-builds-to-match', 'maxBuildsToMatch', 0),
("match-type", "includeExcludeTypeString", "includeMatched"),
("name", "nameRegex", ""),
("value", "valueRegex", ""),
("description", "descriptionRegex", ""),
("use-default", "useDefaultValue", False),
("match-builds-in-progress", "matchBuildsInProgress", False),
("match-all-builds", "matchAllBuilds", False),
("max-builds-to-match", "maxBuildsToMatch", 0),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def scm(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.ScmTypeFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.ScmTypeFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('match-type', 'includeExcludeTypeString', 'includeMatched'),
('scm-type', 'scmType', 'hudson.scm.NullSCM'),
("match-type", "includeExcludeTypeString", "includeMatched"),
("scm-type", "scmType", "hudson.scm.NullSCM"),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def secured_job(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.SecuredJobsFilter')
xml.set('plugin', 'view-job-filters')
mapping = [
('match-type', 'includeExcludeTypeString', 'includeMatched'),
]
xml = XML.SubElement(xml_parent, "hudson.views.SecuredJobsFilter")
xml.set("plugin", "view-job-filters")
mapping = [("match-type", "includeExcludeTypeString", "includeMatched")]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def regex_job(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.RegExJobFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.RegExJobFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('match-type', 'includeExcludeTypeString', 'includeMatched'),
('regex-name', 'valueTypeString', ''),
('regex', 'regex', ''),
("match-type", "includeExcludeTypeString", "includeMatched"),
("regex-name", "valueTypeString", ""),
("regex", "regex", ""),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def unclassified(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.UnclassifiedJobsFilter')
xml.set('plugin', 'view-job-filters')
mapping = [
('match-type', 'includeExcludeTypeString', 'includeMatched'),
]
xml = XML.SubElement(xml_parent, "hudson.views.UnclassifiedJobsFilter")
xml.set("plugin", "view-job-filters")
mapping = [("match-type", "includeExcludeTypeString", "includeMatched")]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def upstream_downstream(xml_parent, data):
xml = XML.SubElement(
xml_parent, 'hudson.views.UpstreamDownstreamJobsFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.UpstreamDownstreamJobsFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('include-upstream', 'includeUpstream', False),
('include-downstream', 'includeDownstream', False),
('recursive', 'recursive', False),
('exclude-originals', 'excludeOriginals', False),
("include-upstream", "includeUpstream", False),
("include-downstream", "includeDownstream", False),
("recursive", "recursive", False),
("exclude-originals", "excludeOriginals", False),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def user_permissions(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.SecurityFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.SecurityFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('match-type', 'includeExcludeTypeString', 'includeMatched'),
('configure', 'configure', False),
('build', 'build', False),
('workspace', 'workspace', False),
('permission-check', 'permissionCheckType', 'MustMatchAll'),
("match-type", "includeExcludeTypeString", "includeMatched"),
("configure", "configure", False),
("build", "build", False),
("workspace", "workspace", False),
("permission-check", "permissionCheckType", "MustMatchAll"),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
def user_relevence(xml_parent, data):
xml = XML.SubElement(xml_parent, 'hudson.views.UserRelevanceFilter')
xml.set('plugin', 'view-job-filters')
xml = XML.SubElement(xml_parent, "hudson.views.UserRelevanceFilter")
xml.set("plugin", "view-job-filters")
mapping = [
('match-type', 'includeExcludeTypeString', 'includeMatched'),
('build-count', 'buildCountTypeString', 'AtLeastOne'),
('amount-type', 'amountTypeString', 'Hours'),
('amount', 'amount', '0'),
('match-user-id', 'matchUserId', False),
('match-user-fullname', 'matchUserFullName', False),
('ignore-case', 'ignoreCase', False),
('ignore-whitespace', 'ignoreWhitespace', False),
('ignore-non-alphaNumeric', 'ignoreNonAlphaNumeric', False),
('match-builder', 'matchBuilder', False),
('match-email', 'matchEmail', False),
('match-scm-changes', 'matchScmChanges', False),
("match-type", "includeExcludeTypeString", "includeMatched"),
("build-count", "buildCountTypeString", "AtLeastOne"),
("amount-type", "amountTypeString", "Hours"),
("amount", "amount", "0"),
("match-user-id", "matchUserId", False),
("match-user-fullname", "matchUserFullName", False),
("ignore-case", "ignoreCase", False),
("ignore-whitespace", "ignoreWhitespace", False),
("ignore-non-alphaNumeric", "ignoreNonAlphaNumeric", False),
("match-builder", "matchBuilder", False),
("match-email", "matchEmail", False),
("match-scm-changes", "matchScmChanges", False),
]
helpers.convert_mapping_to_xml(xml, data, mapping, fail_required=True)
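Reviewer note: every filter in this file follows the same shape, which is why black's output is so uniform here: a plugin-tagged element plus one mapping. A condensed sketch (filter tag and job data are illustrative):

import xml.etree.ElementTree as XML

def make_filter(parent, tag, mapping, data):
    # Shared pattern of the functions above: create the filter element,
    # tag it with the providing plugin, then apply the mapping.
    xml = XML.SubElement(parent, tag)
    xml.set("plugin", "view-job-filters")
    for yaml_key, xml_tag, default in mapping:
        value = data.get(yaml_key, default)
        if isinstance(value, bool):
            value = str(value).lower()
        XML.SubElement(xml, xml_tag).text = str(value)
    return xml

view = XML.Element("listView")
mapping = [
    ("match-type", "includeExcludeTypeString", "includeMatched"),
    ("failed", "failed", False),
]
make_filter(view, "hudson.views.JobStatusFilter", mapping, {"failed": True})
print(XML.tostring(view).decode())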


@ -227,147 +227,153 @@ import jenkins_jobs.modules.helpers as helpers
import jenkins_jobs.modules.view_jobfilters as view_jobfilters
COLUMN_DICT = {
'status': 'hudson.views.StatusColumn',
'weather': 'hudson.views.WeatherColumn',
'job': 'hudson.views.JobColumn',
'last-success': 'hudson.views.LastSuccessColumn',
'last-failure': 'hudson.views.LastFailureColumn',
'last-duration': 'hudson.views.LastDurationColumn',
'build-button': 'hudson.views.BuildButtonColumn',
'last-stable': 'hudson.views.LastStableColumn',
'robot-list': 'hudson.plugins.robot.view.RobotListViewColumn',
'find-bugs': 'hudson.plugins.findbugs.FindBugsColumn',
'jacoco': 'hudson.plugins.jacococoveragecolumn.JaCoCoColumn',
'git-branch': 'hudson.plugins.git.GitBranchSpecifierColumn',
'schedule-build':
'org.jenkinsci.plugins.schedulebuild.ScheduleBuildButtonColumn',
'priority-sorter': 'jenkins.advancedqueue.PrioritySorterJobColumn',
'build-filter': 'hudson.views.BuildFilterColumn',
'desc': 'jenkins.branch.DescriptionColumn',
'policy-violations':
'com.sonatype.insight.ci.hudson.QualityColumn '
'plugin="sonatype-clm-ci"',
'member-graph-view':
'com.barchart.jenkins.cascade.GraphViewColumn '
'plugin="maven-release-cascade"',
'extra-tests-total': [
['jenkins.plugins.extracolumns.TestResultColumn',
{'plugin': 'extra-columns'}],
'<testResultFormat>2</testResultFormat>'],
'extra-tests-failed': [
['jenkins.plugins.extracolumns.TestResultColumn',
{'plugin': 'extra-columns'}],
'<testResultFormat>3</testResultFormat>'],
'extra-tests-passed': [
['jenkins.plugins.extracolumns.TestResultColumn',
{'plugin': 'extra-columns'}],
'<testResultFormat>4</testResultFormat>'],
'extra-tests-skipped': [
['jenkins.plugins.extracolumns.TestResultColumn',
{'plugin': 'extra-columns'}],
'<testResultFormat>5</testResultFormat>'],
'extra-tests-format-0': [
['jenkins.plugins.extracolumns.TestResultColumn',
{'plugin': 'extra-columns'}],
'<testResultFormat>0</testResultFormat>'],
'extra-tests-format-1': [
['jenkins.plugins.extracolumns.TestResultColumn',
{'plugin': 'extra-columns'}],
'<testResultFormat>1</testResultFormat>'],
'extra-build-description': [
['jenkins.plugins.extracolumns.BuildDescriptionColumn',
{'plugin': 'extra-columns'}],
'<columnWidth>3</columnWidth>', '<forceWidth>false</forceWidth>'],
'extra-build-parameters': [
['jenkins.plugins.extracolumns.BuildParametersColumn',
{'plugin': 'extra-columns'}],
'<singlePara>false</singlePara>', '<parameterName/>'],
'extra-last-user-name':
'jenkins.plugins.extracolumns.UserNameColumn'
' plugin="extra-columns"',
'extra-last-output':
'jenkins.plugins.extracolumns.LastBuildConsoleColumn'
' plugin="extra-columns"',
'extra-workspace-link':
'jenkins.plugins.extracolumns.WorkspaceColumn '
'plugin="extra-columns"',
'extra-configure-button':
'jenkins.plugins.extracolumns.ConfigureProjectColumn'
' plugin="extra-columns"',
"status": "hudson.views.StatusColumn",
"weather": "hudson.views.WeatherColumn",
"job": "hudson.views.JobColumn",
"last-success": "hudson.views.LastSuccessColumn",
"last-failure": "hudson.views.LastFailureColumn",
"last-duration": "hudson.views.LastDurationColumn",
"build-button": "hudson.views.BuildButtonColumn",
"last-stable": "hudson.views.LastStableColumn",
"robot-list": "hudson.plugins.robot.view.RobotListViewColumn",
"find-bugs": "hudson.plugins.findbugs.FindBugsColumn",
"jacoco": "hudson.plugins.jacococoveragecolumn.JaCoCoColumn",
"git-branch": "hudson.plugins.git.GitBranchSpecifierColumn",
"schedule-build": "org.jenkinsci.plugins.schedulebuild.ScheduleBuildButtonColumn",
"priority-sorter": "jenkins.advancedqueue.PrioritySorterJobColumn",
"build-filter": "hudson.views.BuildFilterColumn",
"desc": "jenkins.branch.DescriptionColumn",
"policy-violations": "com.sonatype.insight.ci.hudson.QualityColumn "
'plugin="sonatype-clm-ci"',
"member-graph-view": "com.barchart.jenkins.cascade.GraphViewColumn "
'plugin="maven-release-cascade"',
"extra-tests-total": [
["jenkins.plugins.extracolumns.TestResultColumn", {"plugin": "extra-columns"}],
"<testResultFormat>2</testResultFormat>",
],
"extra-tests-failed": [
["jenkins.plugins.extracolumns.TestResultColumn", {"plugin": "extra-columns"}],
"<testResultFormat>3</testResultFormat>",
],
"extra-tests-passed": [
["jenkins.plugins.extracolumns.TestResultColumn", {"plugin": "extra-columns"}],
"<testResultFormat>4</testResultFormat>",
],
"extra-tests-skipped": [
["jenkins.plugins.extracolumns.TestResultColumn", {"plugin": "extra-columns"}],
"<testResultFormat>5</testResultFormat>",
],
"extra-tests-format-0": [
["jenkins.plugins.extracolumns.TestResultColumn", {"plugin": "extra-columns"}],
"<testResultFormat>0</testResultFormat>",
],
"extra-tests-format-1": [
["jenkins.plugins.extracolumns.TestResultColumn", {"plugin": "extra-columns"}],
"<testResultFormat>1</testResultFormat>",
],
"extra-build-description": [
[
"jenkins.plugins.extracolumns.BuildDescriptionColumn",
{"plugin": "extra-columns"},
],
"<columnWidth>3</columnWidth>",
"<forceWidth>false</forceWidth>",
],
"extra-build-parameters": [
[
"jenkins.plugins.extracolumns.BuildParametersColumn",
{"plugin": "extra-columns"},
],
"<singlePara>false</singlePara>",
"<parameterName/>",
],
"extra-last-user-name": "jenkins.plugins.extracolumns.UserNameColumn"
' plugin="extra-columns"',
"extra-last-output": "jenkins.plugins.extracolumns.LastBuildConsoleColumn"
' plugin="extra-columns"',
"extra-workspace-link": "jenkins.plugins.extracolumns.WorkspaceColumn "
'plugin="extra-columns"',
"extra-configure-button": "jenkins.plugins.extracolumns.ConfigureProjectColumn"
' plugin="extra-columns"',
}
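# Two value shapes coexist above: a plain string names the column element
# (sometimes with an inline plugin="..." attribute), while the list shape
# is [[element_name, attribute_dict], raw_child_xml, ...]; root_xml below
# branches on isinstance(COLUMN_DICT[column], list) to handle both.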
DEFAULT_COLUMNS = ['status', 'weather', 'job', 'last-success', 'last-failure',
'last-duration', 'build-button']
DEFAULT_COLUMNS = [
"status",
"weather",
"job",
"last-success",
"last-failure",
"last-duration",
"build-button",
]
class List(jenkins_jobs.modules.base.Base):
sequence = 0
def root_xml(self, data):
root = XML.Element('hudson.model.ListView')
root = XML.Element("hudson.model.ListView")
mapping = [
('name', 'name', None),
('description', 'description', ''),
('filter-executors', 'filterExecutors', False),
('filter-queue', 'filterQueue', False),
("name", "name", None),
("description", "description", ""),
("filter-executors", "filterExecutors", False),
("filter-queue", "filterQueue", False),
]
helpers.convert_mapping_to_xml(root, data, mapping, fail_required=True)
XML.SubElement(root, 'properties',
{'class': 'hudson.model.View$PropertyList'})
XML.SubElement(root, "properties", {"class": "hudson.model.View$PropertyList"})
jn_xml = XML.SubElement(root, 'jobNames')
jobnames = data.get('job-name', None)
jn_xml = XML.SubElement(root, "jobNames")
jobnames = data.get("job-name", None)
XML.SubElement(
jn_xml,
'comparator', {
'class': 'hudson.util.CaseInsensitiveComparator'
}
jn_xml, "comparator", {"class": "hudson.util.CaseInsensitiveComparator"}
)
if jobnames is not None:
# Job names must be sorted in the xml
jobnames = sorted(jobnames, key=str.lower)
for jobname in jobnames:
XML.SubElement(jn_xml, 'string').text = str(jobname)
XML.SubElement(jn_xml, "string").text = str(jobname)
job_filter_xml = XML.SubElement(root, 'jobFilters')
jobfilters = data.get('job-filters', [])
job_filter_xml = XML.SubElement(root, "jobFilters")
jobfilters = data.get("job-filters", [])
for jobfilter in jobfilters:
filter = getattr(view_jobfilters, jobfilter.replace('-', '_'))
filter = getattr(view_jobfilters, jobfilter.replace("-", "_"))
filter(job_filter_xml, jobfilters.get(jobfilter))
c_xml = XML.SubElement(root, 'columns')
columns = data.get('columns', DEFAULT_COLUMNS)
c_xml = XML.SubElement(root, "columns")
columns = data.get("columns", DEFAULT_COLUMNS)
for column in columns:
if isinstance(column, dict):
if 'extra-build-parameter' in column:
p_name = column['extra-build-parameter']
if "extra-build-parameter" in column:
p_name = column["extra-build-parameter"]
x = XML.SubElement(
c_xml,
'jenkins.plugins.extracolumns.BuildParametersColumn',
plugin='extra-columns'
"jenkins.plugins.extracolumns.BuildParametersColumn",
plugin="extra-columns",
)
x.append(XML.fromstring("<singlePara>true</singlePara>"))
x.append(
XML.fromstring("<parameterName>%s</parameterName>" % p_name)
)
x.append(XML.fromstring(
'<singlePara>true</singlePara>'))
x.append(XML.fromstring(
'<parameterName>%s</parameterName>' % p_name))
else:
if column in COLUMN_DICT:
if isinstance(COLUMN_DICT[column], list):
x = XML.SubElement(c_xml, COLUMN_DICT[column][0][0],
**COLUMN_DICT[column][0][1])
x = XML.SubElement(
c_xml,
COLUMN_DICT[column][0][0],
**COLUMN_DICT[column][0][1]
)
for tag in COLUMN_DICT[column][1:]:
x.append(XML.fromstring(tag))
else:
XML.SubElement(c_xml, COLUMN_DICT[column])
mapping = [
('regex', 'includeRegex', None),
('recurse', 'recurse', False),
('status-filter', 'statusFilter', None),
("regex", "includeRegex", None),
("recurse", "recurse", False),
("status-filter", "statusFilter", None),
]
helpers.convert_mapping_to_xml(
root, data, mapping, fail_required=False)
helpers.convert_mapping_to_xml(root, data, mapping, fail_required=False)
return root
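
# A rough usage sketch (view and job names invented): root_xml receives
# the already-parsed view definition as a plain dict, so the keys read
# above map one-to-one onto the view's YAML attributes.
view_data = {
    "name": "example-list-view",
    "job-name": ["foo-build", "foo-test"],
    "columns": ["status", "weather", "job", "build-button"],
    "regex": "foo-.*",
    "recurse": True,
}
list_view_xml = List(None).root_xml(view_data)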


@ -69,43 +69,45 @@ class Pipeline(jenkins_jobs.modules.base.Base):
sequence = 0
def root_xml(self, data):
linktypes = ['Lightbox', 'New Window']
root = XML.Element('au.com.centrumsystems.hudson.'
'plugin.buildpipeline.BuildPipelineView',
{'plugin': 'build-pipeline-plugin'})
linktypes = ["Lightbox", "New Window"]
root = XML.Element(
"au.com.centrumsystems.hudson." "plugin.buildpipeline.BuildPipelineView",
{"plugin": "build-pipeline-plugin"},
)
mapping_optional = [
('description', 'description', None),
('filter-executors', 'filterExecutors', False),
('filter-queue', 'filterQueue', False),
("description", "description", None),
("filter-executors", "filterExecutors", False),
("filter-queue", "filterQueue", False),
]
helpers.convert_mapping_to_xml(root, data,
mapping_optional, fail_required=False)
helpers.convert_mapping_to_xml(
root, data, mapping_optional, fail_required=False
)
XML.SubElement(root, 'properties',
{'class': 'hudson.model.View$PropertyList'})
XML.SubElement(root, "properties", {"class": "hudson.model.View$PropertyList"})
GBurl = ('au.com.centrumsystems.hudson.plugin.buildpipeline.'
'DownstreamProjectGridBuilder')
gridBuilder = XML.SubElement(root, 'gridBuilder', {'class': GBurl})
GBurl = (
"au.com.centrumsystems.hudson.plugin.buildpipeline."
"DownstreamProjectGridBuilder"
)
gridBuilder = XML.SubElement(root, "gridBuilder", {"class": GBurl})
jobname = data.get('first-job', '')
XML.SubElement(gridBuilder, 'firstJob').text = jobname
jobname = data.get("first-job", "")
XML.SubElement(gridBuilder, "firstJob").text = jobname
mapping = [
('name', 'name', None),
('no-of-displayed-builds', 'noOfDisplayedBuilds', 1),
('title', 'buildViewTitle', ''),
('link-style', 'consoleOutputLinkStyle', 'Lightbox', linktypes),
('css-Url', 'cssUrl', ''),
('latest-job-only', 'triggerOnlyLatestJob', False),
('manual-trigger', 'alwaysAllowManualTrigger', False),
('show-parameters', 'showPipelineParameters', False),
('parameters-in-headers',
'showPipelineParametersInHeaders', False),
('start-with-parameters', 'startsWithParameters', False),
('refresh-frequency', 'refreshFrequency', 3),
('definition-header', 'showPipelineDefinitionHeader', False),
("name", "name", None),
("no-of-displayed-builds", "noOfDisplayedBuilds", 1),
("title", "buildViewTitle", ""),
("link-style", "consoleOutputLinkStyle", "Lightbox", linktypes),
("css-Url", "cssUrl", ""),
("latest-job-only", "triggerOnlyLatestJob", False),
("manual-trigger", "alwaysAllowManualTrigger", False),
("show-parameters", "showPipelineParameters", False),
("parameters-in-headers", "showPipelineParametersInHeaders", False),
("start-with-parameters", "startsWithParameters", False),
("refresh-frequency", "refreshFrequency", 3),
("definition-header", "showPipelineDefinitionHeader", False),
]
helpers.convert_mapping_to_xml(root, data, mapping, fail_required=True)
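
# The four-element link-style entry is the one restricted value here: the
# trailing linktypes list enumerates what is accepted. A hedged sketch of
# that validation step (the real helper raises its own exception type):
value = data.get("link-style", "Lightbox")
if value not in linktypes:  # ["Lightbox", "New Window"]
    raise ValueError("link-style must be one of %s, got %r" % (linktypes, value))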

File diff suppressed because it is too large.


@ -58,80 +58,129 @@ def zuul_post():
ZUUL_PARAMETERS = [
{'string':
{'description': 'Zuul provided key to link builds with Gerrit events',
'name': 'ZUUL_UUID'}},
{'string':
{'description': 'Zuul provided key to link builds with Gerrit'
' events (deprecated use ZUUL_UUID instead)',
'name': 'UUID'}},
{'string':
{'description': 'Zuul pipeline triggering this job',
'name': 'ZUUL_PIPELINE'}},
{'string':
{'description': 'URL of Zuul\'s git repos accessible to workers',
'name': 'ZUUL_URL'}},
{'string':
{'description': 'Branch name of triggering project',
'name': 'ZUUL_PROJECT'}},
{'string':
{'description': 'Branch name of triggering change',
'name': 'ZUUL_BRANCH'}},
{'string':
{'description': 'List of dependent changes to merge',
'name': 'ZUUL_CHANGES'}},
{'string':
{'description': 'Reference for the merged commit(s) to use',
'name': 'ZUUL_REF'}},
{'string':
{'description': 'The commit SHA1 at the head of ZUUL_REF',
'name': 'ZUUL_COMMIT'}},
{'string':
{'description': 'List of included changes',
'name': 'ZUUL_CHANGE_IDS'}},
{'string':
{'description': 'ID of triggering change',
'name': 'ZUUL_CHANGE'}},
{'string':
{'description': 'Patchset of triggering change',
'name': 'ZUUL_PATCHSET'}},
{'string':
{'description': 'Zuul considered this job voting or not',
'name': 'ZUUL_VOTING'}},
{
"string": {
"description": "Zuul provided key to link builds with Gerrit events",
"name": "ZUUL_UUID",
}
},
{
"string": {
"description": "Zuul provided key to link builds with Gerrit"
" events (deprecated use ZUUL_UUID instead)",
"name": "UUID",
}
},
{
"string": {
"description": "Zuul pipeline triggering this job",
"name": "ZUUL_PIPELINE",
}
},
{
"string": {
"description": "URL of Zuul's git repos accessible to workers",
"name": "ZUUL_URL",
}
},
{
"string": {
"description": "Branch name of triggering project",
"name": "ZUUL_PROJECT",
}
},
{
"string": {
"description": "Branch name of triggering change",
"name": "ZUUL_BRANCH",
}
},
{
"string": {
"description": "List of dependent changes to merge",
"name": "ZUUL_CHANGES",
}
},
{
"string": {
"description": "Reference for the merged commit(s) to use",
"name": "ZUUL_REF",
}
},
{
"string": {
"description": "The commit SHA1 at the head of ZUUL_REF",
"name": "ZUUL_COMMIT",
}
},
{"string": {"description": "List of included changes", "name": "ZUUL_CHANGE_IDS"}},
{"string": {"description": "ID of triggering change", "name": "ZUUL_CHANGE"}},
{
"string": {
"description": "Patchset of triggering change",
"name": "ZUUL_PATCHSET",
}
},
{
"string": {
"description": "Zuul considered this job voting or not",
"name": "ZUUL_VOTING",
}
},
]
ZUUL_POST_PARAMETERS = [
{'string':
{'description': 'Zuul provided key to link builds with Gerrit events',
'name': 'ZUUL_UUID'}},
{'string':
{'description': 'Zuul provided key to link builds with Gerrit'
' events (deprecated use ZUUL_UUID instead)',
'name': 'UUID'}},
{'string':
{'description': 'Zuul pipeline triggering this job',
'name': 'ZUUL_PIPELINE'}},
{'string':
{'description': 'URL of Zuul\'s git repos accessible to workers',
'name': 'ZUUL_URL'}},
{'string':
{'description': 'Branch name of triggering project',
'name': 'ZUUL_PROJECT'}},
{'string':
{'description': 'Name of updated reference triggering this job',
'name': 'ZUUL_REF'}},
{'string':
{'description': 'Name of updated reference triggering this job',
'name': 'ZUUL_REFNAME'}},
{'string':
{'description': 'Old SHA at this reference',
'name': 'ZUUL_OLDREV'}},
{'string':
{'description': 'New SHA at this reference',
'name': 'ZUUL_NEWREV'}},
{'string':
{'description': 'Shortened new SHA at this reference',
'name': 'ZUUL_SHORT_NEWREV'}},
{
"string": {
"description": "Zuul provided key to link builds with Gerrit events",
"name": "ZUUL_UUID",
}
},
{
"string": {
"description": "Zuul provided key to link builds with Gerrit"
" events (deprecated use ZUUL_UUID instead)",
"name": "UUID",
}
},
{
"string": {
"description": "Zuul pipeline triggering this job",
"name": "ZUUL_PIPELINE",
}
},
{
"string": {
"description": "URL of Zuul's git repos accessible to workers",
"name": "ZUUL_URL",
}
},
{
"string": {
"description": "Branch name of triggering project",
"name": "ZUUL_PROJECT",
}
},
{
"string": {
"description": "Name of updated reference triggering this job",
"name": "ZUUL_REF",
}
},
{
"string": {
"description": "Name of updated reference triggering this job",
"name": "ZUUL_REFNAME",
}
},
{"string": {"description": "Old SHA at this reference", "name": "ZUUL_OLDREV"}},
{"string": {"description": "New SHA at this reference", "name": "ZUUL_NEWREV"}},
{
"string": {
"description": "Shortened new SHA at this reference",
"name": "ZUUL_SHORT_NEWREV",
}
},
]
@ -141,23 +190,24 @@ class Zuul(jenkins_jobs.modules.base.Base):
def handle_data(self, job_data):
changed = False
jobs = itertools.chain(
job_data.get('job', {}).values(),
job_data.get('job-template', {}).values())
job_data.get("job", {}).values(), job_data.get("job-template", {}).values()
)
for job in jobs:
triggers = job.get('triggers')
triggers = job.get("triggers")
if not triggers:
continue
if ('zuul' not in job.get('triggers', []) and
'zuul-post' not in job.get('triggers', [])):
if "zuul" not in job.get("triggers", []) and "zuul-post" not in job.get(
"triggers", []
):
continue
if 'parameters' not in job:
job['parameters'] = []
if 'zuul' in job.get('triggers', []):
job['parameters'].extend(ZUUL_PARAMETERS)
job['triggers'].remove('zuul')
if 'zuul-post' in job.get('triggers', []):
job['parameters'].extend(ZUUL_POST_PARAMETERS)
job['triggers'].remove('zuul-post')
if "parameters" not in job:
job["parameters"] = []
if "zuul" in job.get("triggers", []):
job["parameters"].extend(ZUUL_PARAMETERS)
job["triggers"].remove("zuul")
if "zuul-post" in job.get("triggers", []):
job["parameters"].extend(ZUUL_POST_PARAMETERS)
job["triggers"].remove("zuul-post")
changed = True
return changed
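
# A before/after sketch of the rewrite handle_data performs on a single
# job definition (job name invented; None stands in for the registry):
job = {"name": "demo-job", "triggers": ["zuul", "timed"]}
Zuul(None).handle_data({"job": {"demo-job": job}})
# job now reads:
#   {"name": "demo-job", "triggers": ["timed"],
#    "parameters": [...ZUUL_PARAMETERS...]}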


@ -33,11 +33,12 @@ class TaskFunc(dict):
"""
Simple class to wrap around the information needed to run a function.
"""
def __init__(self, n_ord, func, args=None, kwargs=None):
self['func'] = func
self['args'] = args or []
self['kwargs'] = kwargs or {}
self['ord'] = n_ord
self["func"] = func
self["args"] = args or []
self["kwargs"] = kwargs or {}
self["ord"] = n_ord
class Worker(threading.Thread):
@ -47,6 +48,7 @@ class Worker(threading.Thread):
If the string 'done' is passed instead of a TaskFunc instance, the thread
will end.
"""
def __init__(self, in_queue, out_queue):
threading.Thread.__init__(self)
self.in_queue = in_queue
@ -55,15 +57,14 @@ class Worker(threading.Thread):
def run(self):
while True:
task = self.in_queue.get()
if task == 'done':
if task == "done":
return
try:
res = task['func'](*task['args'],
**task['kwargs'])
res = task["func"](*task["args"], **task["kwargs"])
except Exception as exc:
res = exc
traceback.print_exc()
self.out_queue.put((task['ord'], res))
self.out_queue.put((task["ord"], res))
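# The (ord, result) pairs preserve submission order: the collector sorts
# on ord before stripping it (see the sorted() call further down).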
def concurrent(func):
@ -102,8 +103,8 @@ def concurrent(func):
array with the results of the executions in the same order the
parameters were passed.
"""
n_workers = kwargs.pop('n_workers', 0)
p_kwargs = kwargs.pop('concurrent', [])
n_workers = kwargs.pop("n_workers", 0)
p_kwargs = kwargs.pop("concurrent", [])
# if only one parameter is passed inside the concurrent dict, run the
# original function as is, no need for pools
if len(p_kwargs) == 1:
@ -133,7 +134,7 @@ def concurrent(func):
in_queue.put(TaskFunc(n_ord, func, args, f_kwargs))
n_ord += 1
for _ in range(n_workers):
in_queue.put('done')
in_queue.put("done")
# Wait for the results
logging.debug("Waiting for workers to finish processing")
@ -148,4 +149,5 @@ def concurrent(func):
results = [r[1] for r in sorted(results)]
logging.debug("Concurrent task finished")
return results
return concurrentized
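
# A small usage sketch, assuming the keyword contract described in the
# docstring above: each dict in concurrent= supplies one call's kwargs,
# and results come back in the order the dicts were passed.
@concurrent
def measure(url=None):
    return len(url)

results = measure(n_workers=2,
                  concurrent=[{"url": "a"}, {"url": "bb"}, {"url": "ccc"}])
# -> [1, 2, 3]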


@ -29,9 +29,7 @@ from jenkins_jobs.formatter import deep_format
import jenkins_jobs.local_yaml as local_yaml
from jenkins_jobs import utils
__all__ = [
"YamlParser"
]
__all__ = ["YamlParser"]
logger = logging.getLogger(__name__)
@ -45,8 +43,7 @@ def matches(what, glob_patterns):
:arg iterable glob_patterns: glob patterns to match (list, tuple, set,
etc.)
"""
return any(fnmatch.fnmatch(what, glob_pattern)
for glob_pattern in glob_patterns)
return any(fnmatch.fnmatch(what, glob_pattern) for glob_pattern in glob_patterns)
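# For example (patterns invented):
#   matches("foo-build", ["foo-*", "bar-*"])  # True
#   matches("baz-build", ["foo-*"])           # False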
def combination_matches(combination, match_combinations):
@ -79,28 +76,32 @@ class YamlParser(object):
self.views = []
self.jjb_config = jjb_config
self.keep_desc = jjb_config.yamlparser['keep_descriptions']
self.path = jjb_config.yamlparser['include_path']
self.keep_desc = jjb_config.yamlparser["keep_descriptions"]
self.path = jjb_config.yamlparser["include_path"]
def load_files(self, fn):
# handle deprecated behavior, and check that it's not a file-like
# object as these may implement the '__iter__' attribute.
if not hasattr(fn, '__iter__') or hasattr(fn, 'read'):
if not hasattr(fn, "__iter__") or hasattr(fn, "read"):
logger.warning(
'Passing single elements for the `fn` argument in '
'Builder.load_files is deprecated. Please update your code '
'to use a list as support for automatic conversion will be '
'removed in a future version.')
"Passing single elements for the `fn` argument in "
"Builder.load_files is deprecated. Please update your code "
"to use a list as support for automatic conversion will be "
"removed in a future version."
)
fn = [fn]
files_to_process = []
for path in fn:
if not hasattr(path, 'read') and os.path.isdir(path):
files_to_process.extend([os.path.join(path, f)
for f in sorted(os.listdir(path))
if (f.endswith('.yml') or
f.endswith('.yaml'))])
if not hasattr(path, "read") and os.path.isdir(path):
files_to_process.extend(
[
os.path.join(path, f)
for f in sorted(os.listdir(path))
if (f.endswith(".yml") or f.endswith(".yaml"))
]
)
else:
files_to_process.append(path)
@ -108,41 +109,45 @@ class YamlParser(object):
# definitions of macros and templates when loading all from top-level
unique_files = []
for f in files_to_process:
if hasattr(f, 'read'):
if hasattr(f, "read"):
unique_files.append(f)
continue
rpf = os.path.realpath(f)
if rpf not in unique_files:
unique_files.append(rpf)
else:
logger.warning("File '%s' already added as '%s', ignoring "
"reference to avoid duplicating yaml "
"definitions." % (f, rpf))
logger.warning(
"File '%s' already added as '%s', ignoring "
"reference to avoid duplicating yaml "
"definitions." % (f, rpf)
)
for in_file in unique_files:
# use of ask-for-permissions instead of ask-for-forgiveness
# performs better in low-use cases.
if hasattr(in_file, 'name'):
if hasattr(in_file, "name"):
fname = in_file.name
else:
fname = in_file
logger.debug("Parsing YAML file {0}".format(fname))
if hasattr(in_file, 'read'):
if hasattr(in_file, "read"):
self._parse_fp(in_file)
else:
self.parse(in_file)
def _parse_fp(self, fp):
# wrap provided file streams to ensure correct encoding used
data = local_yaml.load(utils.wrap_stream(fp),
self.jjb_config.yamlparser['retain_anchors'],
search_path=self.path)
data = local_yaml.load(
utils.wrap_stream(fp),
self.jjb_config.yamlparser["retain_anchors"],
search_path=self.path,
)
if data:
if not isinstance(data, list):
raise JenkinsJobsException(
"The topmost collection in file '{fname}' must be a list,"
" not a {cls}".format(fname=getattr(fp, 'name', fp),
cls=type(data)))
" not a {cls}".format(fname=getattr(fp, "name", fp), cls=type(data))
)
for item in data:
cls, dfn = next(iter(item.items()))
group = self.data.get(cls, {})
@ -153,41 +158,43 @@ class YamlParser(object):
n = v
break
# Syntax error
raise JenkinsJobsException("Syntax error, for item "
"named '{0}'. Missing indent?"
.format(n))
raise JenkinsJobsException(
"Syntax error, for item "
"named '{0}'. Missing indent?".format(n)
)
# allow any entry to specify an id that can also be used
_id = dfn.get('id', dfn['name'])
_id = dfn.get("id", dfn["name"])
if _id in group:
self._handle_dups(
"Duplicate entry found in '{0}: '{1}' already "
"defined".format(fp.name, _id))
"defined".format(fp.name, _id)
)
group[_id] = dfn
self.data[cls] = group
def parse(self, fn):
with io.open(fn, 'r', encoding='utf-8') as fp:
with io.open(fn, "r", encoding="utf-8") as fp:
self._parse_fp(fp)
def _handle_dups(self, message):
if not self.jjb_config.yamlparser['allow_duplicates']:
if not self.jjb_config.yamlparser["allow_duplicates"]:
logger.error(message)
raise JenkinsJobsException(message)
else:
logger.warning(message)
def _getJob(self, name):
job = self.data.get('job', {}).get(name, None)
job = self.data.get("job", {}).get(name, None)
if not job:
return job
return self._applyDefaults(job)
def _getJobGroup(self, name):
return self.data.get('job-group', {}).get(name, None)
return self.data.get("job-group", {}).get(name, None)
def _getJobTemplate(self, name):
job = self.data.get('job-template', {}).get(name, None)
job = self.data.get("job-template", {}).get(name, None)
if not job:
return job
return self._applyDefaults(job)
@ -196,12 +203,12 @@ class YamlParser(object):
if override_dict is None:
override_dict = {}
whichdefaults = data.get('defaults', 'global')
defaults = copy.deepcopy(self.data.get('defaults',
{}).get(whichdefaults, {}))
if defaults == {} and whichdefaults != 'global':
raise JenkinsJobsException("Unknown defaults set: '{0}'"
.format(whichdefaults))
whichdefaults = data.get("defaults", "global")
defaults = copy.deepcopy(self.data.get("defaults", {}).get(whichdefaults, {}))
if defaults == {} and whichdefaults != "global":
raise JenkinsJobsException(
"Unknown defaults set: '{0}'".format(whichdefaults)
)
for key in override_dict.keys():
if key in defaults.keys():
@ -216,53 +223,52 @@ class YamlParser(object):
if self.keep_desc:
description = job.get("description", None)
else:
description = job.get("description", '')
description = job.get("description", "")
if description is not None:
job["description"] = description + \
self._get_managed_string().lstrip()
job["description"] = description + self._get_managed_string().lstrip()
def _getfullname(self, data):
if 'folder' in data:
return "%s/%s" % (data['folder'], data['name'])
if "folder" in data:
return "%s/%s" % (data["folder"], data["name"])
return data['name']
return data["name"]
def expandYaml(self, registry, jobs_glob=None):
changed = True
while changed:
changed = False
for module in registry.modules:
if hasattr(module, 'handle_data'):
if hasattr(module, "handle_data"):
if module.handle_data(self.data):
changed = True
for job in self.data.get('job', {}).values():
for job in self.data.get("job", {}).values():
job = self._applyDefaults(job)
job['name'] = self._getfullname(job)
job["name"] = self._getfullname(job)
if jobs_glob and not matches(job['name'], jobs_glob):
logger.debug("Ignoring job {0}".format(job['name']))
if jobs_glob and not matches(job["name"], jobs_glob):
logger.debug("Ignoring job {0}".format(job["name"]))
continue
logger.debug("Expanding job '{0}'".format(job['name']))
logger.debug("Expanding job '{0}'".format(job["name"]))
self._formatDescription(job)
self.jobs.append(job)
for view in self.data.get('view', {}).values():
view['name'] = self._getfullname(view)
for view in self.data.get("view", {}).values():
view["name"] = self._getfullname(view)
if jobs_glob and not matches(view['name'], jobs_glob):
logger.debug("Ignoring view {0}".format(view['name']))
if jobs_glob and not matches(view["name"], jobs_glob):
logger.debug("Ignoring view {0}".format(view["name"]))
continue
logger.debug("Expanding view '{0}'".format(view['name']))
logger.debug("Expanding view '{0}'".format(view["name"]))
self._formatDescription(view)
self.views.append(view)
for project in self.data.get('project', {}).values():
logger.debug("Expanding project '{0}'".format(project['name']))
for project in self.data.get("project", {}).values():
logger.debug("Expanding project '{0}'".format(project["name"]))
# use a set to check for duplicate job references in projects
seen = set()
for jobspec in project.get('jobs', []):
for jobspec in project.get("jobs", []):
if isinstance(jobspec, dict):
# Singleton dict containing dict of job-specific params
jobname, jobparams = next(iter(jobspec.items()))
@ -275,18 +281,20 @@ class YamlParser(object):
if job:
# Just naming an existing defined job
if jobname in seen:
self._handle_dups("Duplicate job '{0}' specified "
"for project '{1}'"
.format(jobname, project['name']))
self._handle_dups(
"Duplicate job '{0}' specified "
"for project '{1}'".format(jobname, project["name"])
)
seen.add(jobname)
continue
# see if it's a job group
group = self._getJobGroup(jobname)
if group:
for group_jobspec in group['jobs']:
for group_jobspec in group["jobs"]:
if isinstance(group_jobspec, dict):
group_jobname, group_jobparams = \
next(iter(group_jobspec.items()))
group_jobname, group_jobparams = next(
iter(group_jobspec.items())
)
if not isinstance(group_jobparams, dict):
group_jobparams = {}
else:
@ -297,8 +305,10 @@ class YamlParser(object):
if group_jobname in seen:
self._handle_dups(
"Duplicate job '{0}' specified for "
"project '{1}'".format(group_jobname,
project['name']))
"project '{1}'".format(
group_jobname, project["name"]
)
)
seen.add(group_jobname)
continue
template = self._getJobTemplate(group_jobname)
@ -308,10 +318,9 @@ class YamlParser(object):
d.update(group)
d.update(group_jobparams)
# Except name, since the group's name is not useful
d['name'] = project['name']
d["name"] = project["name"]
if template:
self._expandYamlForTemplateJob(d, template,
jobs_glob)
self._expandYamlForTemplateJob(d, template, jobs_glob)
continue
# see if it's a template
template = self._getJobTemplate(jobname)
@ -320,11 +329,12 @@ class YamlParser(object):
d.update(jobparams)
self._expandYamlForTemplateJob(d, template, jobs_glob)
else:
raise JenkinsJobsException("Failed to find suitable "
"template named '{0}'"
.format(jobname))
raise JenkinsJobsException(
"Failed to find suitable "
"template named '{0}'".format(jobname)
)
for viewspec in project.get('views', []):
for viewspec in project.get("views", []):
if isinstance(viewspec, dict):
# Singleton dict containing dict of view-specific params
viewname, viewparams = next(iter(viewspec.items()))
@ -337,18 +347,20 @@ class YamlParser(object):
if view:
# Just naming an existing defined view
if viewname in seen:
self._handle_dups("Duplicate view '{0}' specified "
"for project '{1}'"
.format(viewname, project['name']))
self._handle_dups(
"Duplicate view '{0}' specified "
"for project '{1}'".format(viewname, project["name"])
)
seen.add(viewname)
continue
# see if it's a view group
group = self._getViewGroup(viewname)
if group:
for group_viewspec in group['views']:
for group_viewspec in group["views"]:
if isinstance(group_viewspec, dict):
group_viewname, group_viewparams = \
next(iter(group_viewspec.items()))
group_viewname, group_viewparams = next(
iter(group_viewspec.items())
)
if not isinstance(group_viewparams, dict):
group_viewparams = {}
else:
@ -359,8 +371,10 @@ class YamlParser(object):
if group_viewname in seen:
self._handle_dups(
"Duplicate view '{0}' specified for "
"project '{1}'".format(group_viewname,
project['name']))
"project '{1}'".format(
group_viewname, project["name"]
)
)
seen.add(group_viewname)
continue
template = self._getViewTemplate(group_viewname)
@ -370,10 +384,9 @@ class YamlParser(object):
d.update(group)
d.update(group_viewparams)
# Except name, since the group's name is not useful
d['name'] = project['name']
d["name"] = project["name"]
if template:
self._expandYamlForTemplateView(
d, template, jobs_glob)
self._expandYamlForTemplateView(d, template, jobs_glob)
continue
# see if it's a template
template = self._getViewTemplate(viewname)
@ -382,41 +395,46 @@ class YamlParser(object):
d.update(viewparams)
self._expandYamlForTemplateView(d, template, jobs_glob)
else:
raise JenkinsJobsException("Failed to find suitable "
"template named '{0}'"
.format(viewname))
raise JenkinsJobsException(
"Failed to find suitable "
"template named '{0}'".format(viewname)
)
# check for duplicate generated jobs
seen = set()
# walk the list in reverse so that last definition wins
for job in self.jobs[::-1]:
if job['name'] in seen:
self._handle_dups("Duplicate definitions for job '{0}' "
"specified".format(job['name']))
if job["name"] in seen:
self._handle_dups(
"Duplicate definitions for job '{0}' "
"specified".format(job["name"])
)
self.jobs.remove(job)
seen.add(job['name'])
seen.add(job["name"])
# check for duplicate generated views
seen_views = set()
# walk the list in reverse so that last definition wins
for view in self.views[::-1]:
if view['name'] in seen_views:
self._handle_dups("Duplicate definitions for view '{0}' "
"specified".format(view['name']))
if view["name"] in seen_views:
self._handle_dups(
"Duplicate definitions for view '{0}' "
"specified".format(view["name"])
)
self.views.remove(view)
seen_views.add(view['name'])
seen_views.add(view["name"])
return self.jobs, self.views
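
# An end-to-end sketch of the flow this method completes (paths invented;
# jjb_config and registry come from JJBConfig and ModuleRegistry):
parser = YamlParser(jjb_config)
parser.load_files(["jobs/"])               # a directory of .yaml files
jobs, views = parser.expandYaml(registry)  # fully expanded definitions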
def _expandYamlForTemplateJob(self, project, template, jobs_glob=None):
dimensions = []
template_name = template['name']
template_name = template["name"]
# reject keys that are not useful during yaml expansion
for k in ['jobs']:
for k in ["jobs"]:
project.pop(k)
excludes = project.pop('exclude', [])
excludes = project.pop("exclude", [])
for (k, v) in project.items():
tmpk = '{{{0}}}'.format(k)
tmpk = "{{{0}}}".format(k)
if tmpk not in template_name:
continue
if type(v) == list:
@ -429,7 +447,7 @@ class YamlParser(object):
for values in itertools.product(*dimensions):
params = copy.deepcopy(project)
params = self._applyDefaults(params, template)
params['template-name'] = re.sub(r'({|})', r'\1\1', template_name)
params["template-name"] = re.sub(r"({|})", r"\1\1", template_name)
try:
expanded_values = {}
@ -441,29 +459,32 @@ class YamlParser(object):
else:
expanded_values[k] = v
except TypeError:
project_name = project.pop('name')
project_name = project.pop("name")
logger.error(
"Exception thrown while expanding template '%s' for "
"project '%s', with expansion arguments of:\n%s\n"
"Original project input variables for template:\n%s\n"
"Most likely the inputs have items indented incorrectly "
"to describe how they should be applied.\n\nNote yaml "
"'null' is mapped to python's 'None'", template_name,
"'null' is mapped to python's 'None'",
template_name,
project_name,
"".join(local_yaml.dump({k: v}, default_flow_style=False)
for (k, v) in values),
local_yaml.dump(project, default_flow_style=False))
"".join(
local_yaml.dump({k: v}, default_flow_style=False)
for (k, v) in values
),
local_yaml.dump(project, default_flow_style=False),
)
raise
params.update(expanded_values)
try:
params = deep_format(params, params)
except Exception:
logging.error(
"Failure formatting params '%s' with itself", params)
logging.error("Failure formatting params '%s' with itself", params)
raise
if combination_matches(params, excludes):
logger.debug('Excluding combination %s', str(params))
logger.debug("Excluding combination %s", str(params))
continue
for key in template.keys():
@ -472,16 +493,22 @@ class YamlParser(object):
try:
expanded = deep_format(
template, params,
self.jjb_config.yamlparser['allow_empty_variables'])
template,
params,
self.jjb_config.yamlparser["allow_empty_variables"],
)
except Exception:
logging.error(
"Failure formatting template '%s', containing '%s' with "
"params '%s'", template_name, template, params)
"params '%s'",
template_name,
template,
params,
)
raise
expanded['name'] = self._getfullname(expanded)
expanded["name"] = self._getfullname(expanded)
job_name = expanded.get('name')
job_name = expanded.get("name")
if jobs_glob and not matches(job_name, jobs_glob):
continue
@ -495,29 +522,29 @@ class YamlParser(object):
# Views related
def _getView(self, name):
view = self.data.get('view', {}).get(name, None)
view = self.data.get("view", {}).get(name, None)
if not view:
return view
return self._applyDefaults(view)
def _getViewGroup(self, name):
return self.data.get('view-group', {}).get(name, None)
return self.data.get("view-group", {}).get(name, None)
def _getViewTemplate(self, name):
view = self.data.get('view-template', {}).get(name, None)
view = self.data.get("view-template", {}).get(name, None)
if not view:
return view
return self._applyDefaults(view)
def _expandYamlForTemplateView(self, project, template, views_glob=None):
dimensions = []
template_name = template['name']
template_name = template["name"]
# reject keys that are not useful during yaml expansion
for k in ['views']:
for k in ["views"]:
project.pop(k)
excludes = project.pop('exclude', [])
excludes = project.pop("exclude", [])
for (k, v) in project.items():
tmpk = '{{{0}}}'.format(k)
tmpk = "{{{0}}}".format(k)
if tmpk not in template_name:
continue
if type(v) == list:
@ -543,19 +570,19 @@ class YamlParser(object):
params.update(expanded_values)
params = deep_format(params, params)
if combination_matches(params, excludes):
logger.debug('Excluding combination %s', str(params))
logger.debug("Excluding combination %s", str(params))
continue
for key in template.keys():
if key not in params:
params[key] = template[key]
params['template-name'] = template_name
params["template-name"] = template_name
expanded = deep_format(
template, params,
self.jjb_config.yamlparser['allow_empty_variables'])
template, params, self.jjb_config.yamlparser["allow_empty_variables"]
)
view_name = expanded.get('name')
view_name = expanded.get("name")
if views_glob and not matches(view_name, views_glob):
continue


@ -25,9 +25,7 @@ from jenkins_jobs.errors import JenkinsJobsException
from jenkins_jobs.formatter import deep_format
from jenkins_jobs.local_yaml import Jinja2Loader
__all__ = [
"ModuleRegistry"
]
__all__ = ["ModuleRegistry"]
logger = logging.getLogger(__name__)
@ -47,12 +45,11 @@ class ModuleRegistry(object):
else:
self.plugins_dict = self._get_plugins_info_dict(plugins_list)
for entrypoint in pkg_resources.iter_entry_points(
group='jenkins_jobs.modules'):
for entrypoint in pkg_resources.iter_entry_points(group="jenkins_jobs.modules"):
Mod = entrypoint.load()
mod = Mod(self)
self.modules.append(mod)
self.modules.sort(key=operator.attrgetter('sequence'))
self.modules.sort(key=operator.attrgetter("sequence"))
if mod.component_type is not None:
self.modules_by_component_type[mod.component_type] = entrypoint
@ -64,12 +61,13 @@ class ModuleRegistry(object):
return a dictionary with the longName and shortName of the plugin
mapped to its plugin info dictionary.
"""
version = plugin_info.get('version', '0')
plugin_info['version'] = re.sub(r'(.*)-(?:SNAPSHOT|BETA).*',
r'\g<1>.preview', version)
version = plugin_info.get("version", "0")
plugin_info["version"] = re.sub(
r"(.*)-(?:SNAPSHOT|BETA).*", r"\g<1>.preview", version
)
aliases = []
for key in ['longName', 'shortName']:
for key in ["longName", "shortName"]:
value = plugin_info.get(key, None)
if value is not None:
aliases.append(value)
@ -130,8 +128,7 @@ class ModuleRegistry(object):
def set_parser_data(self, parser_data):
self.__parser_data = parser_data
def dispatch(self, component_type, xml_parent,
component, template_data={}):
def dispatch(self, component_type, xml_parent, component, template_data={}):
"""This is a method that you can call from your implementation of
Base.gen_xml or component. It allows modules to define a type
of component, and benefit from extensibility via Python
@ -152,8 +149,9 @@ class ModuleRegistry(object):
"""
if component_type not in self.modules_by_component_type:
raise JenkinsJobsException("Unknown component type: "
"'{0}'.".format(component_type))
raise JenkinsJobsException(
"Unknown component type: " "'{0}'.".format(component_type)
)
entry_point = self.modules_by_component_type[component_type]
component_list_type = entry_point.load().component_list_type
@ -167,12 +165,16 @@ class ModuleRegistry(object):
# that don't contain any variables, we also deep format those.
try:
component_data = deep_format(
component_data, template_data,
self.jjb_config.yamlparser['allow_empty_variables'])
component_data,
template_data,
self.jjb_config.yamlparser["allow_empty_variables"],
)
except Exception:
logging.error(
"Failure formatting component ('%s') data '%s'",
name, component_data)
name,
component_data,
)
raise
else:
# The component is a simple string name, eg "run-tests"
@ -185,41 +187,54 @@ class ModuleRegistry(object):
module_eps = []
# auto build entry points by inferring from base component_types
mod = pkg_resources.EntryPoint(
"__all__", entry_point.module_name, dist=entry_point.dist)
"__all__", entry_point.module_name, dist=entry_point.dist
)
Mod = mod.load()
func_eps = [Mod.__dict__.get(a) for a in dir(Mod)
if isinstance(Mod.__dict__.get(a),
types.FunctionType)]
func_eps = [
Mod.__dict__.get(a)
for a in dir(Mod)
if isinstance(Mod.__dict__.get(a), types.FunctionType)
]
for func_ep in func_eps:
try:
# extract entry point based on docstring
name_line = func_ep.__doc__.split('\n')
if not name_line[0].startswith('yaml:'):
logger.debug("Ignoring '%s' as an entry point" %
name_line)
name_line = func_ep.__doc__.split("\n")
if not name_line[0].startswith("yaml:"):
logger.debug("Ignoring '%s' as an entry point" % name_line)
continue
ep_name = name_line[0].split(' ')[1]
ep_name = name_line[0].split(" ")[1]
except (AttributeError, IndexError):
# AttributeError by docstring not being defined as
# a string to have split called on it.
# IndexError raised by name_line not containing anything
# after the 'yaml:' string.
logger.debug("Not including func '%s' as an entry point"
% func_ep.__name__)
logger.debug(
"Not including func '%s' as an entry point" % func_ep.__name__
)
continue
module_eps.append(
pkg_resources.EntryPoint(
ep_name, entry_point.module_name,
dist=entry_point.dist, attrs=(func_ep.__name__,)))
ep_name,
entry_point.module_name,
dist=entry_point.dist,
attrs=(func_ep.__name__,),
)
)
logger.debug(
"Adding auto EP '%s=%s:%s'" %
(ep_name, entry_point.module_name, func_ep.__name__))
"Adding auto EP '%s=%s:%s'"
% (ep_name, entry_point.module_name, func_ep.__name__)
)
# load from explicitly defined entry points
module_eps.extend(list(pkg_resources.iter_entry_points(
group='jenkins_jobs.{0}'.format(component_list_type))))
module_eps.extend(
list(
pkg_resources.iter_entry_points(
group="jenkins_jobs.{0}".format(component_list_type)
)
)
)
eps = {}
for module_ep in module_eps:
@ -227,14 +242,14 @@ class ModuleRegistry(object):
raise JenkinsJobsException(
"Duplicate entry point found for component type: "
"'{0}', '{0}',"
"name: '{1}'".format(component_type, name))
"name: '{1}'".format(component_type, name)
)
eps[module_ep.name] = module_ep
# cache both sets of entry points
self._entry_points_cache[component_list_type] = eps
logger.debug("Cached entry point group %s = %s",
component_list_type, eps)
logger.debug("Cached entry point group %s = %s", component_list_type, eps)
# check for macro first
component = self.parser_data.get(component_type, {}).get(name)
@ -244,7 +259,8 @@ class ModuleRegistry(object):
logger.warning(
"You have a macro ('%s') defined for '%s' "
"component type that is masking an inbuilt "
"definition" % (name, component_type))
"definition" % (name, component_type)
)
for b in component[component_list_type]:
# Pass component_data in as template data to this function
@ -255,6 +271,7 @@ class ModuleRegistry(object):
func = eps[name].load()
func(self, xml_parent, component_data)
else:
raise JenkinsJobsException("Unknown entry point or macro '{0}' "
"for component type: '{1}'.".
format(name, component_type))
raise JenkinsJobsException(
"Unknown entry point or macro '{0}' "
"for component type: '{1}'.".format(name, component_type)
)
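
# A sketch of a typical call site (component name invented): a module's
# gen_xml hands each YAML-listed component to dispatch, which resolves it
# against macros and entry points as above.
for builder in data.get("builders", []):
    registry.dispatch("builder", xml_builders, builder)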


@ -29,7 +29,7 @@ from sphinx.ext.autodoc import FunctionDocumenter
from sphinx.locale import _
yaml_sig_re = re.compile(r'yaml:\s*(.*)')
yaml_sig_re = re.compile(r"yaml:\s*(.*)")
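# Matches the leading docstring line of YAML-documented functions,
# e.g. "yaml: build-blocker" captures "build-blocker".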
class PyYAMLFunction(PyModulelevel):
@ -48,15 +48,14 @@ class PyYAMLFunction(PyModulelevel):
retann = None
# determine module and class name (if applicable), as well as full name
modname = self.options.get(
'module', self.env.temp_data.get('py:module'))
classname = self.env.temp_data.get('py:class')
modname = self.options.get("module", self.env.temp_data.get("py:module"))
classname = self.env.temp_data.get("py:class")
fullname = name
signode['module'] = modname
signode['class'] = classname
signode['fullname'] = fullname
signode["module"] = modname
signode["class"] = classname
signode["fullname"] = fullname
sig_prefix = self.get_signature_prefix(sig)
if sig_prefix:
@ -65,7 +64,7 @@ class PyYAMLFunction(PyModulelevel):
if name_prefix:
signode += addnodes.desc_addname(name_prefix, name_prefix)
anno = self.options.get('annotation')
anno = self.options.get("annotation")
signode += addnodes.desc_name(name, name)
if not arglist:
@ -75,29 +74,30 @@ class PyYAMLFunction(PyModulelevel):
if retann:
signode += addnodes.desc_returns(retann, retann)
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
signode += addnodes.desc_annotation(" " + anno, " " + anno)
return fullname, name_prefix
_pseudo_parse_arglist(signode, arglist)
if retann:
signode += addnodes.desc_returns(retann, retann)
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
signode += addnodes.desc_annotation(" " + anno, " " + anno)
return fullname, name_prefix
def get_index_text(self, modname, name_cls):
return _('%s (in module %s)') % (name_cls[0], modname)
return _("%s (in module %s)") % (name_cls[0], modname)
class YAMLFunctionDocumenter(FunctionDocumenter):
priority = FunctionDocumenter.priority + 10
objtype = 'yamlfunction'
directivetype = 'yamlfunction'
objtype = "yamlfunction"
directivetype = "yamlfunction"
@classmethod
def can_document_member(cls, member, membername, isattr, parent):
if not FunctionDocumenter.can_document_member(member, membername,
isattr, parent):
if not FunctionDocumenter.can_document_member(
member, membername, isattr, parent
):
return False
if member.__doc__ is not None and yaml_sig_re.match(member.__doc__):
return True
@ -108,7 +108,7 @@ class YAMLFunctionDocumenter(FunctionDocumenter):
if len(docstrings) != 1:
return
doclines = docstrings[0]
setattr(self, '__new_doclines', doclines)
setattr(self, "__new_doclines", doclines)
if not doclines:
return
# match first line of docstring against signature RE
@ -121,11 +121,11 @@ class YAMLFunctionDocumenter(FunctionDocumenter):
i = 1
while i < len(doclines) and not doclines[i].strip():
i += 1
setattr(self, '__new_doclines', doclines[i:])
setattr(self, "__new_doclines", doclines[i:])
return name
def get_doc(self, encoding=None, ignore=1):
lines = getattr(self, '__new_doclines', None)
lines = getattr(self, "__new_doclines", None)
if lines is not None:
return [lines]
return Documenter.get_doc(self, encoding, ignore)
@ -133,7 +133,7 @@ class YAMLFunctionDocumenter(FunctionDocumenter):
def format_signature(self):
result = self._find_signature()
self._name = result
return ''
return ""
def format_name(self):
return self._name
@ -141,4 +141,4 @@ class YAMLFunctionDocumenter(FunctionDocumenter):
def setup(app):
app.add_autodocumenter(YAMLFunctionDocumenter)
app.add_directive_to_domain('py', 'yamlfunction', PyYAMLFunction)
app.add_directive_to_domain("py", "yamlfunction", PyYAMLFunction)


@ -22,14 +22,14 @@ import os.path
from six.moves import input
def wrap_stream(stream, encoding='utf-8'):
def wrap_stream(stream, encoding="utf-8"):
try:
stream_enc = stream.encoding
except AttributeError:
stream_enc = locale.getpreferredencoding()
if hasattr(stream, 'buffer'):
if hasattr(stream, "buffer"):
stream = stream.buffer
if str(stream_enc).lower() == str(encoding).lower():
@ -47,21 +47,27 @@ def recurse_path(root, excludes=None):
patterns = [e for e in excludes if os.path.sep not in e]
absolute = [e for e in excludes if os.path.isabs(e)]
relative = [e for e in excludes if os.path.sep in e and
not os.path.isabs(e)]
relative = [e for e in excludes if os.path.sep in e and not os.path.isabs(e)]
for root, dirs, files in os.walk(basepath, topdown=True):
# sort in-place to ensure dirnames are visited in alphabetical order
# a predictable order makes it easier to use the retain_anchors option
dirs.sort()
dirs[:] = [
d for d in dirs
d
for d in dirs
if not any([fnmatch.fnmatch(d, pattern) for pattern in patterns])
if not any([fnmatch.fnmatch(os.path.abspath(os.path.join(root, d)),
path)
for path in absolute])
if not any([fnmatch.fnmatch(os.path.relpath(os.path.join(root, d)),
path)
for path in relative])
if not any(
[
fnmatch.fnmatch(os.path.abspath(os.path.join(root, d)), path)
for path in absolute
]
)
if not any(
[
fnmatch.fnmatch(os.path.relpath(os.path.join(root, d)), path)
for path in relative
]
)
]
pathlist.extend([os.path.join(root, path) for path in dirs])
@ -69,5 +75,5 @@ def recurse_path(root, excludes=None):
def confirm(question):
answer = input('%s (Y/N): ' % question).upper().strip()
return answer == 'Y'
answer = input("%s (Y/N): " % question).upper().strip()
return answer == "Y"


@ -17,4 +17,4 @@
from pbr.version import VersionInfo
version_info = VersionInfo('jenkins-job-builder')
version_info = VersionInfo("jenkins-job-builder")


@ -22,10 +22,7 @@ import xml.etree.ElementTree as XML
from jenkins_jobs import errors
__all__ = [
"XmlJobGenerator",
"XmlJob"
]
__all__ = ["XmlJobGenerator", "XmlJob"]
def remove_ignorable_whitespace(node):
@ -59,8 +56,8 @@ class XmlJob(object):
return hashlib.md5(self.output()).hexdigest()
def output(self):
out = minidom.parseString(XML.tostring(self.xml, encoding='UTF-8'))
return out.toprettyxml(indent=' ', encoding='utf-8')
out = minidom.parseString(XML.tostring(self.xml, encoding="UTF-8"))
return out.toprettyxml(indent=" ", encoding="utf-8")
class XmlGenerator(object):
@ -86,25 +83,29 @@ class XmlGenerator(object):
kind = data.get(self.kind_attribute, self.kind_default)
for ep in pkg_resources.iter_entry_points(
group=self.entry_point_group, name=kind):
group=self.entry_point_group, name=kind
):
Mod = ep.load()
mod = Mod(self.registry)
xml = mod.root_xml(data)
if "view-type" not in data:
self._gen_xml(xml, data)
obj = XmlJob(xml, data['name'])
obj = XmlJob(xml, data["name"])
return obj
names = [
ep.name for ep in pkg_resources.iter_entry_points(
group=self.entry_point_group)]
ep.name
for ep in pkg_resources.iter_entry_points(group=self.entry_point_group)
]
raise errors.JenkinsJobsException(
'Unrecognized {}: {} (supported types are: {})'.format(
self.kind_attribute, kind, ', '.join(names)))
"Unrecognized {}: {} (supported types are: {})".format(
self.kind_attribute, kind, ", ".join(names)
)
)
def _gen_xml(self, xml, data):
for module in self.registry.modules:
if hasattr(module, 'gen_xml'):
if hasattr(module, "gen_xml"):
module.gen_xml(xml, data)
@ -112,15 +113,17 @@ class XmlJobGenerator(XmlGenerator):
""" This class is responsible for generating Jenkins Configuration XML from
a compatible intermediate representation of Jenkins Jobs.
"""
entry_point_group = 'jenkins_jobs.projects'
kind_attribute = 'project-type'
kind_default = 'freestyle'
entry_point_group = "jenkins_jobs.projects"
kind_attribute = "project-type"
kind_default = "freestyle"
class XmlViewGenerator(XmlGenerator):
""" This class is responsible for generating Jenkins Configuration XML from
a compatible intermediate representation of Jenkins Views.
"""
entry_point_group = 'jenkins_jobs.views'
kind_attribute = 'view-type'
kind_default = 'list'
entry_point_group = "jenkins_jobs.views"
kind_attribute = "view-type"
kind_default = "list"


@ -100,10 +100,8 @@ jenkins_jobs.modules =
zuul=jenkins_jobs.modules.zuul:Zuul
[flake8]
# These are ignored intentionally in openstack-infra projects; please
# don't submit patches that solely correct them or enable them.
# W504 is controversial an apparently conflicting with W503, being impossible
# to solve both of them while still keeping the line length limited.
ignore = E125,E128,H,W504
# Based on https://ljvmiranda921.github.io/notebook/2018/06/21/precommits-using-black-and-flake8/
ignore = E125,E128,E203,E501,H,W504,W503
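# E203 (whitespace before ':') and W503 (line break before a binary
# operator) conflict with black's output, so they are ignored to match
# the 88-character limit black enforces below.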
show-source = True
exclude = .virtualenv,.venv,.tox,dist,build,*.egg,.test
max-line-length = 88


@ -24,6 +24,4 @@ try:
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=1.8'],
pbr=True)
setuptools.setup(setup_requires=["pbr>=1.8"], pbr=True)


@ -59,9 +59,13 @@ except ImportError:
import mock # noqa
def get_scenarios(fixtures_path, in_ext='yaml', out_ext='xml',
plugins_info_ext='plugins_info.yaml',
filter_func=None):
def get_scenarios(
fixtures_path,
in_ext="yaml",
out_ext="xml",
plugins_info_ext="plugins_info.yaml",
filter_func=None,
):
"""Returns a list of scenarios, each scenario being described
by two parameters (yaml and xml filenames by default).
- content of the fixture output file (aka expected)
@ -75,8 +79,9 @@ def get_scenarios(fixtures_path, in_ext='yaml', out_ext='xml',
else:
files[fn] = [os.path.join(dirpath, fn)]
input_files = [files[f][0] for f in files if
re.match(r'.*\.{0}$'.format(in_ext), f)]
input_files = [
files[f][0] for f in files if re.match(r".*\.{0}$".format(in_ext), f)
]
for input_filename in input_files:
if input_filename.endswith(plugins_info_ext):
@ -85,21 +90,22 @@ def get_scenarios(fixtures_path, in_ext='yaml', out_ext='xml',
if callable(filter_func) and filter_func(input_filename):
continue
output_candidate = re.sub(r'\.{0}$'.format(in_ext),
'.{0}'.format(out_ext), input_filename)
output_candidate = re.sub(
r"\.{0}$".format(in_ext), ".{0}".format(out_ext), input_filename
)
# assume empty file if no output candidate found
if os.path.basename(output_candidate) in files:
out_filenames = files[os.path.basename(output_candidate)]
else:
out_filenames = None
plugins_info_candidate = re.sub(r'\.{0}$'.format(in_ext),
'.{0}'.format(plugins_info_ext),
input_filename)
plugins_info_candidate = re.sub(
r"\.{0}$".format(in_ext), ".{0}".format(plugins_info_ext), input_filename
)
if os.path.basename(plugins_info_candidate) not in files:
plugins_info_candidate = None
conf_candidate = re.sub(r'\.yaml$|\.json$', '.conf', input_filename)
conf_candidate = re.sub(r"\.yaml$|\.json$", ".conf", input_filename)
conf_filename = files.get(os.path.basename(conf_candidate), None)
if conf_filename:
@ -108,12 +114,17 @@ def get_scenarios(fixtures_path, in_ext='yaml', out_ext='xml',
# for testing purposes we want to avoid using user config files
conf_filename = os.devnull
scenarios.append((input_filename, {
'in_filename': input_filename,
'out_filenames': out_filenames,
'conf_filename': conf_filename,
'plugins_info_filename': plugins_info_candidate,
}))
scenarios.append(
(
input_filename,
{
"in_filename": input_filename,
"out_filenames": out_filenames,
"conf_filename": conf_filename,
"plugins_info_filename": plugins_info_candidate,
},
)
)
return scenarios
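# Each scenario is an (input_filename, attr_dict) pair; testscenarios
# applies the dict entries as test-case attributes. Hypothetical shape:
#   ("fixtures/foo.yaml",
#    {"in_filename": "fixtures/foo.yaml",
#     "out_filenames": ["fixtures/foo.xml"],
#     "conf_filename": os.devnull,
#     "plugins_info_filename": None})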
@ -121,7 +132,7 @@ def get_scenarios(fixtures_path, in_ext='yaml', out_ext='xml',
class BaseTestCase(testtools.TestCase):
# TestCase settings:
maxDiff = None # always dump text difference
maxDiff = None # always dump text difference
longMessage = True # keep normal error message when providing our
def setUp(self):
@ -137,12 +148,12 @@ class BaseTestCase(testtools.TestCase):
# Read XML content, assuming it is unicode encoded
xml_content = ""
for f in sorted(self.out_filenames):
with io.open(f, 'r', encoding='utf-8') as xml_file:
with io.open(f, "r", encoding="utf-8") as xml_file:
xml_content += u"%s" % xml_file.read()
return xml_content
def _read_yaml_content(self, filename):
with io.open(filename, 'r', encoding='utf-8') as yaml_file:
with io.open(filename, "r", encoding="utf-8") as yaml_file:
yaml_content = yaml.load(yaml_file)
return yaml_content
@ -170,10 +181,10 @@ class BaseScenariosTestCase(testscenarios.TestWithScenarios, BaseTestCase):
plugins_info = None
if self.plugins_info_filename:
plugins_info = self._read_yaml_content(self.plugins_info_filename)
self.addDetail("plugins-info-filename",
text_content(self.plugins_info_filename))
self.addDetail("plugins-info",
text_content(str(plugins_info)))
self.addDetail(
"plugins-info-filename", text_content(self.plugins_info_filename)
)
self.addDetail("plugins-info", text_content(str(plugins_info)))
parser = YamlParser(jjb_config)
registry = ModuleRegistry(jjb_config, plugins_info)
@ -182,76 +193,89 @@ class BaseScenariosTestCase(testscenarios.TestWithScenarios, BaseTestCase):
pub = self.klass(registry)
project = None
if ('project-type' in yaml_content):
if (yaml_content['project-type'] == "maven"):
if "project-type" in yaml_content:
if yaml_content["project-type"] == "maven":
project = project_maven.Maven(registry)
elif (yaml_content['project-type'] == "matrix"):
elif yaml_content["project-type"] == "matrix":
project = project_matrix.Matrix(registry)
elif (yaml_content['project-type'] == "flow"):
elif yaml_content["project-type"] == "flow":
project = project_flow.Flow(registry)
elif (yaml_content['project-type'] == "multijob"):
elif yaml_content["project-type"] == "multijob":
project = project_multijob.MultiJob(registry)
elif (yaml_content['project-type'] == "multibranch"):
elif yaml_content["project-type"] == "multibranch":
project = project_multibranch.WorkflowMultiBranch(registry)
elif (yaml_content['project-type'] == "multibranch-defaults"):
project = project_multibranch.WorkflowMultiBranchDefaults(registry) # noqa
elif (yaml_content['project-type'] == "externaljob"):
elif yaml_content["project-type"] == "multibranch-defaults":
project = project_multibranch.WorkflowMultiBranchDefaults(
registry
) # noqa
elif yaml_content["project-type"] == "externaljob":
project = project_externaljob.ExternalJob(registry)
if 'view-type' in yaml_content:
if yaml_content['view-type'] == "all":
if "view-type" in yaml_content:
if yaml_content["view-type"] == "all":
project = view_all.All(None)
elif yaml_content['view-type'] == "list":
elif yaml_content["view-type"] == "list":
project = view_list.List(None)
elif yaml_content['view-type'] == "pipeline":
elif yaml_content["view-type"] == "pipeline":
project = view_pipeline.Pipeline(None)
else:
raise InvalidAttributeError(
'view-type', yaml_content['view-type'])
raise InvalidAttributeError("view-type", yaml_content["view-type"])
if project:
xml_project = project.root_xml(yaml_content)
else:
xml_project = XML.Element('project')
xml_project = XML.Element("project")
# Generate the XML tree directly with modules/general
pub.gen_xml(xml_project, yaml_content)
# check output file is under correct path
if 'name' in yaml_content:
if "name" in yaml_content:
prefix = os.path.dirname(self.in_filename)
# split using '/' since fullname uses URL path separator
expected_folders = [os.path.normpath(
os.path.join(prefix,
'/'.join(parser._getfullname(yaml_content).
split('/')[:-1])))]
expected_folders = [
os.path.normpath(
os.path.join(
prefix,
"/".join(parser._getfullname(yaml_content).split("/")[:-1]),
)
)
]
actual_folders = [os.path.dirname(f) for f in self.out_filenames]
self.assertEquals(
expected_folders, actual_folders,
"Output file under wrong path, was '%s', should be '%s'" %
(self.out_filenames[0],
os.path.join(expected_folders[0],
os.path.basename(self.out_filenames[0]))))
expected_folders,
actual_folders,
"Output file under wrong path, was '%s', should be '%s'"
% (
self.out_filenames[0],
os.path.join(
expected_folders[0], os.path.basename(self.out_filenames[0])
),
),
)
# Prettify generated XML
pretty_xml = XmlJob(xml_project, 'fixturejob').output().decode('utf-8')
pretty_xml = XmlJob(xml_project, "fixturejob").output().decode("utf-8")
self.assertThat(
pretty_xml,
testtools.matchers.DocTestMatches(expected_xml,
doctest.ELLIPSIS |
doctest.REPORT_NDIFF)
testtools.matchers.DocTestMatches(
expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF
),
)
class SingleJobTestCase(BaseScenariosTestCase):
def test_yaml_snippet(self):
config = self._get_config()
expected_xml = self._read_utf8_content().strip() \
.replace('<BLANKLINE>', '').replace('\n\n', '\n')
expected_xml = (
self._read_utf8_content()
.strip()
.replace("<BLANKLINE>", "")
.replace("\n\n", "\n")
)
parser = YamlParser(config)
parser.parse(self.in_filename)
@ -259,10 +283,10 @@ class SingleJobTestCase(BaseScenariosTestCase):
plugins_info = None
if self.plugins_info_filename:
plugins_info = self._read_yaml_content(self.plugins_info_filename)
self.addDetail("plugins-info-filename",
text_content(self.plugins_info_filename))
self.addDetail("plugins-info",
text_content(str(plugins_info)))
self.addDetail(
"plugins-info-filename", text_content(self.plugins_info_filename)
)
self.addDetail("plugins-info", text_content(str(plugins_info)))
registry = ModuleRegistry(config, plugins_info)
registry.set_parser_data(parser.data)
@ -277,53 +301,62 @@ class SingleJobTestCase(BaseScenariosTestCase):
# check reference files are under correct path for folders
prefix = os.path.dirname(self.in_filename)
# split using '/' since fullname uses URL path separator
expected_folders = list(set([
os.path.normpath(
os.path.join(prefix,
'/'.join(job_data['name'].split('/')[:-1])))
for job_data in job_data_list
]))
expected_folders = list(
set(
[
os.path.normpath(
os.path.join(prefix, "/".join(job_data["name"].split("/")[:-1]))
)
for job_data in job_data_list
]
)
)
actual_folders = [os.path.dirname(f) for f in self.out_filenames]
six.assertCountEqual(
self,
expected_folders, actual_folders,
"Output file under wrong path, was '%s', should be '%s'" %
(self.out_filenames[0],
os.path.join(expected_folders[0],
os.path.basename(self.out_filenames[0]))))
expected_folders,
actual_folders,
"Output file under wrong path, was '%s', should be '%s'"
% (
self.out_filenames[0],
os.path.join(
expected_folders[0], os.path.basename(self.out_filenames[0])
),
),
)
# Prettify generated XML
pretty_xml = u"\n".join(job.output().decode('utf-8')
for job in xml_jobs) \
.strip().replace('\n\n', '\n')
pretty_xml = (
u"\n".join(job.output().decode("utf-8") for job in xml_jobs)
.strip()
.replace("\n\n", "\n")
)
self.assertThat(
pretty_xml,
testtools.matchers.DocTestMatches(expected_xml,
doctest.ELLIPSIS |
doctest.REPORT_NDIFF))
testtools.matchers.DocTestMatches(
expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF
),
)
class JsonTestCase(BaseScenariosTestCase):
def test_yaml_snippet(self):
expected_json = self._read_utf8_content()
yaml_content = self._read_yaml_content(self.in_filename)
pretty_json = json.dumps(yaml_content, indent=4,
separators=(',', ': '))
pretty_json = json.dumps(yaml_content, indent=4, separators=(",", ": "))
self.assertThat(
pretty_json,
testtools.matchers.DocTestMatches(expected_json,
doctest.ELLIPSIS |
doctest.REPORT_NDIFF)
testtools.matchers.DocTestMatches(
expected_json, doctest.ELLIPSIS | doctest.REPORT_NDIFF
),
)
class YamlTestCase(BaseScenariosTestCase):
def test_yaml_snippet(self):
expected_yaml = self._read_utf8_content()
yaml_content = self._read_yaml_content(self.in_filename)
@ -337,7 +370,7 @@ class YamlTestCase(BaseScenariosTestCase):
self.assertThat(
pretty_yaml,
testtools.matchers.DocTestMatches(expected_yaml,
doctest.ELLIPSIS |
doctest.REPORT_NDIFF)
testtools.matchers.DocTestMatches(
expected_yaml, doctest.ELLIPSIS | doctest.REPORT_NDIFF
),
)
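
Note: the assertions above, and throughout these test cases, compare prettified output with testtools.matchers.DocTestMatches, where doctest.ELLIPSIS lets "..." stand for uninteresting output and doctest.REPORT_NDIFF yields a readable diff on failure. A minimal standalone illustration (the XML snippet is invented for the example):

import doctest

from testtools.matchers import DocTestMatches

matcher = DocTestMatches(
    "<project>...<description>demo</description>...", doctest.ELLIPSIS
)
# match() returns None on success and a Mismatch object otherwise.
assert matcher.match("<project><actions/><description>demo</description></project>") is None
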


@ -22,6 +22,6 @@ from tests import base
class TestCaseModuleBuilders(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = builders.Builders


@ -21,28 +21,25 @@ from tests.base import mock
class TestCaseJobCache(base.BaseTestCase):
@mock.patch('jenkins_jobs.builder.JobCache.get_cache_dir',
lambda x: '/bad/file')
@mock.patch("jenkins_jobs.builder.JobCache.get_cache_dir", lambda x: "/bad/file")
def test_save_on_exit(self):
"""
Test that the cache is saved on normal object deletion
"""
with mock.patch('jenkins_jobs.builder.JobCache.save') as save_mock:
with mock.patch('os.path.isfile', return_value=False):
with mock.patch('jenkins_jobs.builder.JobCache._lock'):
with mock.patch("jenkins_jobs.builder.JobCache.save") as save_mock:
with mock.patch("os.path.isfile", return_value=False):
with mock.patch("jenkins_jobs.builder.JobCache._lock"):
jenkins_jobs.builder.JobCache("dummy")
save_mock.assert_called_with()
@mock.patch('jenkins_jobs.builder.JobCache.get_cache_dir',
lambda x: '/bad/file')
@mock.patch("jenkins_jobs.builder.JobCache.get_cache_dir", lambda x: "/bad/file")
def test_cache_file(self):
"""
Test providing a cachefile.
"""
test_file = os.path.abspath(__file__)
with mock.patch('os.path.join', return_value=test_file):
with mock.patch('yaml.load'):
with mock.patch('jenkins_jobs.builder.JobCache._lock'):
with mock.patch("os.path.join", return_value=test_file):
with mock.patch("yaml.load"):
with mock.patch("jenkins_jobs.builder.JobCache._lock"):
jenkins_jobs.builder.JobCache("dummy").data = None
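
Note: these cache tests stack several mock.patch context managers; the same stubbing can also be written as a single with-statement. A self-contained sketch of the pattern (the cache_is_stale helper is hypothetical):

import os
from unittest import mock


def cache_is_stale(path):
    # Hypothetical helper: treat the cache as stale when its file is gone.
    return not os.path.isfile(path)


with mock.patch("os.path.isfile", return_value=False) as isfile_mock:
    assert cache_is_stale("/bad/file")
isfile_mock.assert_called_once_with("/bad/file")
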


@ -24,51 +24,49 @@ from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch('jenkins_jobs.builder.JenkinsManager.get_plugins_info',
mock.MagicMock)
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock)
class DeleteTests(CmdTestsBase):
@mock.patch('jenkins_jobs.cli.subcommand.update.'
'JenkinsManager.delete_jobs')
@mock.patch('jenkins_jobs.cli.subcommand.update.'
'JenkinsManager.delete_views')
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_jobs")
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_views")
def test_delete_single_job(self, delete_job_mock, delete_view_mock):
"""
Test handling the deletion of a single Jenkins job.
"""
args = ['--conf', self.default_config_file, 'delete', 'test_job']
args = ["--conf", self.default_config_file, "delete", "test_job"]
self.execute_jenkins_jobs_with_args(args)
@mock.patch('jenkins_jobs.cli.subcommand.update.'
'JenkinsManager.delete_jobs')
@mock.patch('jenkins_jobs.cli.subcommand.update.'
'JenkinsManager.delete_views')
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_jobs")
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_views")
def test_delete_multiple_jobs(self, delete_job_mock, delete_view_mock):
"""
Test handling the deletion of multiple Jenkins jobs.
"""
args = ['--conf', self.default_config_file,
'delete', 'test_job1', 'test_job2']
args = ["--conf", self.default_config_file, "delete", "test_job1", "test_job2"]
self.execute_jenkins_jobs_with_args(args)
@mock.patch('jenkins_jobs.builder.JenkinsManager.delete_job')
@mock.patch("jenkins_jobs.builder.JenkinsManager.delete_job")
def test_delete_using_glob_params(self, delete_job_mock):
"""
Test handling the deletion of multiple Jenkins jobs using the glob
parameters feature.
"""
args = ['--conf', self.default_config_file,
'delete', '--path',
os.path.join(self.fixtures_path,
'cmd-002.yaml'),
'*bar*']
args = [
"--conf",
self.default_config_file,
"delete",
"--path",
os.path.join(self.fixtures_path, "cmd-002.yaml"),
"*bar*",
]
self.execute_jenkins_jobs_with_args(args)
calls = [mock.call('bar001'), mock.call('bar002')]
calls = [mock.call("bar001"), mock.call("bar002")]
delete_job_mock.assert_has_calls(calls, any_order=True)
self.assertEqual(delete_job_mock.call_count, len(calls),
"Jenkins.delete_job() was called '%s' times when "
"expected '%s'" % (delete_job_mock.call_count,
len(calls)))
self.assertEqual(
delete_job_mock.call_count,
len(calls),
"Jenkins.delete_job() was called '%s' times when "
"expected '%s'" % (delete_job_mock.call_count, len(calls)),
)
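
Note: the glob behaviour exercised here ("*bar*" selecting bar001 and bar002) amounts to fnmatch-style matching over job names. A rough sketch of that idea, not JJB's actual implementation:

import fnmatch


def filter_jobs(job_names, globs):
    # An empty glob list selects everything, mirroring the CLI above.
    if not globs:
        return list(job_names)
    return [
        name
        for name in job_names
        if any(fnmatch.fnmatch(name, glob) for glob in globs)
    ]


assert filter_jobs(["foo001", "bar001", "bar002"], ["*bar*"]) == ["bar001", "bar002"]
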


@ -21,31 +21,27 @@ from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch('jenkins_jobs.builder.JenkinsManager.get_plugins_info',
mock.MagicMock)
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock)
class DeleteAllTests(CmdTestsBase):
@mock.patch('jenkins_jobs.cli.subcommand.update.'
'JenkinsManager.delete_all_jobs')
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_all_jobs")
def test_delete_all_accept(self, delete_job_mock):
"""
Test deletion of all jobs when the user confirms the prompt.
"""
args = ['--conf', self.default_config_file, 'delete-all']
with mock.patch('jenkins_jobs.builder.JenkinsManager.get_views',
return_value=[None]):
with mock.patch('jenkins_jobs.utils.input', return_value="y"):
args = ["--conf", self.default_config_file, "delete-all"]
with mock.patch(
"jenkins_jobs.builder.JenkinsManager.get_views", return_value=[None]
):
with mock.patch("jenkins_jobs.utils.input", return_value="y"):
self.execute_jenkins_jobs_with_args(args)
@mock.patch('jenkins_jobs.cli.subcommand.update.'
'JenkinsManager.delete_all_jobs')
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_all_jobs")
def test_delete_all_abort(self, delete_job_mock):
"""
Test that deleting all jobs is aborted when the user declines the prompt.
"""
args = ['--conf', self.default_config_file, 'delete-all']
with mock.patch('jenkins_jobs.utils.input', return_value="n"):
self.assertRaises(SystemExit,
self.execute_jenkins_jobs_with_args, args)
args = ["--conf", self.default_config_file, "delete-all"]
with mock.patch("jenkins_jobs.utils.input", return_value="n"):
self.assertRaises(SystemExit, self.execute_jenkins_jobs_with_args, args)
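
Note: both delete-all tests patch jenkins_jobs.utils.input to simulate the confirmation prompt; any answer other than "y" is expected to abort with SystemExit, which is what assertRaises checks above. A hypothetical sketch of such a gate:

import sys


def confirm_or_abort(prompt="Delete all jobs? (y/n): "):
    # Hypothetical stand-in for the gate under test: any answer other
    # than "y" aborts with SystemExit, so patching the answer to "n"
    # makes SystemExit observable to assertRaises.
    if input(prompt).lower() != "y":
        sys.exit(1)
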


@ -21,65 +21,70 @@ from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch('jenkins_jobs.builder.JenkinsManager.get_plugins_info',
mock.MagicMock)
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock)
class ListFromJenkinsTests(TestWithScenarios, CmdTestsBase):
scenarios = [
('single',
dict(jobs=['job1'], globs=[], found=['job1'])),
('multiple',
dict(jobs=['job1', 'job2'], globs=[], found=['job1', 'job2'])),
('multiple_with_glob',
dict(jobs=['job1', 'job2', 'job3'], globs=["job[1-2]"],
found=['job1', 'job2'])),
('multiple_with_multi_glob',
dict(jobs=['job1', 'job2', 'job3', 'job4'],
globs=["job1", "job[24]"],
found=['job1', 'job2', 'job4'])),
("single", dict(jobs=["job1"], globs=[], found=["job1"])),
("multiple", dict(jobs=["job1", "job2"], globs=[], found=["job1", "job2"])),
(
"multiple_with_glob",
dict(
jobs=["job1", "job2", "job3"],
globs=["job[1-2]"],
found=["job1", "job2"],
),
),
(
"multiple_with_multi_glob",
dict(
jobs=["job1", "job2", "job3", "job4"],
globs=["job1", "job[24]"],
found=["job1", "job2", "job4"],
),
),
]
@mock.patch('jenkins_jobs.builder.JenkinsManager.get_jobs')
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_jobs")
def test_list(self, get_jobs_mock):
def _get_jobs():
return [{'name': name} for name in self.jobs]
return [{"name": name} for name in self.jobs]
get_jobs_mock.side_effect = _get_jobs
console_out = io.BytesIO()
args = ['--conf', self.default_config_file, 'list'] + self.globs
args = ["--conf", self.default_config_file, "list"] + self.globs
with mock.patch('sys.stdout', console_out):
with mock.patch("sys.stdout", console_out):
self.execute_jenkins_jobs_with_args(args)
self.assertEqual(console_out.getvalue().decode('utf-8').rstrip(),
('\n'.join(self.found)))
self.assertEqual(
console_out.getvalue().decode("utf-8").rstrip(), ("\n".join(self.found))
)
@mock.patch('jenkins_jobs.builder.JenkinsManager.get_plugins_info',
mock.MagicMock)
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock)
class ListFromYamlTests(TestWithScenarios, CmdTestsBase):
scenarios = [
('all',
dict(globs=[], found=['bam001', 'bar001', 'bar002', 'baz001'])),
('some',
dict(globs=["*am*", "*002", "bar001"],
found=['bam001', 'bar001', 'bar002'])),
("all", dict(globs=[], found=["bam001", "bar001", "bar002", "baz001"])),
(
"some",
dict(
globs=["*am*", "*002", "bar001"], found=["bam001", "bar001", "bar002"]
),
),
]
def test_list(self):
path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
path = os.path.join(self.fixtures_path, "cmd-002.yaml")
console_out = io.BytesIO()
with mock.patch('sys.stdout', console_out):
with mock.patch("sys.stdout", console_out):
self.execute_jenkins_jobs_with_args(
['--conf',
self.default_config_file,
'list',
'-p',
path] + self.globs)
["--conf", self.default_config_file, "list", "-p", path] + self.globs
)
self.assertEqual(console_out.getvalue().decode('utf-8').rstrip(),
('\n'.join(self.found)))
self.assertEqual(
console_out.getvalue().decode("utf-8").rstrip(), ("\n".join(self.found))
)
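
Note: both list-test classes rely on testscenarios: each (name, dict) pair in scenarios becomes a separately generated test, with the dict entries attached as instance attributes. A minimal sketch:

import testscenarios
import testtools


class ScenarioExample(testscenarios.TestWithScenarios, testtools.TestCase):
    scenarios = [
        ("single", dict(jobs=["job1"], expected=1)),
        ("multiple", dict(jobs=["job1", "job2"], expected=2)),
    ]

    def test_job_count(self):
        # self.jobs and self.expected are injected per scenario.
        self.assertEqual(self.expected, len(self.jobs))
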


@ -35,31 +35,35 @@ from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch('jenkins_jobs.builder.JenkinsManager.get_plugins_info',
mock.MagicMock)
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock)
class TestTests(CmdTestsBase):
def test_non_existing_job(self):
"""
Run test mode and pass a non-existing job name
(probably better to fail here)
"""
args = ['--conf', self.default_config_file, 'test',
os.path.join(self.fixtures_path,
'cmd-001.yaml'),
'invalid']
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
"invalid",
]
self.execute_jenkins_jobs_with_args(args)
def test_valid_job(self):
"""
Run test mode and pass a valid job name
"""
args = ['--conf', self.default_config_file, 'test',
os.path.join(self.fixtures_path,
'cmd-001.yaml'),
'foo-job']
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
"foo-job",
]
console_out = io.BytesIO()
with mock.patch('sys.stdout', console_out):
with mock.patch("sys.stdout", console_out):
self.execute_jenkins_jobs_with_args(args)
def test_console_output(self):
@ -68,13 +72,18 @@ class TestTests(CmdTestsBase):
"""
console_out = io.BytesIO()
with mock.patch('sys.stdout', console_out):
args = ['--conf', self.default_config_file, 'test',
os.path.join(self.fixtures_path, 'cmd-001.yaml')]
with mock.patch("sys.stdout", console_out):
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
self.execute_jenkins_jobs_with_args(args)
xml_content = io.open(os.path.join(self.fixtures_path, 'cmd-001.xml'),
'r', encoding='utf-8').read()
self.assertEqual(console_out.getvalue().decode('utf-8'), xml_content)
xml_content = io.open(
os.path.join(self.fixtures_path, "cmd-001.xml"), "r", encoding="utf-8"
).read()
self.assertEqual(console_out.getvalue().decode("utf-8"), xml_content)
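
Note: the console-output tests capture stdout by patching sys.stdout and decoding the bytes afterwards. A condensed Python 3 sketch of the same technique, using io.StringIO in place of the BytesIO seen here:

import io
from unittest import mock


def emit_xml():
    # Stand-in for the XML dump the "test" subcommand writes to stdout.
    print("<project/>")


console_out = io.StringIO()
with mock.patch("sys.stdout", console_out):
    emit_xml()
assert console_out.getvalue() == "<project/>\n"
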
def test_output_dir(self):
"""
@ -83,11 +92,11 @@ class TestTests(CmdTestsBase):
"""
tmpdir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmpdir)
args = ['test', os.path.join(self.fixtures_path, 'cmd-001.yaml'),
'-o', tmpdir]
args = ["test", os.path.join(self.fixtures_path, "cmd-001.yaml"), "-o", tmpdir]
self.execute_jenkins_jobs_with_args(args)
self.expectThat(os.path.join(tmpdir, 'foo-job'),
testtools.matchers.FileExists())
self.expectThat(
os.path.join(tmpdir, "foo-job"), testtools.matchers.FileExists()
)
def test_output_dir_config_xml(self):
"""
@ -96,11 +105,18 @@ class TestTests(CmdTestsBase):
"""
tmpdir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmpdir)
args = ['test', os.path.join(self.fixtures_path, 'cmd-001.yaml'),
'-o', tmpdir, '--config-xml']
args = [
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
"-o",
tmpdir,
"--config-xml",
]
self.execute_jenkins_jobs_with_args(args)
self.expectThat(os.path.join(tmpdir, 'foo-job', 'config.xml'),
testtools.matchers.FileExists())
self.expectThat(
os.path.join(tmpdir, "foo-job", "config.xml"),
testtools.matchers.FileExists(),
)
def test_stream_input_output_no_encoding_exceed_recursion(self):
"""
@ -109,13 +125,12 @@ class TestTests(CmdTestsBase):
"""
console_out = io.BytesIO()
input_file = os.path.join(self.fixtures_path,
'large-number-of-jobs-001.yaml')
with io.open(input_file, 'r') as f:
with mock.patch('sys.stdout', console_out):
input_file = os.path.join(self.fixtures_path, "large-number-of-jobs-001.yaml")
with io.open(input_file, "r") as f:
with mock.patch("sys.stdout", console_out):
console_out.encoding = None
with mock.patch('sys.stdin', f):
args = ['test']
with mock.patch("sys.stdin", f):
args = ["test"]
self.execute_jenkins_jobs_with_args(args)
def test_stream_input_output_utf8_encoding(self):
@ -125,16 +140,17 @@ class TestTests(CmdTestsBase):
"""
console_out = io.BytesIO()
input_file = os.path.join(self.fixtures_path, 'cmd-001.yaml')
with io.open(input_file, 'r') as f:
with mock.patch('sys.stdout', console_out):
with mock.patch('sys.stdin', f):
args = ['--conf', self.default_config_file, 'test']
input_file = os.path.join(self.fixtures_path, "cmd-001.yaml")
with io.open(input_file, "r") as f:
with mock.patch("sys.stdout", console_out):
with mock.patch("sys.stdin", f):
args = ["--conf", self.default_config_file, "test"]
self.execute_jenkins_jobs_with_args(args)
xml_content = io.open(os.path.join(self.fixtures_path, 'cmd-001.xml'),
'r', encoding='utf-8').read()
value = console_out.getvalue().decode('utf-8')
xml_content = io.open(
os.path.join(self.fixtures_path, "cmd-001.xml"), "r", encoding="utf-8"
).read()
value = console_out.getvalue().decode("utf-8")
self.assertEqual(value, xml_content)
def test_stream_input_output_ascii_encoding(self):
@ -143,18 +159,19 @@ class TestTests(CmdTestsBase):
ascii encoding with unicode input
"""
console_out = io.BytesIO()
console_out.encoding = 'ascii'
console_out.encoding = "ascii"
input_file = os.path.join(self.fixtures_path, 'cmd-001.yaml')
with io.open(input_file, 'r') as f:
with mock.patch('sys.stdout', console_out):
with mock.patch('sys.stdin', f):
args = ['--conf', self.default_config_file, 'test']
input_file = os.path.join(self.fixtures_path, "cmd-001.yaml")
with io.open(input_file, "r") as f:
with mock.patch("sys.stdout", console_out):
with mock.patch("sys.stdin", f):
args = ["--conf", self.default_config_file, "test"]
self.execute_jenkins_jobs_with_args(args)
xml_content = io.open(os.path.join(self.fixtures_path, 'cmd-001.xml'),
'r', encoding='utf-8').read()
value = console_out.getvalue().decode('ascii')
xml_content = io.open(
os.path.join(self.fixtures_path, "cmd-001.xml"), "r", encoding="utf-8"
).read()
value = console_out.getvalue().decode("ascii")
self.assertEqual(value, xml_content)
def test_stream_output_ascii_encoding_invalid_char(self):
@ -164,65 +181,65 @@ class TestTests(CmdTestsBase):
that cannot be converted.
"""
console_out = io.BytesIO()
console_out.encoding = 'ascii'
console_out.encoding = "ascii"
input_file = os.path.join(self.fixtures_path, 'unicode001.yaml')
with io.open(input_file, 'r', encoding='utf-8') as f:
with mock.patch('sys.stdout', console_out):
with mock.patch('sys.stdin', f):
args = ['--conf', self.default_config_file, 'test']
input_file = os.path.join(self.fixtures_path, "unicode001.yaml")
with io.open(input_file, "r", encoding="utf-8") as f:
with mock.patch("sys.stdout", console_out):
with mock.patch("sys.stdin", f):
args = ["--conf", self.default_config_file, "test"]
jenkins_jobs = entry.JenkinsJobs(args)
e = self.assertRaises(UnicodeError, jenkins_jobs.execute)
self.assertIn("'ascii' codec can't encode character", str(e))
@mock.patch(
'jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML')
@mock.patch('jenkins_jobs.cli.subcommand.update.ModuleRegistry')
@mock.patch("jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML")
@mock.patch("jenkins_jobs.cli.subcommand.update.ModuleRegistry")
def test_plugins_info_stub_option(self, registry_mock, generateXML_mock):
"""
Test handling of plugins_info stub option.
"""
plugins_info_stub_yaml_file = os.path.join(self.fixtures_path,
'plugins-info.yaml')
args = ['--conf',
os.path.join(self.fixtures_path, 'cmd-001.conf'),
'test',
'-p',
plugins_info_stub_yaml_file,
os.path.join(self.fixtures_path, 'cmd-001.yaml')]
plugins_info_stub_yaml_file = os.path.join(
self.fixtures_path, "plugins-info.yaml"
)
args = [
"--conf",
os.path.join(self.fixtures_path, "cmd-001.conf"),
"test",
"-p",
plugins_info_stub_yaml_file,
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
self.execute_jenkins_jobs_with_args(args)
with io.open(plugins_info_stub_yaml_file,
'r', encoding='utf-8') as yaml_file:
with io.open(plugins_info_stub_yaml_file, "r", encoding="utf-8") as yaml_file:
plugins_info_list = yaml.load(yaml_file)
registry_mock.assert_called_with(mock.ANY,
plugins_info_list)
registry_mock.assert_called_with(mock.ANY, plugins_info_list)
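
Note: the -p option consumes a YAML stub whose top-level object must be a list (the bogus-stub test below asserts the "must contain a Yaml list" error). A sketch of loading and validating such a stub, with invented file contents:

import yaml

PLUGINS_INFO_STUB = """
- longName: Git plugin
  shortName: git
  version: "3.10.0"
"""

plugins_info = yaml.safe_load(PLUGINS_INFO_STUB)
if not isinstance(plugins_info, list):
    raise ValueError("plugins-info stub must contain a Yaml list")
assert plugins_info[0]["shortName"] == "git"
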
@mock.patch(
'jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML')
@mock.patch('jenkins_jobs.cli.subcommand.update.ModuleRegistry')
def test_bogus_plugins_info_stub_option(self, registry_mock,
generateXML_mock):
@mock.patch("jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML")
@mock.patch("jenkins_jobs.cli.subcommand.update.ModuleRegistry")
def test_bogus_plugins_info_stub_option(self, registry_mock, generateXML_mock):
"""
Verify that a JenkinsJobException is raised if the plugins_info stub
file does not yield a list as its top-level object.
"""
plugins_info_stub_yaml_file = os.path.join(self.fixtures_path,
'bogus-plugins-info.yaml')
args = ['--conf',
os.path.join(self.fixtures_path, 'cmd-001.conf'),
'test',
'-p',
plugins_info_stub_yaml_file,
os.path.join(self.fixtures_path, 'cmd-001.yaml')]
plugins_info_stub_yaml_file = os.path.join(
self.fixtures_path, "bogus-plugins-info.yaml"
)
args = [
"--conf",
os.path.join(self.fixtures_path, "cmd-001.conf"),
"test",
"-p",
plugins_info_stub_yaml_file,
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
stderr = StringIO()
with mock.patch('sys.stderr', stderr):
with mock.patch("sys.stderr", stderr):
self.assertRaises(SystemExit, entry.JenkinsJobs, args)
self.assertIn("must contain a Yaml list",
stderr.getvalue())
self.assertIn("must contain a Yaml list", stderr.getvalue())
class TestJenkinsGetPluginInfoError(CmdTestsBase):
@ -231,9 +248,8 @@ class TestJenkinsGetPluginInfoError(CmdTestsBase):
jenkins_jobs.builder.JenkinsManager.get_plugins_info
"""
@mock.patch('jenkins.Jenkins.get_plugins')
def test_console_output_jenkins_connection_failure_warning(
self, get_plugins_mock):
@mock.patch("jenkins.Jenkins.get_plugins")
def test_console_output_jenkins_connection_failure_warning(self, get_plugins_mock):
"""
Run test mode and verify that failed Jenkins connection attempt
exception does not bubble out of cmd.main. Ideally, we would also test
@ -242,44 +258,51 @@ class TestJenkinsGetPluginInfoError(CmdTestsBase):
suite.
"""
get_plugins_mock.side_effect = \
jenkins.JenkinsException("Connection refused")
with mock.patch('sys.stdout'):
get_plugins_mock.side_effect = jenkins.JenkinsException("Connection refused")
with mock.patch("sys.stdout"):
try:
args = ['--conf', self.default_config_file, 'test',
os.path.join(self.fixtures_path, 'cmd-001.yaml')]
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
self.execute_jenkins_jobs_with_args(args)
except jenkins.JenkinsException:
self.fail("jenkins.JenkinsException propagated to main")
except Exception:
pass # only care about jenkins.JenkinsException for now
@mock.patch('jenkins.Jenkins.get_plugins')
def test_skip_plugin_retrieval_if_no_config_provided(
self, get_plugins_mock):
@mock.patch("jenkins.Jenkins.get_plugins")
def test_skip_plugin_retrieval_if_no_config_provided(self, get_plugins_mock):
"""
Verify that retrieval of plugin information from the Jenkins instance
is skipped when no config file is provided.
"""
with mock.patch('sys.stdout', new_callable=io.BytesIO):
args = ['--conf', self.default_config_file, 'test',
os.path.join(self.fixtures_path, 'cmd-001.yaml')]
with mock.patch("sys.stdout", new_callable=io.BytesIO):
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
entry.JenkinsJobs(args)
self.assertFalse(get_plugins_mock.called)
@mock.patch('jenkins.Jenkins.get_plugins_info')
@mock.patch("jenkins.Jenkins.get_plugins_info")
def test_skip_plugin_retrieval_if_disabled(self, get_plugins_mock):
"""
Verify that retrieval of plugin information from the Jenkins instance
is skipped when the provided config file disables querying through a
config option.
"""
with mock.patch('sys.stdout', new_callable=io.BytesIO):
args = ['--conf',
os.path.join(self.fixtures_path,
'disable-query-plugins.conf'),
'test',
os.path.join(self.fixtures_path, 'cmd-001.yaml')]
with mock.patch("sys.stdout", new_callable=io.BytesIO):
args = [
"--conf",
os.path.join(self.fixtures_path, "disable-query-plugins.conf"),
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
entry.JenkinsJobs(args)
self.assertFalse(get_plugins_mock.called)
@ -291,8 +314,8 @@ class MatchesDirMissingFilesMismatch(object):
def describe(self):
return "{0} and {1} contain different files".format(
self.left_directory,
self.right_directory)
self.left_directory, self.right_directory
)
def get_details(self):
return {}
@ -308,11 +331,15 @@ class MatchesDirFileContentsMismatch(object):
right_contents = open(self.right_file).readlines()
return "{0} is not equal to {1}:\n{2}".format(
difflib.unified_diff(left_contents, right_contents,
fromfile=self.left_file,
tofile=self.right_file),
difflib.unified_diff(
left_contents,
right_contents,
fromfile=self.left_file,
tofile=self.right_file,
),
self.left_file,
self.right_file)
self.right_file,
)
def get_details(self):
return {}
@ -337,8 +364,7 @@ class MatchesDir(object):
other_files.sort()
if self.__files != other_files:
return MatchesDirMissingFilesMismatch(self.__directory,
other_directory)
return MatchesDirMissingFilesMismatch(self.__directory, other_directory)
for i, file in enumerate(self.__files):
my_file = os.path.join(self.__directory, file)
@ -349,16 +375,15 @@ class MatchesDir(object):
return None
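
Note: MatchesDir follows the testtools matcher protocol: match(actual) returns None on success, or a mismatch object exposing describe() and get_details() on failure. A minimal matcher in the same style:

class IsEven(object):
    """Matches integers divisible by two."""

    def __str__(self):
        return "IsEven()"

    def match(self, actual):
        # Per the protocol, None signals a match.
        if actual % 2 == 0:
            return None
        return IsEvenMismatch(actual)


class IsEvenMismatch(object):
    def __init__(self, actual):
        self.actual = actual

    def describe(self):
        return "{0} is not even".format(self.actual)

    def get_details(self):
        return {}


assert IsEven().match(4) is None
assert IsEven().match(3).describe() == "3 is not even"
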
@mock.patch('jenkins_jobs.builder.JenkinsManager.get_plugins_info',
mock.MagicMock)
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock)
class TestTestsMultiPath(CmdTestsBase):
def setUp(self):
super(TestTestsMultiPath, self).setUp()
path_list = [os.path.join(self.fixtures_path,
'multi-path/yamldirs/', p)
for p in ['dir1', 'dir2']]
path_list = [
os.path.join(self.fixtures_path, "multi-path/yamldirs/", p)
for p in ["dir1", "dir2"]
]
self.multipath = os.pathsep.join(path_list)
self.output_dir = tempfile.mkdtemp()
@ -374,45 +399,74 @@ class TestTestsMultiPath(CmdTestsBase):
"""
Run test mode and pass multiple paths.
"""
args = ['--conf', self.default_config_file, 'test',
'-o', self.output_dir, self.multipath]
args = [
"--conf",
self.default_config_file,
"test",
"-o",
self.output_dir,
self.multipath,
]
self.execute_jenkins_jobs_with_args(args)
self.check_dirs_match(os.path.join(self.fixtures_path,
'multi-path/output_simple'))
self.check_dirs_match(
os.path.join(self.fixtures_path, "multi-path/output_simple")
)
def test_recursive_multi_path_command_line(self):
"""
Run test mode and pass multiple paths with recursive path option.
"""
args = ['--conf', self.default_config_file, 'test',
'-o', self.output_dir, '-r', self.multipath]
args = [
"--conf",
self.default_config_file,
"test",
"-o",
self.output_dir,
"-r",
self.multipath,
]
self.execute_jenkins_jobs_with_args(args)
self.check_dirs_match(os.path.join(self.fixtures_path,
'multi-path/output_recursive'))
self.check_dirs_match(
os.path.join(self.fixtures_path, "multi-path/output_recursive")
)
def test_recursive_multi_path_config_file(self):
# test recursive set in configuration file
args = ['--conf', os.path.join(self.fixtures_path,
'multi-path/builder-recursive.ini'),
'test', '-o', self.output_dir, self.multipath]
args = [
"--conf",
os.path.join(self.fixtures_path, "multi-path/builder-recursive.ini"),
"test",
"-o",
self.output_dir,
self.multipath,
]
self.execute_jenkins_jobs_with_args(args)
self.check_dirs_match(os.path.join(self.fixtures_path,
'multi-path/output_recursive'))
self.check_dirs_match(
os.path.join(self.fixtures_path, "multi-path/output_recursive")
)
def test_recursive_multi_path_with_excludes(self):
"""
Run test mode and pass multiple paths with recursive path option.
"""
exclude_path = os.path.join(self.fixtures_path,
'multi-path/yamldirs/dir2/dir1')
args = ['--conf', self.default_config_file, 'test',
'-x', exclude_path,
'-o', self.output_dir,
'-r', self.multipath]
exclude_path = os.path.join(self.fixtures_path, "multi-path/yamldirs/dir2/dir1")
args = [
"--conf",
self.default_config_file,
"test",
"-x",
exclude_path,
"-o",
self.output_dir,
"-r",
self.multipath,
]
self.execute_jenkins_jobs_with_args(args)
self.check_dirs_match(
os.path.join(self.fixtures_path,
'multi-path/output_recursive_with_excludes'))
os.path.join(
self.fixtures_path, "multi-path/output_recursive_with_excludes"
)
)


@ -25,61 +25,60 @@ from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch('jenkins_jobs.builder.JenkinsManager.get_plugins_info',
mock.MagicMock)
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock)
class UpdateTests(CmdTestsBase):
@mock.patch('jenkins_jobs.builder.jenkins.Jenkins.job_exists')
@mock.patch('jenkins_jobs.builder.jenkins.Jenkins.get_all_jobs')
@mock.patch('jenkins_jobs.builder.jenkins.Jenkins.reconfig_job')
def test_update_jobs(self,
jenkins_reconfig_job,
jenkins_get_jobs,
jenkins_job_exists, ):
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.job_exists")
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.get_all_jobs")
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.reconfig_job")
def test_update_jobs(
self, jenkins_reconfig_job, jenkins_get_jobs, jenkins_job_exists
):
"""
Test update_job is called
"""
path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
args = ['--conf', self.default_config_file, 'update', path]
path = os.path.join(self.fixtures_path, "cmd-002.yaml")
args = ["--conf", self.default_config_file, "update", path]
self.execute_jenkins_jobs_with_args(args)
jenkins_reconfig_job.assert_has_calls(
[mock.call(job_name, mock.ANY)
for job_name in ['bar001', 'bar002', 'baz001', 'bam001']],
any_order=True
[
mock.call(job_name, mock.ANY)
for job_name in ["bar001", "bar002", "baz001", "bam001"]
],
any_order=True,
)
@mock.patch('jenkins_jobs.builder.JenkinsManager.is_job',
return_value=True)
@mock.patch('jenkins_jobs.builder.JenkinsManager.get_jobs')
@mock.patch('jenkins_jobs.builder.JenkinsManager.get_job_md5')
@mock.patch('jenkins_jobs.builder.JenkinsManager.update_job')
def test_update_jobs_decode_job_output(self, update_job_mock,
get_job_md5_mock, get_jobs_mock,
is_job_mock):
@mock.patch("jenkins_jobs.builder.JenkinsManager.is_job", return_value=True)
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_jobs")
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_job_md5")
@mock.patch("jenkins_jobs.builder.JenkinsManager.update_job")
def test_update_jobs_decode_job_output(
self, update_job_mock, get_job_md5_mock, get_jobs_mock, is_job_mock
):
"""
Test that job xml output has been decoded before attempting to update
"""
# don't care about the value returned here
update_job_mock.return_value = ([], 0)
path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
args = ['--conf', self.default_config_file, 'update', path]
path = os.path.join(self.fixtures_path, "cmd-002.yaml")
args = ["--conf", self.default_config_file, "update", path]
self.execute_jenkins_jobs_with_args(args)
self.assertTrue(isinstance(update_job_mock.call_args[0][1],
six.text_type))
self.assertTrue(isinstance(update_job_mock.call_args[0][1], six.text_type))
@mock.patch('jenkins_jobs.builder.jenkins.Jenkins.job_exists')
@mock.patch('jenkins_jobs.builder.jenkins.Jenkins.get_all_jobs')
@mock.patch('jenkins_jobs.builder.jenkins.Jenkins.reconfig_job')
@mock.patch('jenkins_jobs.builder.jenkins.Jenkins.delete_job')
def test_update_jobs_and_delete_old(self,
jenkins_delete_job,
jenkins_reconfig_job,
jenkins_get_all_jobs,
jenkins_job_exists):
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.job_exists")
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.get_all_jobs")
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.reconfig_job")
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.delete_job")
def test_update_jobs_and_delete_old(
self,
jenkins_delete_job,
jenkins_reconfig_job,
jenkins_get_all_jobs,
jenkins_job_exists,
):
"""
Test update behaviour with --delete-old option
@ -92,25 +91,26 @@ class UpdateTests(CmdTestsBase):
* mock out a call to jenkins.Jenkins.job_exists() to always return
True.
"""
yaml_jobs = ['bar001', 'bar002', 'baz001', 'bam001']
extra_jobs = ['old_job001', 'old_job002', 'unmanaged']
yaml_jobs = ["bar001", "bar002", "baz001", "bam001"]
extra_jobs = ["old_job001", "old_job002", "unmanaged"]
path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
args = ['--conf', self.default_config_file, 'update', '--delete-old',
path]
path = os.path.join(self.fixtures_path, "cmd-002.yaml")
args = ["--conf", self.default_config_file, "update", "--delete-old", path]
jenkins_get_all_jobs.return_value = [
{'fullname': name} for name in yaml_jobs + extra_jobs]
{"fullname": name} for name in yaml_jobs + extra_jobs
]
with mock.patch('jenkins_jobs.builder.JenkinsManager.is_managed',
side_effect=(lambda name: name != 'unmanaged')):
with mock.patch(
"jenkins_jobs.builder.JenkinsManager.is_managed",
side_effect=(lambda name: name != "unmanaged"),
):
self.execute_jenkins_jobs_with_args(args)
jenkins_reconfig_job.assert_has_calls(
[mock.call(job_name, mock.ANY) for job_name in yaml_jobs],
any_order=True
[mock.call(job_name, mock.ANY) for job_name in yaml_jobs], any_order=True
)
calls = [mock.call(name) for name in extra_jobs if name != 'unmanaged']
calls = [mock.call(name) for name in extra_jobs if name != "unmanaged"]
jenkins_delete_job.assert_has_calls(calls)
# to ensure only the expected calls were made, we also have to check
# that no others occurred, since mock provides no assert_has_only_calls


@ -7,7 +7,7 @@ from tests.base import mock
class CmdTestsBase(base.BaseTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
def setUp(self):
super(CmdTestsBase, self).setUp()
@ -17,13 +17,11 @@ class CmdTestsBase(base.BaseTestCase):
# are run in parallel. Stub out the JobCache to ensure that each
# test can safely create the cache directory without risk of
# interference.
cache_patch = mock.patch('jenkins_jobs.builder.JobCache',
autospec=True)
cache_patch = mock.patch("jenkins_jobs.builder.JobCache", autospec=True)
self.cache_mock = cache_patch.start()
self.addCleanup(cache_patch.stop)
self.default_config_file = os.path.join(self.fixtures_path,
'empty_builder.ini')
self.default_config_file = os.path.join(self.fixtures_path, "empty_builder.ini")
def execute_jenkins_jobs_with_args(self, args):
jenkins_jobs = entry.JenkinsJobs(args)
@ -31,10 +29,9 @@ class CmdTestsBase(base.BaseTestCase):
class TestCmd(CmdTestsBase):
def test_with_empty_args(self):
"""
User passes no args, should fail with SystemExit
"""
with mock.patch('sys.stderr'):
with mock.patch("sys.stderr"):
self.assertRaises(SystemExit, entry.JenkinsJobs, [])


@ -9,25 +9,25 @@ from jenkins_jobs.cli import entry
from jenkins_jobs import builder
@mock.patch('jenkins_jobs.builder.JenkinsManager.get_plugins_info',
mock.MagicMock)
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock)
class TestConfigs(CmdTestsBase):
global_conf = '/etc/jenkins_jobs/jenkins_jobs.ini'
user_conf = os.path.join(os.path.expanduser('~'), '.config',
'jenkins_jobs', 'jenkins_jobs.ini')
local_conf = os.path.join(os.path.dirname(__file__),
'jenkins_jobs.ini')
global_conf = "/etc/jenkins_jobs/jenkins_jobs.ini"
user_conf = os.path.join(
os.path.expanduser("~"), ".config", "jenkins_jobs", "jenkins_jobs.ini"
)
local_conf = os.path.join(os.path.dirname(__file__), "jenkins_jobs.ini")
def test_use_global_config(self):
"""
Verify that JJB uses the global config file by default
"""
args = ['test', 'foo']
conffp = io.open(self.default_config_file, 'r', encoding='utf-8')
args = ["test", "foo"]
conffp = io.open(self.default_config_file, "r", encoding="utf-8")
with patch("os.path.isfile", return_value=True) as m_isfile:
with patch('os.path.isfile', return_value=True) as m_isfile:
def side_effect(path):
if path == self.global_conf:
return True
@ -35,36 +35,35 @@ class TestConfigs(CmdTestsBase):
m_isfile.side_effect = side_effect
with patch('io.open', return_value=conffp) as m_open:
with patch("io.open", return_value=conffp) as m_open:
entry.JenkinsJobs(args, config_file_required=True)
m_open.assert_called_with(self.global_conf, 'r',
encoding='utf-8')
m_open.assert_called_with(self.global_conf, "r", encoding="utf-8")
def test_use_config_in_user_home(self):
"""
Verify that JJB uses config file in user home folder
"""
args = ['test', 'foo']
args = ["test", "foo"]
conffp = io.open(self.default_config_file, "r", encoding="utf-8")
with patch("os.path.isfile", return_value=True) as m_isfile:
conffp = io.open(self.default_config_file, 'r', encoding='utf-8')
with patch('os.path.isfile', return_value=True) as m_isfile:
def side_effect(path):
if path == self.user_conf:
return True
return False
m_isfile.side_effect = side_effect
with patch('io.open', return_value=conffp) as m_open:
with patch("io.open", return_value=conffp) as m_open:
entry.JenkinsJobs(args, config_file_required=True)
m_open.assert_called_with(self.user_conf, 'r',
encoding='utf-8')
m_open.assert_called_with(self.user_conf, "r", encoding="utf-8")
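
Note: these config tests stub os.path.isfile so that exactly one candidate path appears to exist, pinning down which file JJB opens. The lookup being exercised is essentially a first-existing-file scan; a sketch under that assumption (the real ordering lives in jenkins_jobs.cli.entry):

import os


def pick_config(candidates):
    # Return the first candidate path that exists on disk; the tests
    # above fake os.path.isfile to control which one that is.
    for path in candidates:
        if os.path.isfile(path):
            return path
    return None


assert pick_config(["/nonexistent/jenkins_jobs.ini", __file__]) == __file__
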
def test_non_existing_config_dir(self):
"""
Run test mode and pass a non-existing configuration directory
"""
args = ['--conf', self.default_config_file, 'test', 'foo']
args = ["--conf", self.default_config_file, "test", "foo"]
jenkins_jobs = entry.JenkinsJobs(args)
self.assertRaises(IOError, jenkins_jobs.execute)
@ -72,8 +71,7 @@ class TestConfigs(CmdTestsBase):
"""
Run test mode and pass a non-existing configuration file
"""
args = ['--conf', self.default_config_file, 'test',
'non-existing.yaml']
args = ["--conf", self.default_config_file, "test", "non-existing.yaml"]
jenkins_jobs = entry.JenkinsJobs(args)
self.assertRaises(IOError, jenkins_jobs.execute)
@ -82,37 +80,42 @@ class TestConfigs(CmdTestsBase):
Run test mode and check that config settings from the conf file are
retained when none of the global CLI options are set.
"""
config_file = os.path.join(self.fixtures_path,
'settings_from_config.ini')
args = ['--conf', config_file, 'test', 'dummy.yaml']
config_file = os.path.join(self.fixtures_path, "settings_from_config.ini")
args = ["--conf", config_file, "test", "dummy.yaml"]
jenkins_jobs = entry.JenkinsJobs(args)
jjb_config = jenkins_jobs.jjb_config
self.assertEqual(jjb_config.jenkins['user'], "jenkins_user")
self.assertEqual(jjb_config.jenkins['password'], "jenkins_password")
self.assertEqual(jjb_config.builder['ignore_cache'], True)
self.assertEqual(jjb_config.builder['flush_cache'], True)
self.assertEqual(jjb_config.builder['update'], "all")
self.assertEqual(
jjb_config.yamlparser['allow_empty_variables'], True)
self.assertEqual(jjb_config.jenkins["user"], "jenkins_user")
self.assertEqual(jjb_config.jenkins["password"], "jenkins_password")
self.assertEqual(jjb_config.builder["ignore_cache"], True)
self.assertEqual(jjb_config.builder["flush_cache"], True)
self.assertEqual(jjb_config.builder["update"], "all")
self.assertEqual(jjb_config.yamlparser["allow_empty_variables"], True)
def test_config_options_overriden_by_cli(self):
"""
Run test mode and check that config settings from the conf file are
overridden when the corresponding global CLI options are set.
"""
args = ['--user', 'myuser', '--password', 'mypassword',
'--ignore-cache', '--flush-cache', '--allow-empty-variables',
'test', 'dummy.yaml']
args = [
"--user",
"myuser",
"--password",
"mypassword",
"--ignore-cache",
"--flush-cache",
"--allow-empty-variables",
"test",
"dummy.yaml",
]
jenkins_jobs = entry.JenkinsJobs(args)
jjb_config = jenkins_jobs.jjb_config
self.assertEqual(jjb_config.jenkins['user'], "myuser")
self.assertEqual(jjb_config.jenkins['password'], "mypassword")
self.assertEqual(jjb_config.builder['ignore_cache'], True)
self.assertEqual(jjb_config.builder['flush_cache'], True)
self.assertEqual(
jjb_config.yamlparser['allow_empty_variables'], True)
self.assertEqual(jjb_config.jenkins["user"], "myuser")
self.assertEqual(jjb_config.jenkins["password"], "mypassword")
self.assertEqual(jjb_config.builder["ignore_cache"], True)
self.assertEqual(jjb_config.builder["flush_cache"], True)
self.assertEqual(jjb_config.yamlparser["allow_empty_variables"], True)
@mock.patch('jenkins_jobs.cli.subcommand.update.JenkinsManager')
@mock.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager")
def test_update_timeout_not_set(self, jenkins_mock):
"""Check that timeout is left unset
@ -120,8 +123,8 @@ class TestConfigs(CmdTestsBase):
provided via the config option.
"""
path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
args = ['--conf', self.default_config_file, 'update', path]
path = os.path.join(self.fixtures_path, "cmd-002.yaml")
args = ["--conf", self.default_config_file, "update", path]
jenkins_mock.return_value.update_jobs.return_value = ([], 0)
jenkins_mock.return_value.update_views.return_value = ([], 0)
@ -131,10 +134,9 @@ class TestConfigs(CmdTestsBase):
# contains the expected timeout value.
jjb_config = jenkins_mock.call_args[0][0]
self.assertEqual(jjb_config.jenkins['timeout'],
builder._DEFAULT_TIMEOUT)
self.assertEqual(jjb_config.jenkins["timeout"], builder._DEFAULT_TIMEOUT)
@mock.patch('jenkins_jobs.cli.subcommand.update.JenkinsManager')
@mock.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager")
def test_update_timeout_set(self, jenkins_mock):
"""Check that timeout is set correctly
@ -142,10 +144,9 @@ class TestConfigs(CmdTestsBase):
provided via the config option.
"""
path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
config_file = os.path.join(self.fixtures_path,
'non-default-timeout.ini')
args = ['--conf', config_file, 'update', path]
path = os.path.join(self.fixtures_path, "cmd-002.yaml")
config_file = os.path.join(self.fixtures_path, "non-default-timeout.ini")
args = ["--conf", config_file, "update", path]
jenkins_mock.return_value.update_jobs.return_value = ([], 0)
jenkins_mock.return_value.update_views.return_value = ([], 0)
@ -155,4 +156,4 @@ class TestConfigs(CmdTestsBase):
# contains the expected timeout value.
jjb_config = jenkins_mock.call_args[0][0]
self.assertEqual(jjb_config.jenkins['timeout'], 0.2)
self.assertEqual(jjb_config.jenkins["timeout"], 0.2)


@ -22,6 +22,7 @@ def fake_os_walk(paths):
new_path = "/".join([top, name])
for x in os_walk(new_path, topdown):
yield x
return os_walk
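
Note: fake_os_walk turns a flat list of (path, (dirs, files)) pairs into a stand-in for os.walk, so the recursion tests below run without touching the filesystem. A small sketch of the same idea (make_fake_walk is an invented name):

def make_fake_walk(tree):
    # "tree" maps a directory to its (subdirs, files) pair, mimicking
    # what os.walk would yield for that directory.
    def fake_walk(top, topdown=True):
        dirs, files = tree[top]
        yield top, dirs, files
        for name in dirs:
            for entry in fake_walk("/".join([top, name]), topdown):
                yield entry
    return fake_walk


walk = make_fake_walk({
    "/jjb_configs": (["dir1"], []),
    "/jjb_configs/dir1": ([], ["jobs.yaml"]),
})
assert [path for path, _, _ in walk("/jjb_configs")] == [
    "/jjb_configs",
    "/jjb_configs/dir1",
]
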
@ -29,10 +30,9 @@ def fake_os_walk(paths):
# attempting to create the cache directory multiple times as the tests
# are run in parallel. Stub out the JobCache to ensure that each
# test can safely create the object without effect.
@mock.patch('jenkins_jobs.builder.JobCache', mock.MagicMock)
@mock.patch("jenkins_jobs.builder.JobCache", mock.MagicMock)
class CmdRecursePath(testtools.TestCase):
@mock.patch('jenkins_jobs.utils.os.walk')
@mock.patch("jenkins_jobs.utils.os.walk")
def test_recursive_path_option_exclude_pattern(self, oswalk_mock):
"""
Test paths returned by the recursive processing when using pattern
@ -48,21 +48,21 @@ class CmdRecursePath(testtools.TestCase):
"""
os_walk_paths = [
('/jjb_configs', (['dir1', 'dir2', 'dir3', 'test3'], ())),
('/jjb_configs/dir1', (['test1'], ('file'))),
('/jjb_configs/dir2', (['test2'], ())),
('/jjb_configs/dir3', (['bar'], ())),
('/jjb_configs/dir3/bar', ([], ())),
('/jjb_configs/test3/bar', None),
('/jjb_configs/test3/baz', None)
("/jjb_configs", (["dir1", "dir2", "dir3", "test3"], ())),
("/jjb_configs/dir1", (["test1"], ("file"))),
("/jjb_configs/dir2", (["test2"], ())),
("/jjb_configs/dir3", (["bar"], ())),
("/jjb_configs/dir3/bar", ([], ())),
("/jjb_configs/test3/bar", None),
("/jjb_configs/test3/baz", None),
]
paths = [k for k, v in os_walk_paths if v is not None]
oswalk_mock.side_effect = fake_os_walk(os_walk_paths)
self.assertEqual(paths, utils.recurse_path('/jjb_configs', ['test*']))
self.assertEqual(paths, utils.recurse_path("/jjb_configs", ["test*"]))
@mock.patch('jenkins_jobs.utils.os.walk')
@mock.patch("jenkins_jobs.utils.os.walk")
def test_recursive_path_option_exclude_absolute(self, oswalk_mock):
"""
Test paths returned by the recursive processing when using absolute
@ -78,25 +78,26 @@ class CmdRecursePath(testtools.TestCase):
"""
os_walk_paths = [
('/jjb_configs', (['dir1', 'dir2', 'dir3', 'test3'], ())),
('/jjb_configs/dir1', None),
('/jjb_configs/dir2', (['test2'], ())),
('/jjb_configs/dir3', (['bar'], ())),
('/jjb_configs/test3', (['bar', 'baz'], ())),
('/jjb_configs/dir2/test2', ([], ())),
('/jjb_configs/dir3/bar', ([], ())),
('/jjb_configs/test3/bar', ([], ())),
('/jjb_configs/test3/baz', ([], ()))
("/jjb_configs", (["dir1", "dir2", "dir3", "test3"], ())),
("/jjb_configs/dir1", None),
("/jjb_configs/dir2", (["test2"], ())),
("/jjb_configs/dir3", (["bar"], ())),
("/jjb_configs/test3", (["bar", "baz"], ())),
("/jjb_configs/dir2/test2", ([], ())),
("/jjb_configs/dir3/bar", ([], ())),
("/jjb_configs/test3/bar", ([], ())),
("/jjb_configs/test3/baz", ([], ())),
]
paths = [k for k, v in os_walk_paths if v is not None]
oswalk_mock.side_effect = fake_os_walk(os_walk_paths)
self.assertEqual(paths, utils.recurse_path('/jjb_configs',
['/jjb_configs/dir1']))
self.assertEqual(
paths, utils.recurse_path("/jjb_configs", ["/jjb_configs/dir1"])
)
@mock.patch('jenkins_jobs.utils.os.walk')
@mock.patch("jenkins_jobs.utils.os.walk")
def test_recursive_path_option_exclude_relative(self, oswalk_mock):
"""
Test paths returned by the recursive processing when using relative
@ -112,25 +113,27 @@ class CmdRecursePath(testtools.TestCase):
"""
os_walk_paths = [
('jjb_configs', (['dir1', 'dir2', 'dir3', 'test3'], ())),
('jjb_configs/dir1', (['test'], ('file'))),
('jjb_configs/dir2', (['test2'], ())),
('jjb_configs/dir3', (['bar'], ())),
('jjb_configs/test3', (['bar', 'baz'], ())),
('jjb_configs/dir1/test', ([], ())),
('jjb_configs/dir2/test2', ([], ())),
('jjb_configs/dir3/bar', ([], ())),
('jjb_configs/test3/bar', None),
('jjb_configs/test3/baz', ([], ()))
("jjb_configs", (["dir1", "dir2", "dir3", "test3"], ())),
("jjb_configs/dir1", (["test"], ("file"))),
("jjb_configs/dir2", (["test2"], ())),
("jjb_configs/dir3", (["bar"], ())),
("jjb_configs/test3", (["bar", "baz"], ())),
("jjb_configs/dir1/test", ([], ())),
("jjb_configs/dir2/test2", ([], ())),
("jjb_configs/dir3/bar", ([], ())),
("jjb_configs/test3/bar", None),
("jjb_configs/test3/baz", ([], ())),
]
rel_os_walk_paths = [
(os.path.abspath(
os.path.join(os.path.curdir, k)), v) for k, v in os_walk_paths]
(os.path.abspath(os.path.join(os.path.curdir, k)), v)
for k, v in os_walk_paths
]
paths = [k for k, v in rel_os_walk_paths if v is not None]
oswalk_mock.side_effect = fake_os_walk(rel_os_walk_paths)
self.assertEqual(paths, utils.recurse_path('jjb_configs',
['jjb_configs/test3/bar']))
self.assertEqual(
paths, utils.recurse_path("jjb_configs", ["jjb_configs/test3/bar"])
)


@ -23,10 +23,10 @@ from tests.base import mock
class TestCaseModuleDuplicates(base.SingleJobTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
@mock.patch('jenkins_jobs.builder.logger', autospec=True)
@mock.patch("jenkins_jobs.builder.logger", autospec=True)
def test_yaml_snippet(self, mock_logger):
if os.path.basename(self.in_filename).startswith("exception_"):


@ -16,19 +16,19 @@ def dispatch(exc, *args):
def gen_xml(exc, *args):
data = {'module': 'data'} # noqa
data = {"module": "data"} # noqa
raise exc(*args)
class TestInvalidAttributeError(base.BaseTestCase):
def test_no_valid_values(self):
# When given no valid values, InvalidAttributeError simply displays a
# message indicating the invalid value, the component type, the
# component name, and the attribute name.
message = "'{0}' is an invalid value for attribute {1}.{2}".format(
"fnord", "type.name", "fubar")
"fnord", "type.name", "fubar"
)
with ExpectedException(errors.InvalidAttributeError, message):
dispatch(errors.InvalidAttributeError, "fubar", "fnord")
@ -37,46 +37,49 @@ class TestInvalidAttributeError(base.BaseTestCase):
# indicating the invalid value, the component type, the component name,
# and the attribute name; additionally, it lists the valid values for
# the current component type & name.
valid_values = ['herp', 'derp']
valid_values = ["herp", "derp"]
message = "'{0}' is an invalid value for attribute {1}.{2}".format(
"fnord", "type.name", "fubar")
"fnord", "type.name", "fubar"
)
message += "\nValid values include: {0}".format(
', '.join("'{0}'".format(value) for value in valid_values))
", ".join("'{0}'".format(value) for value in valid_values)
)
with ExpectedException(errors.InvalidAttributeError, message):
dispatch(errors.InvalidAttributeError, "fubar", "fnord",
valid_values)
dispatch(errors.InvalidAttributeError, "fubar", "fnord", valid_values)
class TestMissingAttributeError(base.BaseTestCase):
def test_with_single_missing_attribute(self):
# When passed a single missing attribute, display a message indicating
# * the missing attribute
# * which component type and component name is missing it.
missing_attribute = 'herp'
missing_attribute = "herp"
message = "Missing {0} from an instance of '{1}'".format(
missing_attribute, 'type.name')
missing_attribute, "type.name"
)
with ExpectedException(errors.MissingAttributeError, message):
dispatch(errors.MissingAttributeError, missing_attribute)
with ExpectedException(errors.MissingAttributeError,
message.replace('type.name', 'module')):
with ExpectedException(
errors.MissingAttributeError, message.replace("type.name", "module")
):
gen_xml(errors.MissingAttributeError, missing_attribute)
def test_with_multiple_missing_attributes(self):
# When passed multiple missing attributes, display a message indicating
# * the missing attributes
# * which component type and component name are missing them.
missing_attribute = ['herp', 'derp']
missing_attribute = ["herp", "derp"]
message = "One of {0} must be present in '{1}'".format(
', '.join("'{0}'".format(value) for value in missing_attribute),
'type.name')
", ".join("'{0}'".format(value) for value in missing_attribute), "type.name"
)
with ExpectedException(errors.MissingAttributeError, message):
dispatch(errors.MissingAttributeError, missing_attribute)
with ExpectedException(errors.MissingAttributeError,
message.replace('type.name', 'module')):
with ExpectedException(
errors.MissingAttributeError, message.replace("type.name", "module")
):
gen_xml(errors.MissingAttributeError, missing_attribute)


@ -22,6 +22,6 @@ from tests import base
class TestCaseModuleGeneral(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = general.General


@ -19,6 +19,6 @@ from tests import base
class TestCaseModulePublishers(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = hipchat_notif.HipChat


@ -21,66 +21,65 @@ from tests.base import mock
_plugins_info = {}
_plugins_info['plugin1'] = {'longName': '',
'shortName': '',
'version': ''}
_plugins_info["plugin1"] = {"longName": "", "shortName": "", "version": ""}
@mock.patch('jenkins_jobs.builder.JobCache', mock.MagicMock)
@mock.patch("jenkins_jobs.builder.JobCache", mock.MagicMock)
class TestCaseTestJenkinsManager(base.BaseTestCase):
def setUp(self):
super(TestCaseTestJenkinsManager, self).setUp()
self.jjb_config = JJBConfig()
self.jjb_config.validate()
def test_plugins_list(self):
self.jjb_config.builder['plugins_info'] = _plugins_info
self.jjb_config.builder["plugins_info"] = _plugins_info
self.builder = jenkins_jobs.builder.JenkinsManager(self.jjb_config)
self.assertEqual(self.builder.plugins_list, _plugins_info)
@mock.patch.object(jenkins_jobs.builder.jenkins.Jenkins,
'get_plugins',
return_value=_plugins_info)
@mock.patch.object(
jenkins_jobs.builder.jenkins.Jenkins, "get_plugins", return_value=_plugins_info
)
def test_plugins_list_from_jenkins(self, jenkins_mock):
# Trigger fetching the plugins from jenkins when accessing the property
self.jjb_config.builder['plugins_info'] = {}
self.jjb_config.builder["plugins_info"] = {}
self.builder = jenkins_jobs.builder.JenkinsManager(self.jjb_config)
# See https://github.com/formiaczek/multi_key_dict/issues/17
# self.assertEqual(self.builder.plugins_list, k)
for key_tuple in self.builder.plugins_list.keys():
for key in key_tuple:
self.assertEqual(self.builder.plugins_list[key],
_plugins_info[key])
self.assertEqual(self.builder.plugins_list[key], _plugins_info[key])
def test_delete_managed(self):
self.jjb_config.builder['plugins_info'] = {}
self.jjb_config.builder["plugins_info"] = {}
self.builder = jenkins_jobs.builder.JenkinsManager(self.jjb_config)
with mock.patch.multiple('jenkins_jobs.builder.JenkinsManager',
get_jobs=mock.DEFAULT,
is_job=mock.DEFAULT,
is_managed=mock.DEFAULT,
delete_job=mock.DEFAULT) as patches:
patches['get_jobs'].return_value = [{'fullname': 'job1'},
{'fullname': 'job2'}]
patches['is_managed'].side_effect = [True, True]
patches['is_job'].side_effect = [True, True]
with mock.patch.multiple(
"jenkins_jobs.builder.JenkinsManager",
get_jobs=mock.DEFAULT,
is_job=mock.DEFAULT,
is_managed=mock.DEFAULT,
delete_job=mock.DEFAULT,
) as patches:
patches["get_jobs"].return_value = [
{"fullname": "job1"},
{"fullname": "job2"},
]
patches["is_managed"].side_effect = [True, True]
patches["is_job"].side_effect = [True, True]
self.builder.delete_old_managed()
self.assertEqual(patches['delete_job'].call_count, 2)
self.assertEqual(patches["delete_job"].call_count, 2)
def _get_plugins_info_error_test(self, error_string):
builder = jenkins_jobs.builder.JenkinsManager(self.jjb_config)
exception = jenkins_jobs.builder.jenkins.JenkinsException(error_string)
with mock.patch.object(builder.jenkins, 'get_plugins',
side_effect=exception):
with mock.patch.object(builder.jenkins, "get_plugins", side_effect=exception):
plugins_info = builder.get_plugins_info()
self.assertEqual([_plugins_info['plugin1']], plugins_info)
self.assertEqual([_plugins_info["plugin1"]], plugins_info)
def test_get_plugins_info_handles_connectionrefused_errors(self):
self._get_plugins_info_error_test('Connection refused')
self._get_plugins_info_error_test("Connection refused")
def test_get_plugins_info_handles_forbidden_errors(self):
self._get_plugins_info_error_test('Forbidden')
self._get_plugins_info_error_test("Forbidden")


@ -21,5 +21,5 @@ from tests import base
class TestCaseModuleJsonParser(base.SingleJobTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
scenarios = base.get_scenarios(fixtures_path, in_ext='json', out_ext='xml')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path, in_ext="json", out_ext="xml")


@ -34,15 +34,16 @@ class TestCaseLocalYamlInclude(base.JsonTestCase):
Verify application specific tags independently of any changes to
modules XML parsing behaviour
"""
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
scenarios = base.get_scenarios(fixtures_path, 'yaml', 'json',
filter_func=_exclude_scenarios)
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(
fixtures_path, "yaml", "json", filter_func=_exclude_scenarios
)
def test_yaml_snippet(self):
if os.path.basename(self.in_filename).startswith("exception_"):
with ExpectedException(ComposerError,
"^found duplicate anchor .*"):
with ExpectedException(ComposerError, "^found duplicate anchor .*"):
super(TestCaseLocalYamlInclude, self).test_yaml_snippet()
else:
super(TestCaseLocalYamlInclude, self).test_yaml_snippet()
@ -53,13 +54,14 @@ class TestCaseLocalYamlAnchorAlias(base.YamlTestCase):
Verify yaml input is expanded to the expected yaml output when using yaml
anchors and aliases.
"""
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
scenarios = base.get_scenarios(fixtures_path, 'iyaml', 'oyaml')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path, "iyaml", "oyaml")
class TestCaseLocalYamlIncludeAnchors(base.BaseTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
def test_multiple_same_anchor_in_multiple_toplevel_yaml(self):
"""
@ -70,14 +72,16 @@ class TestCaseLocalYamlIncludeAnchors(base.BaseTestCase):
are treated by the yaml loader as independent.
"""
files = ["custom_same_anchor-001-part1.yaml",
"custom_same_anchor-001-part2.yaml"]
files = [
"custom_same_anchor-001-part1.yaml",
"custom_same_anchor-001-part2.yaml",
]
jjb_config = JJBConfig()
jjb_config.jenkins['url'] = 'http://example.com'
jjb_config.jenkins['user'] = 'jenkins'
jjb_config.jenkins['password'] = 'password'
jjb_config.builder['plugins_info'] = []
jjb_config.jenkins["url"] = "http://example.com"
jjb_config.jenkins["user"] = "jenkins"
jjb_config.jenkins["password"] = "password"
jjb_config.builder["plugins_info"] = []
jjb_config.validate()
j = YamlParser(jjb_config)
j.load_files([os.path.join(self.fixtures_path, f) for f in files])
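
The fixture pair works because load_files parses each file as its own YAML document, so identically named anchors never meet. The equivalent behaviour in plain PyYAML:

import yaml

part1 = "- defaults: &common\n    foo: bar\n"
part2 = "- defaults: &common\n    foo: baz\n"  # same anchor, separate document
# Loaded separately, the two anchors are independent and no duplicate-
# anchor complaint can arise between files.
print(yaml.safe_load(part1), yaml.safe_load(part2))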
@ -85,22 +89,20 @@ class TestCaseLocalYamlIncludeAnchors(base.BaseTestCase):
class TestCaseLocalYamlRetainAnchors(base.BaseTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
def test_retain_anchors_default(self):
"""
Verify that anchors are NOT retained across files by default.
"""
files = ["custom_retain_anchors_include001.yaml",
"custom_retain_anchors.yaml"]
files = ["custom_retain_anchors_include001.yaml", "custom_retain_anchors.yaml"]
jjb_config = JJBConfig()
# use the default value for retain_anchors
jjb_config.validate()
j = YamlParser(jjb_config)
with ExpectedException(yaml.composer.ComposerError,
"found undefined alias.*"):
with ExpectedException(yaml.composer.ComposerError, "found undefined alias.*"):
j.load_files([os.path.join(self.fixtures_path, f) for f in files])
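
Without retain_anchors, an anchor defined in the first file is already forgotten by the time the second file is parsed, which is exactly PyYAML's undefined-alias failure:

import yaml

consumer = "- job: *common\n"  # alias defined in some other file
try:
    yaml.safe_load(consumer)
except yaml.composer.ComposerError as err:
    print(err)  # "found undefined alias ...", the message matched above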
def test_retain_anchors_enabled(self):
@ -109,11 +111,10 @@ class TestCaseLocalYamlRetainAnchors(base.BaseTestCase):
enabled in the config.
"""
files = ["custom_retain_anchors_include001.yaml",
"custom_retain_anchors.yaml"]
files = ["custom_retain_anchors_include001.yaml", "custom_retain_anchors.yaml"]
jjb_config = JJBConfig()
jjb_config.yamlparser['retain_anchors'] = True
jjb_config.yamlparser["retain_anchors"] = True
jjb_config.validate()
j = YamlParser(jjb_config)
j.load_files([os.path.join(self.fixtures_path, f) for f in files])

View File

@ -21,5 +21,5 @@ from tests import base
class TestCaseModuleSCMMacro(base.SingleJobTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)

View File

@ -9,25 +9,34 @@ from tests import base
class ModuleRegistryPluginInfoTestsWithScenarios(
testscenarios.TestWithScenarios, base.BaseTestCase):
testscenarios.TestWithScenarios, base.BaseTestCase
):
scenarios = [
('s1', dict(v1='1.0.0', op='__gt__', v2='0.8.0')),
('s2', dict(v1='1.0.1alpha', op='__gt__', v2='1.0.0')),
('s3', dict(v1='1.0', op='__eq__', v2='1.0.0')),
('s4', dict(v1='1.0', op='__eq__', v2='1.0')),
('s5', dict(v1='1.0', op='__lt__', v2='1.8.0')),
('s6', dict(v1='1.0.1alpha', op='__lt__', v2='1.0.1')),
('s7', dict(v1='1.0alpha', op='__lt__', v2='1.0.0')),
('s8', dict(v1='1.0-alpha', op='__lt__', v2='1.0.0')),
('s9', dict(v1='1.1-alpha', op='__gt__', v2='1.0')),
('s10', dict(v1='1.0-SNAPSHOT', op='__lt__', v2='1.0')),
('s11', dict(v1='1.0.preview', op='__lt__', v2='1.0')),
('s12', dict(v1='1.1-SNAPSHOT', op='__gt__', v2='1.0')),
('s13', dict(v1='1.0a-SNAPSHOT', op='__lt__', v2='1.0a')),
('s14', dict(v1='1.4.6-SNAPSHOT (private-0986edd9-example)',
op='__lt__', v2='1.4.6')),
('s15', dict(v1='1.4.6-SNAPSHOT (private-0986edd9-example)',
op='__gt__', v2='1.4.5')),
("s1", dict(v1="1.0.0", op="__gt__", v2="0.8.0")),
("s2", dict(v1="1.0.1alpha", op="__gt__", v2="1.0.0")),
("s3", dict(v1="1.0", op="__eq__", v2="1.0.0")),
("s4", dict(v1="1.0", op="__eq__", v2="1.0")),
("s5", dict(v1="1.0", op="__lt__", v2="1.8.0")),
("s6", dict(v1="1.0.1alpha", op="__lt__", v2="1.0.1")),
("s7", dict(v1="1.0alpha", op="__lt__", v2="1.0.0")),
("s8", dict(v1="1.0-alpha", op="__lt__", v2="1.0.0")),
("s9", dict(v1="1.1-alpha", op="__gt__", v2="1.0")),
("s10", dict(v1="1.0-SNAPSHOT", op="__lt__", v2="1.0")),
("s11", dict(v1="1.0.preview", op="__lt__", v2="1.0")),
("s12", dict(v1="1.1-SNAPSHOT", op="__gt__", v2="1.0")),
("s13", dict(v1="1.0a-SNAPSHOT", op="__lt__", v2="1.0a")),
(
"s14",
dict(
v1="1.4.6-SNAPSHOT (private-0986edd9-example)", op="__lt__", v2="1.4.6"
),
),
(
"s15",
dict(
v1="1.4.6-SNAPSHOT (private-0986edd9-example)", op="__gt__", v2="1.4.5"
),
),
]
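
Each scenario drives the comparison by looking the operator up on the parsed version via its dunder name, as test_plugin_version_comparison below shows. Scenario s2 in isolation:

import pkg_resources

v1 = pkg_resources.parse_version("1.0.1alpha")
v2 = pkg_resources.parse_version("1.0.0")
# "1.0.1alpha" normalizes to the pre-release 1.0.1a0, which still sorts
# after the final release 1.0.0.
assert getattr(v1, "__gt__")(v2)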
def setUp(self):
@ -36,13 +45,16 @@ class ModuleRegistryPluginInfoTestsWithScenarios(
jjb_config = JJBConfig()
jjb_config.validate()
plugin_info = [{'shortName': "HerpDerpPlugin",
'longName': "Blah Blah Blah Plugin"
}]
plugin_info.append({'shortName': "JankyPlugin1",
'longName': "Not A Real Plugin",
'version': self.v1
})
plugin_info = [
{"shortName": "HerpDerpPlugin", "longName": "Blah Blah Blah Plugin"}
]
plugin_info.append(
{
"shortName": "JankyPlugin1",
"longName": "Not A Real Plugin",
"version": self.v1,
}
)
self.addDetail("plugin_info", text_content(str(plugin_info)))
self.registry = ModuleRegistry(jjb_config, plugin_info)
@ -61,7 +73,7 @@ class ModuleRegistryPluginInfoTestsWithScenarios(
plugin_info = self.registry.get_plugin_info(plugin_name)
self.assertIsInstance(plugin_info, dict)
self.assertEqual(plugin_info['shortName'], plugin_name)
self.assertEqual(plugin_info["shortName"], plugin_name)
def test_get_plugin_info_dict_using_longName(self):
"""
@ -74,7 +86,7 @@ class ModuleRegistryPluginInfoTestsWithScenarios(
plugin_info = self.registry.get_plugin_info(plugin_name)
self.assertIsInstance(plugin_info, dict)
self.assertEqual(plugin_info['longName'], plugin_name)
self.assertEqual(plugin_info["longName"], plugin_name)
def test_get_plugin_info_dict_no_plugin(self):
"""
@ -101,8 +113,8 @@ class ModuleRegistryPluginInfoTestsWithScenarios(
plugin_info = self.registry.get_plugin_info(plugin_name)
self.assertIsInstance(plugin_info, dict)
self.assertEqual(plugin_info['shortName'], plugin_name)
self.assertEqual(plugin_info['version'], '0')
self.assertEqual(plugin_info["shortName"], plugin_name)
self.assertEqual(plugin_info["version"], "0")
def test_plugin_version_comparison(self):
"""
@ -117,7 +129,8 @@ class ModuleRegistryPluginInfoTestsWithScenarios(
op = getattr(pkg_resources.parse_version(v1), self.op)
test = op(pkg_resources.parse_version(self.v2))
self.assertTrue(test,
msg="Unexpectedly found {0} {2} {1} == False "
"when comparing versions!"
.format(v1, self.v2, self.op))
self.assertTrue(
test,
msg="Unexpectedly found {0} {2} {1} == False "
"when comparing versions!".format(v1, self.v2, self.op),
)

View File

@ -24,86 +24,91 @@ from tests import base
class TestCaseTestHelpers(base.BaseTestCase):
def test_convert_mapping_to_xml(self):
"""
Tests the convert_mapping_to_xml function
"""
# Test default values
default_root = XML.Element('testdefault')
default_root = XML.Element("testdefault")
default_data = yaml.load("string: hello")
default_mappings = [('default-string', 'defaultString', 'default')]
default_mappings = [("default-string", "defaultString", "default")]
convert_mapping_to_xml(
default_root,
default_data,
default_mappings,
fail_required=True)
result = default_root.find('defaultString').text
self.assertThat(result, Equals('default'))
default_root, default_data, default_mappings, fail_required=True
)
result = default_root.find("defaultString").text
self.assertThat(result, Equals("default"))
# Test user input
user_input_root = XML.Element('testUserInput')
user_input_root = XML.Element("testUserInput")
user_input_data = yaml.load("user-input-string: hello")
user_input_mappings = [('user-input-string', 'userInputString',
'user-input')]
user_input_mappings = [("user-input-string", "userInputString", "user-input")]
convert_mapping_to_xml(
user_input_root, user_input_data, user_input_mappings, fail_required=True
)
result = user_input_root.find("userInputString").text
self.assertThat(result, Equals("hello"))
# Test missing required input
required_root = XML.Element("testrequired")
required_data = yaml.load("string: hello")
required_mappings = [("required-string", "requiredString", None)]
self.assertRaises(
MissingAttributeError,
convert_mapping_to_xml,
required_root,
required_data,
required_mappings,
fail_required=True,
)
# Test invalid user input for list
user_input_root = XML.Element("testUserInput")
user_input_data = yaml.load("user-input-string: bye")
valid_inputs = ["hello"]
user_input_mappings = [
("user-input-string", "userInputString", "user-input", valid_inputs)
]
self.assertRaises(
InvalidAttributeError,
convert_mapping_to_xml,
user_input_root,
user_input_data,
user_input_mappings,
fail_required=True)
result = user_input_root.find('userInputString').text
self.assertThat(result, Equals('hello'))
# Test missing required input
required_root = XML.Element('testrequired')
required_data = yaml.load("string: hello")
required_mappings = [('required-string', 'requiredString', None)]
self.assertRaises(MissingAttributeError,
convert_mapping_to_xml,
required_root,
required_data,
required_mappings,
fail_required=True)
# Test invalid user input for list
user_input_root = XML.Element('testUserInput')
user_input_data = yaml.load("user-input-string: bye")
valid_inputs = ['hello']
user_input_mappings = [('user-input-string', 'userInputString',
'user-input', valid_inputs)]
self.assertRaises(InvalidAttributeError,
convert_mapping_to_xml,
user_input_root,
user_input_data,
user_input_mappings)
)
# Test invalid user input for dict
user_input_root = XML.Element('testUserInput')
user_input_root = XML.Element("testUserInput")
user_input_data = yaml.load("user-input-string: later")
valid_inputs = {'hello': 'world'}
user_input_mappings = [('user-input-string', 'userInputString',
'user-input', valid_inputs)]
valid_inputs = {"hello": "world"}
user_input_mappings = [
("user-input-string", "userInputString", "user-input", valid_inputs)
]
self.assertRaises(InvalidAttributeError,
convert_mapping_to_xml,
user_input_root,
user_input_data,
user_input_mappings)
self.assertRaises(
InvalidAttributeError,
convert_mapping_to_xml,
user_input_root,
user_input_data,
user_input_mappings,
)
# Test invalid key for dict
user_input_root = XML.Element('testUserInput')
user_input_root = XML.Element("testUserInput")
user_input_data = yaml.load("user-input-string: world")
valid_inputs = {'hello': 'world'}
user_input_mappings = [('user-input-string', 'userInputString',
'user-input', valid_inputs)]
valid_inputs = {"hello": "world"}
user_input_mappings = [
("user-input-string", "userInputString", "user-input", valid_inputs)
]
self.assertRaises(InvalidAttributeError,
convert_mapping_to_xml,
user_input_root,
user_input_data,
user_input_mappings)
self.assertRaises(
InvalidAttributeError,
convert_mapping_to_xml,
user_input_root,
user_input_data,
user_input_mappings,
)
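
Pulling the happy path out of the test for quick reference; the import path for the helper is assumed to be jenkins_jobs.modules.helpers:

import xml.etree.ElementTree as XML

import yaml

from jenkins_jobs.modules.helpers import convert_mapping_to_xml

root = XML.Element("testUserInput")
data = yaml.safe_load("user-input-string: hello")
# Each mapping entry is (yaml key, xml tag, default); a None default
# marks the key as required, and a fourth element lists valid inputs.
mapping = [("user-input-string", "userInputString", "user-input")]
convert_mapping_to_xml(root, data, mapping, fail_required=True)
print(root.find("userInputString").text)  # hello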

View File

@ -19,9 +19,9 @@ import os
from jenkins_jobs.modules import project_multibranch
@mock.patch('uuid.uuid4', mock.Mock(return_value='1-1-1-1-1'))
@mock.patch("uuid.uuid4", mock.Mock(return_value="1-1-1-1-1"))
class TestCaseMultibranchPipeline(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
default_config_file = '/dev/null'
default_config_file = "/dev/null"
klass = project_multibranch.WorkflowMultiBranch
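
Pinning uuid4 keeps the IDs embedded in the generated multibranch XML stable, so the output can be compared byte for byte against the fixtures. The mechanics in isolation:

import uuid
from unittest import mock

with mock.patch("uuid.uuid4", mock.Mock(return_value="1-1-1-1-1")):
    print(uuid.uuid4())  # 1-1-1-1-1, on every call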

View File

@ -22,6 +22,6 @@ from tests import base
class TestCaseModuleNotifications(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = notifications.Notifications

View File

@ -30,19 +30,18 @@ class TestCaseParallel(TestCase):
def parallel_test(num_base, num_extra):
return num_base + num_extra
parallel_args = [{'num_extra': num} for num in range(10)]
parallel_args = [{"num_extra": num} for num in range(10)]
result = parallel_test(10, concurrent=parallel_args)
self.assertThat(result, matchers.Equals(expected))
def test_parallel_time_less_than_serial(self):
@concurrent
def wait(secs):
time.sleep(secs)
before = time.time()
# ten threads to make it as fast as possible
wait(concurrent=[{'secs': 1} for _ in range(10)], n_workers=10)
wait(concurrent=[{"secs": 1} for _ in range(10)], n_workers=10)
after = time.time()
self.assertThat(after - before, matchers.LessThan(5))
@ -53,18 +52,16 @@ class TestCaseParallel(TestCase):
def parallel_test(num_base, num_extra):
return num_base + num_extra
parallel_args = [{'num_extra': num} for num in range(10)]
parallel_args = [{"num_extra": num} for num in range(10)]
result = parallel_test(10, concurrent=parallel_args, n_workers=1)
self.assertThat(result, matchers.Equals(expected))
@mock.patch('jenkins_jobs.parallel.cpu_count', wraps=cpu_count)
@mock.patch("jenkins_jobs.parallel.cpu_count", wraps=cpu_count)
def test_use_auto_detect_cores(self, mockCpu_count):
@concurrent
def parallel_test():
return True
result = parallel_test(concurrent=[{} for _ in range(10)],
n_workers=0)
result = parallel_test(concurrent=[{} for _ in range(10)], n_workers=0)
self.assertThat(result, matchers.Equals([True for _ in range(10)]))
mockCpu_count.assert_called_once_with()
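
Taken together these tests pin down the @concurrent contract: one call per kwargs dict, results gathered in input order, and n_workers=0 delegating to cpu_count(). A compact usage sketch, assuming the decorator is imported from jenkins_jobs.parallel as in this test module:

from jenkins_jobs.parallel import concurrent

@concurrent
def add(num_base, num_extra):
    return num_base + num_extra

# Fans out one call per dict across two workers; the ordered result
# list is what the Equals(expected) assertions rely on.
results = add(10, concurrent=[{"num_extra": n} for n in range(3)], n_workers=2)
print(results)  # [10, 11, 12]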

View File

@ -22,6 +22,6 @@ from tests import base
class TestCaseModuleParameters(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = parameters.Parameters

View File

@ -22,6 +22,6 @@ from tests import base
class TestCaseModuleProperties(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = properties.Properties

View File

@ -22,6 +22,6 @@ from tests import base
class TestCaseModulePublishers(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = publishers.Publishers

View File

@ -21,6 +21,6 @@ from tests import base
class TestCaseModuleReporters(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = reporters.Reporters

View File

@ -22,6 +22,6 @@ from tests import base
class TestCaseModuleSCM(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = scm.SCM

View File

@ -22,6 +22,6 @@ from tests import base
class TestCaseModuleTriggers(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = triggers.Triggers

View File

@ -20,18 +20,18 @@ from tests import base
class TestCaseModuleViewAll(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = view_all.All
class TestCaseModuleViewList(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = view_list.List
class TestCaseModuleViewPipeline(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = view_pipeline.Pipeline

View File

@ -22,6 +22,6 @@ from tests import base
class TestCaseModuleWrappers(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = wrappers.Wrappers

View File

@ -23,23 +23,23 @@ from tests import base
class TestXmlJobGeneratorExceptions(base.BaseTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'exceptions')
fixtures_path = os.path.join(os.path.dirname(__file__), "exceptions")
def test_invalid_project(self):
self.conf_filename = None
config = self._get_config()
yp = parser.YamlParser(config)
yp.parse(os.path.join(self.fixtures_path,
"invalid_project.yaml"))
yp.parse(os.path.join(self.fixtures_path, "invalid_project.yaml"))
reg = registry.ModuleRegistry(config)
job_data, _ = yp.expandYaml(reg)
# Generate the XML tree
xml_generator = xml_config.XmlJobGenerator(reg)
e = self.assertRaises(errors.JenkinsJobsException,
xml_generator.generateXML, job_data)
e = self.assertRaises(
errors.JenkinsJobsException, xml_generator.generateXML, job_data
)
self.assertIn("Unrecognized project-type:", str(e))
def test_invalid_view(self):
@ -54,8 +54,9 @@ class TestXmlJobGeneratorExceptions(base.BaseTestCase):
# Generate the XML tree
xml_generator = xml_config.XmlViewGenerator(reg)
e = self.assertRaises(errors.JenkinsJobsException,
xml_generator.generateXML, view_data)
e = self.assertRaises(
errors.JenkinsJobsException, xml_generator.generateXML, view_data
)
self.assertIn("Unrecognized view-type:", str(e))
def test_incorrect_template_params(self):
@ -63,8 +64,7 @@ class TestXmlJobGeneratorExceptions(base.BaseTestCase):
config = self._get_config()
yp = parser.YamlParser(config)
yp.parse(os.path.join(self.fixtures_path,
"failure_formatting_component.yaml"))
yp.parse(os.path.join(self.fixtures_path, "failure_formatting_component.yaml"))
reg = registry.ModuleRegistry(config)
reg.set_parser_data(yp.data)

View File

@ -24,20 +24,19 @@ from tests import base
class TestCaseModuleYamlInclude(base.SingleJobTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
class TestYamlParserExceptions(base.BaseTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'exceptions')
fixtures_path = os.path.join(os.path.dirname(__file__), "exceptions")
def test_incorrect_template_dimensions(self):
self.conf_filename = None
config = self._get_config()
yp = parser.YamlParser(config)
yp.parse(os.path.join(self.fixtures_path,
"incorrect_template_dimensions.yaml"))
yp.parse(os.path.join(self.fixtures_path, "incorrect_template_dimensions.yaml"))
reg = registry.ModuleRegistry(config)
@ -47,23 +46,22 @@ class TestYamlParserExceptions(base.BaseTestCase):
class TestYamlParserFailureFormattingExceptions(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'exceptions')
scenarios = [
('s1', {'name': 'template'}),
('s2', {'name': 'params'})
]
fixtures_path = os.path.join(os.path.dirname(__file__), "exceptions")
scenarios = [("s1", {"name": "template"}), ("s2", {"name": "params"})]
def test_yaml_snippet(self):
self.conf_filename = None
config = self._get_config()
yp = parser.YamlParser(config)
yp.parse(os.path.join(self.fixtures_path,
"failure_formatting_{}.yaml".format(self.name)))
yp.parse(
os.path.join(
self.fixtures_path, "failure_formatting_{}.yaml".format(self.name)
)
)
reg = registry.ModuleRegistry(config)
self.assertRaises(Exception, yp.expandYaml, reg)
self.assertIn("Failure formatting {}".format(self.name),
self.logger.output)
self.assertIn("Failure formatting {}".format(self.name), self.logger.output)
self.assertIn("Problem formatting with args", self.logger.output)