Rewrite YAML parser

Rewrite the YAML parser, YAML objects and parameter expansion logic to
allow better control over how and when expansion happens.
Broken backward compatibility:
* More aggressive parameter expansion. This may lead to parameters
  being expanded in places where they were not expanded before.
* Top-level elements unknown to the parser (anything other than 'job',
  'view', 'project' etc.) now lead to parse failures.
  Prepend them with an underscore to have the parser ignore them
  (see the sketch after this list).
* Files included using '!include-raw:' elements whose path contains
  format placeholders ('lazy-loaded' in the previous implementation)
  are now expanded too.
  Use '!include-raw-escape:' for them instead.
  See changes in these tests for examples:
    tests/yamlparser/job_fixtures/lazy-load-jobs-multi001.yaml
    tests/yamlparser/job_fixtures/lazy-load-jobs-multi002.yaml
    tests/yamlparser/job_fixtures/lazy-load-jobs001.yaml
* Parameters whose template value referenced the parameter itself used
  to be substituted as-is. For example, "timer: '{timer}'" was expanded
  to "{timer}". Now this raises a recursive parameter error.
  See changes in this test for example:
    tests/yamlparser/job_fixtures/parameter_name_reuse_default.*
    ->
    tests/yamlparser/error_fixtures/parameter_name_reuse_default.*
* When a job group included a job that was never declared, the missing
  job was silently ignored. Now it fails with a "job is missing" error.
  See changes in this test for example:
    tests/yamlparser/job_fixtures/job_group_includes_missing_job.*
    ->
    tests/yamlparser/error_fixtures/job_group_includes_missing_job.*
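For illustration, a hypothetical YAML sketch of the underscore rule
(the element and job names are made up):

    - _shared-settings:      # unknown to the parser; ignored because of the
        any: data            # leading underscore
    - job:
        name: example-job
        builders:
          - shell: "echo hello"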

Change-Id: Ief4e515f065a1b9e0f74fe06d7e94fa77d69f273
Vsevolod Fedorov 2022-07-21 11:26:50 +03:00
parent a47e4ee896
commit af9e03ec08
135 changed files with 3304 additions and 1892 deletions

View File

@@ -659,12 +659,12 @@ the same anchor name in included files without collisions.
A simple example can be seen in the specs `full length example`_ with the
following being more representative of usage within JJB:
.. literalinclude:: /../../tests/localyaml/fixtures/anchors_aliases.iyaml
.. literalinclude:: /../../tests/loader/fixtures/anchors_aliases.iyaml
Which will be expanded to the following yaml before being processed:
.. literalinclude:: /../../tests/localyaml/fixtures/anchors_aliases.oyaml
.. literalinclude:: /../../tests/loader/fixtures/anchors_aliases.oyaml
.. _full length example: https://yaml.org/spec/1.2.2/#25-full-length-example
@@ -674,7 +674,7 @@ Which will be expanded to the following yaml before being processed:
Custom Yaml Tags
----------------
.. automodule:: jenkins_jobs.local_yaml
.. automodule:: jenkins_jobs.yaml_objects
Modules

View File

@@ -17,6 +17,7 @@ import io
import os
import logging
import platform
from pathlib import Path
from stevedore import extension
import yaml
@@ -126,7 +127,9 @@ class JenkinsJobs(object):
self.options.path = [self.options.path]
else:
# take list of paths
self.options.path = self.options.path.split(os.pathsep)
self.options.path = [
Path(p) for p in self.options.path.split(os.pathsep)
]
do_recurse = (
getattr(self.options, "recursive", False)
@@ -142,7 +145,7 @@ class JenkinsJobs(object):
paths.extend(utils.recurse_path(path, excludes))
else:
paths.append(path)
self.options.path = paths
self.options.path = [Path(p) for p in paths]
def execute(self):

View File

@@ -14,11 +14,39 @@
# under the License.
import abc
import six
import fnmatch
import logging
import time
from jenkins_jobs.builder import JenkinsManager
from jenkins_jobs.registry import ModuleRegistry
from jenkins_jobs.roots import Roots
from jenkins_jobs.xml_config import XmlJobGenerator
from jenkins_jobs.xml_config import XmlViewGenerator
from jenkins_jobs.loader import load_files
@six.add_metaclass(abc.ABCMeta)
class BaseSubCommand(object):
logger = logging.getLogger(__name__)
def matches(name, glob_list):
"""
Checks if the given string, ``name``, matches any of the glob patterns in
the iterable, ``glob_list``
:arg str name: String (job or view name) to test if it matches a pattern
:arg iterable glob_list: glob patterns to match (list, tuple, set, etc.)
"""
return any(fnmatch.fnmatch(name, glob) for glob in glob_list)
def filter_matching(item_list, glob_list):
if not glob_list:
return item_list
return [item for item in item_list if matches(item["name"], glob_list)]
class BaseSubCommand(metaclass=abc.ABCMeta):
"""Base class for Jenkins Job Builder subcommands, intended to allow
subcommands to be loaded as stevedore extensions by third party users.
"""
@@ -69,3 +97,39 @@ class BaseSubCommand(object):
help="paths to exclude when using recursive search, "
"uses standard globbing.",
)
class JobsSubCommand(BaseSubCommand):
"""Base class for Jenkins Job Builder subcommands which generates jobs."""
def load_roots(self, jjb_config, path_list):
roots = Roots(jjb_config)
load_files(jjb_config, roots, path_list)
return roots
def make_jobs_and_views_xml(self, jjb_config, path_list, glob_list):
logger.info("Updating jobs in {0} ({1})".format(path_list, glob_list))
orig = time.time()
roots = self.load_roots(jjb_config, path_list)
builder = JenkinsManager(jjb_config)
registry = ModuleRegistry(jjb_config, builder.plugins_list)
registry.set_macros(roots.macros)
jobs = filter_matching(roots.generate_jobs(), glob_list)
views = filter_matching(roots.generate_views(), glob_list)
registry.amend_job_dicts(jobs)
xml_job_generator = XmlJobGenerator(registry)
xml_view_generator = XmlViewGenerator(registry)
xml_jobs = xml_job_generator.generateXML(jobs)
xml_views = xml_view_generator.generateXML(views)
step = time.time()
logging.debug("%d XML files generated in %ss", len(jobs), str(step - orig))
return builder, xml_jobs, xml_views
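A sketch (not part of this change) of how a subcommand built on JobsSubCommand
might use this helper; the 'dump' subcommand, the options attributes and the
XmlJob.name access are assumptions modelled on the update/test subcommands:

    import jenkins_jobs.cli.subcommand.base as base

    class DumpSubCommand(base.JobsSubCommand):
        def parse_args(self, subparser):
            subparser.add_parser("dump")

        def execute(self, options, jjb_config):
            builder, xml_jobs, xml_views = self.make_jobs_and_views_xml(
                jjb_config, options.path, options.names
            )
            # Print the generated job names instead of uploading them.
            for xml_job in xml_jobs:
                print(xml_job.name)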

View File

@@ -16,12 +16,10 @@
from jenkins_jobs.builder import JenkinsManager
from jenkins_jobs.errors import JenkinsJobsException
from jenkins_jobs.parser import YamlParser
from jenkins_jobs.registry import ModuleRegistry
import jenkins_jobs.cli.subcommand.base as base
class DeleteSubCommand(base.BaseSubCommand):
class DeleteSubCommand(base.JobsSubCommand):
def parse_args(self, subparser):
delete = subparser.add_parser("delete")
@@ -59,23 +57,20 @@ class DeleteSubCommand(base.BaseSubCommand):
'"--views-only" and "--jobs-only" cannot be used together.'
)
fn = options.path
registry = ModuleRegistry(jjb_config, builder.plugins_list)
parser = YamlParser(jjb_config)
if fn:
parser.load_files(fn)
parser.expandYaml(registry, options.name)
jobs = [j["name"] for j in parser.jobs]
views = [v["name"] for v in parser.views]
if options.path:
roots = self.load_roots(jjb_config, options.path)
jobs = base.filter_matching(roots.generate_jobs(), options.name)
views = base.filter_matching(roots.generate_views(), options.name)
job_names = [j["name"] for j in jobs]
view_names = [v["name"] for v in views]
else:
jobs = options.name
views = options.name
job_names = options.name
view_names = options.name
if options.del_jobs:
builder.delete_jobs(jobs)
builder.delete_jobs(job_names)
elif options.del_views:
builder.delete_views(views)
builder.delete_views(view_names)
else:
builder.delete_jobs(jobs)
builder.delete_views(views)
builder.delete_jobs(job_names)
builder.delete_views(view_names)

View File

@@ -14,11 +14,10 @@
# under the License.
import logging
import sys
from jenkins_jobs.builder import JenkinsManager
import jenkins_jobs.cli.subcommand.base as base
import jenkins_jobs.utils as utils
import jenkins_jobs.builder as builder
import jenkins_jobs.parser as parser
import jenkins_jobs.registry as registry
def list_duplicates(seq):
@@ -26,7 +25,7 @@ def list_duplicates(seq):
return set(x for x in seq if x in seen or seen.add(x))
class ListSubCommand(base.BaseSubCommand):
class ListSubCommand(base.JobsSubCommand):
def parse_args(self, subparser):
list = subparser.add_parser("list", help="List jobs")
@@ -38,10 +37,7 @@ class ListSubCommand(base.BaseSubCommand):
)
def execute(self, options, jjb_config):
self.jjb_config = jjb_config
self.jenkins = builder.JenkinsManager(jjb_config)
jobs = self.get_jobs(options.names, options.path)
jobs = self.get_jobs(jjb_config, options.path, options.names)
logging.info("Matching jobs: %d", len(jobs))
stdout = utils.wrap_stream(sys.stdout)
@@ -49,24 +45,23 @@ class ListSubCommand(base.BaseSubCommand):
for job in jobs:
stdout.write((job + "\n").encode("utf-8"))
def get_jobs(self, jobs_glob=None, fn=None):
if fn:
r = registry.ModuleRegistry(self.jjb_config, self.jenkins.plugins_list)
p = parser.YamlParser(self.jjb_config)
p.load_files(fn)
p.expandYaml(r, jobs_glob)
jobs = [j["name"] for j in p.jobs]
def get_jobs(self, jjb_config, path_list, glob_list):
if path_list:
roots = self.load_roots(jjb_config, path_list)
jobs = base.filter_matching(roots.generate_jobs(), glob_list)
job_names = [j["name"] for j in jobs]
else:
jobs = [
jenkins = JenkinsManager(jjb_config)
job_names = [
j["fullname"]
for j in self.jenkins.get_jobs()
if not jobs_glob or parser.matches(j["fullname"], jobs_glob)
for j in jenkins.get_jobs()
if not glob_list or base.matches(j["fullname"], glob_list)
]
jobs = sorted(jobs)
for duplicate in list_duplicates(jobs):
job_names = sorted(job_names)
for duplicate in list_duplicates(job_names):
logging.warning("Found duplicate job name '%s', likely bug.", duplicate)
logging.debug("Builder.get_jobs: returning %r", jobs)
logging.debug("Builder.get_jobs: returning %r", job_names)
return jobs
return job_names

View File

@@ -51,7 +51,7 @@ class TestSubCommand(update.UpdateSubCommand):
def execute(self, options, jjb_config):
if not options.config_xml:
logger.warn(
logger.warning(
"(Deprecated) The default output behavior of"
" `jenkins-jobs test` when given the --output"
" flag will change in JJB 3.0."
@@ -61,7 +61,9 @@ class TestSubCommand(update.UpdateSubCommand):
" `--config-xml` parameter."
)
builder, xml_jobs, xml_views = self._generate_xmljobs(options, jjb_config)
builder, xml_jobs, xml_views = self.make_jobs_and_views_xml(
jjb_config, options.path, options.names
)
builder.update_jobs(
xml_jobs,

View File

@@ -15,13 +15,7 @@
import logging
import sys
import time
from jenkins_jobs.builder import JenkinsManager
from jenkins_jobs.parser import YamlParser
from jenkins_jobs.registry import ModuleRegistry
from jenkins_jobs.xml_config import XmlJobGenerator
from jenkins_jobs.xml_config import XmlViewGenerator
from jenkins_jobs.errors import JenkinsJobsException
import jenkins_jobs.cli.subcommand.base as base
@@ -29,7 +23,7 @@ import jenkins_jobs.cli.subcommand.base as base
logger = logging.getLogger(__name__)
class UpdateSubCommand(base.BaseSubCommand):
class UpdateSubCommand(base.JobsSubCommand):
def parse_arg_path(self, parser):
parser.add_argument(
"path",
@@ -107,39 +101,15 @@ class UpdateSubCommand(base.BaseSubCommand):
help="update only views",
)
def _generate_xmljobs(self, options, jjb_config=None):
builder = JenkinsManager(jjb_config)
logger.info("Updating jobs in {0} ({1})".format(options.path, options.names))
orig = time.time()
# Generate XML
parser = YamlParser(jjb_config)
registry = ModuleRegistry(jjb_config, builder.plugins_list)
xml_job_generator = XmlJobGenerator(registry)
xml_view_generator = XmlViewGenerator(registry)
parser.load_files(options.path)
registry.set_parser_data(parser.data)
job_data_list, view_data_list = parser.expandYaml(registry, options.names)
xml_jobs = xml_job_generator.generateXML(job_data_list)
xml_views = xml_view_generator.generateXML(view_data_list)
jobs = parser.jobs
step = time.time()
logging.debug("%d XML files generated in %ss", len(jobs), str(step - orig))
return builder, xml_jobs, xml_views
def execute(self, options, jjb_config):
if options.n_workers < 0:
raise JenkinsJobsException(
"Number of workers must be equal or greater than 0"
)
builder, xml_jobs, xml_views = self._generate_xmljobs(options, jjb_config)
builder, xml_jobs, xml_views = self.make_jobs_and_views_xml(
jjb_config, options.path, options.names
)
if options.enabled_only:
# filter out jobs which are disabled

jenkins_jobs/defaults.py (new file, 187 lines)

@@ -0,0 +1,187 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from dataclasses import dataclass
job_contents_keys = {
# Same as for macros.
"parameters",
"properties",
"builders",
"wrappers",
"triggers",
"publishers",
"scm",
"pipeline-scm",
"reporters",
# General.
"project-type",
"folder",
"node",
"jdk",
"actions",
"disabled",
"display-name",
"block-downstream",
"block-upstream",
"auth-token",
"concurrent",
"workspace",
"child-workspace",
"quiet-period",
"retry-count",
"logrotate",
"raw",
# Builders.
"prebuilders",
"postbuilders",
# HipChat.
"hipchat",
# Notifications.
"notifications",
# project Flow.
"dsl",
"needs-workspace",
"dsl-file",
# GithubOrganization.
"prune-dead-branches",
"days-to-keep",
"number-to-keep",
"periodic-folder-trigger",
"github-org",
"script-path",
# Matrix.
"execution-strategy",
"yaml-strategy",
"p4-strategy",
"axes",
# Maven.
"maven",
"per-module-email",
# WorkflowMultiBranch.
"sandbox",
"script-id",
"script-path",
"prune-dead-branches",
"days-to-keep",
"number-to-keep",
"periodic-folder-trigger",
# Pipeline.
"dsl",
"sandbox",
# project Workflow.
"dsl",
"sandbox",
}
view_contents_keys = {
# Common.
"filter-executors",
"filter-queue",
# All
# <nothing>
# List.
"job-name",
"job-filters",
"width",
"alignment",
"columns",
"regex",
"recurse",
# Sectioned.
"sections",
# SectionedText.
"width",
"alignment",
"text",
"style",
# DeliveryPipeline.
"aggregated-changes-grouping-pattern",
"allow-abort",
"allow-manual-triggers",
"allow-pipeline-start",
"allow-rebuild",
"link-relative",
"link-to-console-log",
"max-number-of-visible-pipelines",
"name",
"no-of-columns",
"no-of-pipelines",
"paging-enabled",
"show-absolute-date-time",
"show-aggregated-changes",
"show-aggregated-pipeline",
"show-avatars",
"show-changes",
"show-description",
"show-promotions",
"show-static-analysis-results",
"show-test-results",
"show-total-build-time",
"update-interval",
"sorting",
"components",
"regexps",
# Nested.
"views",
"default-view",
"columns",
# Pipeline.
"first-job",
"name",
"no-of-displayed-builds",
"title",
"link-style",
"css-Url",
"latest-job-only",
"manual-trigger",
"show-parameters",
"parameters-in-headers",
"start-with-parameters",
"refresh-frequency",
"definition-header",
}
def split_contents_params(data, contents_keys):
contents = {key: value for key, value in data.items() if key in contents_keys}
params = {key: value for key, value in data.items() if key not in contents_keys}
return (contents, params)
@dataclass
class Defaults:
name: str
params: dict
contents: dict # Values that go to job contents.
@classmethod
def add(cls, config, roots, expander, params_expander, data):
d = {**data}
name = d.pop("name")
contents, params = split_contents_params(
d, job_contents_keys | view_contents_keys
)
defaults = cls(name, params, contents)
roots.defaults[name] = defaults
@classmethod
def empty(cls):
return Defaults("empty", params={}, contents={})
def merged_with_global(self, global_):
return Defaults(
name=f"{self.name}-merged-with-global",
params={**global_.params, **self.params},
contents={**global_.contents, **self.contents},
)
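A brief sketch of how split_contents_params divides a definition; the sample
data is made up:

    data = {
        "node": "builder-node",     # known job contents key
        "disabled": False,          # known job contents key
        "git-url": "https://example.org/repo.git",  # custom parameter
    }
    contents, params = split_contents_params(data, job_contents_keys)
    assert contents == {"node": "builder-node", "disabled": False}
    assert params == {"git-url": "https://example.org/repo.git"}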

View File

@@ -0,0 +1,89 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
from .errors import JenkinsJobsException
def merge_dicts(dict_list):
result = {}
for d in dict_list:
result.update(d)
return result
class DimensionsExpander:
def __init__(self, context):
self._context = context
def enum_dimensions_params(self, axes, params, defaults):
if not axes:
# No axes - instantiate one job/view.
yield {}
return
dim_values = []
for axis in axes:
try:
value = params[axis]
except KeyError:
try:
value = defaults[axis]
except KeyError:
continue  # The value may be supplied by another axis' point-specific parameters.
value = self._decode_axis_value(axis, value)
dim_values.append(value)
for values in itertools.product(*dim_values):
yield merge_dicts(values)
def _decode_axis_value(self, axis, value):
if not isinstance(value, list):
yield {axis: value}
return
for item in value:
if not isinstance(item, dict):
yield {axis: item}
continue
if len(item.items()) != 1:
raise JenkinsJobsException(
f"Invalid parameter {axis!r} definition for template {self._context!r}:"
f" Expected a value or a dict with single element, but got: {item!r}"
)
value, p = next(iter(item.items()))
yield {
axis: value, # Point axis value.
**p,  # Point-specific parameters. May override axis value.
}
def is_point_included(self, exclude_list, params):
return not any(self._match_exclude(params, el) for el in exclude_list or [])
def _match_exclude(self, params, exclude):
if not isinstance(exclude, dict):
raise JenkinsJobsException(
f"Template {self._context!r}: Exclude element should be dict, but is: {exclude!r}"
)
if not exclude:
raise JenkinsJobsException(
f"Template {self._context!r}: Exclude element should be dict, but is empty: {exclude!r}"
)
for axis, value in exclude.items():
try:
v = params[axis]
except KeyError:
raise JenkinsJobsException(
f"Template {self._context!r}: Unknown axis {axis!r} for exclude element: {exclude!r}"
)
if value != v:
return False
# All required exclude values are matched.
return True
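A usage sketch with made-up data; each yielded dict is one point of the axes'
cartesian product, and excluded combinations are filtered out:

    expander = DimensionsExpander(context="example-template")
    params = {"os": ["linux", "windows"], "arch": ["x86", "arm"]}
    exclude = [{"os": "windows", "arch": "arm"}]
    for point in expander.enum_dimensions_params(["os", "arch"], params, defaults={}):
        if expander.is_point_included(exclude, point):
            print(point)
    # -> {'os': 'linux', 'arch': 'x86'}
    #    {'os': 'linux', 'arch': 'arm'}
    #    {'os': 'windows', 'arch': 'x86'}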

jenkins_jobs/expander.py (new file, 214 lines)

@@ -0,0 +1,214 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from functools import partial
from jinja2 import StrictUndefined
from .errors import JenkinsJobsException
from .formatter import CustomFormatter, enum_str_format_required_params
from .yaml_objects import (
J2String,
J2Yaml,
YamlInclude,
YamlListJoin,
IncludeJinja2,
IncludeRaw,
IncludeRawEscape,
)
def expand_dict(expander, obj, params):
result = {}
for key, value in obj.items():
expanded_key = expander.expand(key, params)
expanded_value = expander.expand(value, params)
result[expanded_key] = expanded_value
return result
def expand_list(expander, obj, params):
return [expander.expand(item, params) for item in obj]
def expand_tuple(expander, obj, params):
return tuple(expander.expand(item, params) for item in obj)
class StrExpander:
def __init__(self, config):
allow_empty = config.yamlparser["allow_empty_variables"]
self._formatter = CustomFormatter(allow_empty)
def __call__(self, obj, params):
return self._formatter.format(obj, **params)
def call_expand(expander, obj, params):
return obj.expand(expander, params)
def call_subst(expander, obj, params):
return obj.subst(expander, params)
def dont_expand(obj, params):
return obj
yaml_classes_list = [
J2String,
J2Yaml,
YamlInclude,
YamlListJoin,
IncludeJinja2,
IncludeRaw,
IncludeRawEscape,
]
deprecated_yaml_tags = [
("!include", YamlInclude),
("!include-raw", IncludeRaw),
("!include-raw-escape", IncludeRawEscape),
]
# Does not expand string formats. Used in jobs and macros without parameters.
class Expander:
def __init__(self, config):
_yaml_object_expanders = {
cls: partial(call_expand, self) for cls in yaml_classes_list
}
self.expanders = {
dict: partial(expand_dict, self),
list: partial(expand_list, self),
tuple: partial(expand_tuple, self),
str: dont_expand,
bool: dont_expand,
int: dont_expand,
float: dont_expand,
type(None): dont_expand,
**_yaml_object_expanders,
}
def expand(self, obj, params):
t = type(obj)
try:
expander = self.expanders[t]
except KeyError:
raise RuntimeError(f"Do not know how to expand type: {t!r}")
return expander(obj, params)
# Expands string formats also. Used in jobs templates and macros with parameters.
class ParamsExpander(Expander):
def __init__(self, config):
super().__init__(config)
_yaml_object_expanders = {
cls: partial(call_subst, self) for cls in yaml_classes_list
}
self.expanders.update(
{
str: StrExpander(config),
**_yaml_object_expanders,
}
)
def call_required_params(obj):
yield from obj.required_params
def enum_dict_params(obj):
for key, value in obj.items():
yield from enum_required_params(key)
yield from enum_required_params(value)
def enum_seq_params(obj):
for value in obj:
yield from enum_required_params(value)
def no_parameters(obj):
return []
yaml_classes_enumers = {cls: call_required_params for cls in yaml_classes_list}
param_enumers = {
str: enum_str_format_required_params,
dict: enum_dict_params,
list: enum_seq_params,
tuple: enum_seq_params,
bool: no_parameters,
int: no_parameters,
float: no_parameters,
type(None): no_parameters,
**yaml_classes_enumers,
}
# Do not expand these.
disable_expand_for = {"template-name"}
def enum_required_params(obj):
t = type(obj)
try:
enumer = param_enumers[t]
except KeyError:
raise RuntimeError(
f"Do not know how to enumerate required parameters for type: {t!r}"
)
return enumer(obj)
def expand_parameters(expander, param_dict, template_name):
expanded_params = {}
deps = {} # Using dict as ordered set.
def expand(name):
try:
return expanded_params[name]
except KeyError:
pass
try:
format = param_dict[name]
except KeyError:
return StrictUndefined(name=name)
if name in deps:
raise RuntimeError(
f"While expanding {name!r} for template {template_name!r}:"
f" Recursive parameters usage: {name} <- {' <- '.join(deps)}"
)
if name in disable_expand_for:
value = format
else:
required_params = list(enum_required_params(format))
deps[name] = None
try:
params = {n: expand(n) for n in required_params}
finally:
deps.popitem()
try:
value = expander.expand(format, params)
except JenkinsJobsException as x:
used_by_deps = ", used by ".join(f"{d!r}" for d in deps)
raise RuntimeError(
f"While expanding {name!r}, used by {used_by_deps}, used by template {template_name!r}: {x}"
)
expanded_params[name] = value
return value
for name in param_dict:
expand(name)
return expanded_params
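A hedged usage sketch of expand_parameters; the config stub is an assumption
that provides only the single setting ParamsExpander reads:

    class _StubConfig:
        yamlparser = {"allow_empty_variables": False}

    expander = ParamsExpander(_StubConfig())
    params = expand_parameters(
        expander,
        {"flavor": "large", "label": "builder-{flavor}"},
        template_name="example-template",
    )
    assert params == {"flavor": "large", "label": "builder-large"}
    # A self-referencing value such as {"timer": "{timer}"} now raises a
    # "Recursive parameters usage" error instead of expanding to "{timer}".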

View File

@@ -15,82 +15,19 @@
# Manage interpolation of JJB variables into template strings.
import _string
import logging
from pprint import pformat
import re
from string import Formatter
from jenkins_jobs.errors import JenkinsJobsException
from jenkins_jobs.local_yaml import CustomLoader
from jinja2 import Undefined
from jinja2.exceptions import UndefinedError
from .errors import JenkinsJobsException
logger = logging.getLogger(__name__)
def deep_format(obj, paramdict, allow_empty=False):
"""Deep format configuration.
Apply the paramdict via str.format() to all string objects found within
the supplied obj. Lists and dicts are traversed recursively.
"""
# YAML serialisation was originally used to achieve this, but that places
# limitations on the values in paramdict - the post-format result must
# still be valid YAML (so substituting-in a string containing quotes, for
# example, is problematic).
if hasattr(obj, "format"):
try:
ret = CustomFormatter(allow_empty).format(obj, **paramdict)
except KeyError as exc:
missing_key = exc.args[0]
desc = "%s parameter missing to format %s\nGiven:\n%s" % (
missing_key,
obj,
pformat(paramdict),
)
raise JenkinsJobsException(desc)
except Exception:
logging.error(
"Problem formatting with args:\nallow_empty:"
"%s\nobj: %s\nparamdict: %s" % (allow_empty, obj, paramdict)
)
raise
elif isinstance(obj, list):
ret = type(obj)()
for item in obj:
ret.append(deep_format(item, paramdict, allow_empty))
elif isinstance(obj, dict):
ret = type(obj)()
for item in obj:
try:
ret[deep_format(item, paramdict, allow_empty)] = deep_format(
obj[item], paramdict, allow_empty
)
except KeyError as exc:
missing_key = exc.args[0]
desc = "%s parameter missing to format %s\nGiven:\n%s" % (
missing_key,
obj,
pformat(paramdict),
)
raise JenkinsJobsException(desc)
except Exception:
logging.error(
"Problem formatting with args:\nallow_empty:"
"%s\nobj: %s\nparamdict: %s" % (allow_empty, obj, paramdict)
)
raise
else:
ret = obj
if isinstance(ret, CustomLoader):
# If we have a CustomLoader here, we've lazily-loaded a template
# or rendered a template to a piece of YAML;
# attempt to format it.
ret = deep_format(
ret.get_object_to_format(), paramdict, allow_empty=allow_empty
)
return ret
class CustomFormatter(Formatter):
"""
Custom formatter to allow non-existing key references when formatting a
@@ -104,25 +41,25 @@ class CustomFormatter(Formatter):
(?:\|(?P<default>[^}]*))? # default fallback
}(}})*(?!}) # non-pair closing }
"""
_matcher = re.compile(_expr, re.VERBOSE)
_whole_matcher = re.compile(f"^{_expr}$", re.VERBOSE)
def __init__(self, allow_empty=False):
super(CustomFormatter, self).__init__()
super().__init__()
self.allow_empty = allow_empty
def vformat(self, format_string, args, kwargs):
matcher = re.compile(self._expr, re.VERBOSE)
# special case of returning the object if the entire string
# matches a single parameter
try:
result = re.match("^%s$" % self._expr, format_string, re.VERBOSE)
except TypeError:
return format_string.format(**kwargs)
# Special case of returning the object preserving its type if the entire string
# matches a single parameter.
result = self._whole_matcher.match(format_string)
if result is not None:
try:
return kwargs[result.group("key")]
value = kwargs[result.group("key")]
except KeyError:
pass
else:
if not isinstance(value, Undefined):
return value
# handle multiple fields within string via a callback to re.sub()
def re_replace(match):
@@ -130,23 +67,65 @@ class CustomFormatter(Formatter):
default = match.group("default")
if default is not None:
if key not in kwargs:
if key not in kwargs or isinstance(kwargs[key], Undefined):
return default
else:
return "{%s}" % key
return match.group(0)
format_string = matcher.sub(re_replace, format_string)
format_string = self._matcher.sub(re_replace, format_string)
return Formatter.vformat(self, format_string, args, kwargs)
try:
return super().vformat(format_string, args, kwargs)
except (JenkinsJobsException, UndefinedError) as x:
if len(format_string) > 40:
short_fmt = format_string[:80] + "..."
else:
short_fmt = format_string
raise JenkinsJobsException(f"While formatting string {short_fmt!r}: {x}")
def enum_required_params(self, format_string):
def re_replace(match):
key = match.group("key")
return "{%s}" % key
prepared_format_string = self._matcher.sub(re_replace, format_string)
for literal_text, field_name, format_spec, conversion in self.parse(
prepared_format_string
):
if field_name is None:
continue
arg_used, rest = _string.formatter_field_name_split(field_name)
if arg_used == "" or type(arg_used) is int:
raise RuntimeError(
f"Positional format arguments are not supported: {format_string!r}"
)
yield arg_used
def enum_param_defaults(self, format_string):
for match in self._matcher.finditer(format_string):
key = match.group("key")
default = match.group("default")
if default is not None:
yield (key, default)
def get_value(self, key, args, kwargs):
try:
return Formatter.get_value(self, key, args, kwargs)
return super().get_value(key, args, kwargs)
except KeyError:
if self.allow_empty:
logger.debug(
"Found uninitialized key %s, replaced with empty string", key
)
return ""
raise
raise JenkinsJobsException(f"Missing parameter: {key!r}")
def enum_str_format_required_params(format):
formatter = CustomFormatter()
yield from formatter.enum_required_params(format)
def enum_str_format_param_defaults(format):
formatter = CustomFormatter()
yield from formatter.enum_param_defaults(format)
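A short sketch of the '{name|default}' fallback handled above; the sample
strings are made up:

    formatter = CustomFormatter(allow_empty=False)
    formatter.format("{flavor|small}-node", flavor="large")   # -> "large-node"
    formatter.format("{flavor|small}-node")                   # -> "small-node"
    list(formatter.enum_required_params("{flavor}-{arch}"))   # -> ["flavor", "arch"]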

jenkins_jobs/job.py (new file, 107 lines)

@@ -0,0 +1,107 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from dataclasses import dataclass
from .root_base import RootBase, NonTemplateRootMixin, TemplateRootMixin, Group
from .defaults import split_contents_params, job_contents_keys
@dataclass
class JobBase(RootBase):
project_type: str
folder: str
@classmethod
def from_dict(cls, config, roots, expander, data):
keep_descriptions = config.yamlparser["keep_descriptions"]
d = {**data}
name = d.pop("name")
id = d.pop("id", None)
description = d.pop("description", None)
defaults = d.pop("defaults", "global")
project_type = d.pop("project-type", None)
folder = d.pop("folder", None)
contents, params = split_contents_params(d, job_contents_keys)
return cls(
roots.defaults,
expander,
keep_descriptions,
id,
name,
description,
defaults,
params,
contents,
project_type,
folder,
)
def _as_dict(self):
data = {
"name": self._full_name,
**self.contents,
}
if self.project_type:
data["project-type"] = self.project_type
return data
@property
def _full_name(self):
if self.folder:
return f"{self.folder}/{self.name}"
else:
return self.name
class Job(JobBase, NonTemplateRootMixin):
@classmethod
def add(cls, config, roots, expander, param_expander, data):
job = cls.from_dict(config, roots, expander, data)
roots.assign(roots.jobs, job.id, job, "job")
class JobTemplate(JobBase, TemplateRootMixin):
@classmethod
def add(cls, config, roots, expander, params_expander, data):
template = cls.from_dict(config, roots, params_expander, data)
roots.assign(roots.job_templates, template.id, template, "job template")
@dataclass
class JobGroup(Group):
_jobs: dict
_job_templates: dict
@classmethod
def add(cls, config, roots, expander, params_expander, data):
d = {**data}
name = d.pop("name")
job_specs = [
cls._spec_from_dict(item, error_context=f"Job group {name}")
for item in d.pop("jobs", [])
]
group = cls(
name,
job_specs,
d,
roots.jobs,
roots.job_templates,
)
roots.assign(roots.job_groups, group.name, group, "job group")
def __str__(self):
return f"Job group {self.name}"
@property
def _root_dicts(self):
return [self._jobs, self._job_templates]
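A hypothetical YAML sketch of the folder handling added here; when a job
defines 'folder', its full name becomes "<folder>/<name>":

    - job:
        name: build
        folder: tools
        builders:
          - shell: "make"
    # Produces a job named "tools/build".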

jenkins_jobs/loader.py (new file, 151 lines)

@@ -0,0 +1,151 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import io
import logging
from functools import partial
import yaml
from .errors import JenkinsJobsException
from .yaml_objects import BaseYamlObject
from .expander import Expander, ParamsExpander, deprecated_yaml_tags, yaml_classes_list
from .roots import root_adders
logger = logging.getLogger(__name__)
class Loader(yaml.Loader):
@classmethod
def empty(cls, jjb_config):
return cls(io.StringIO(), jjb_config)
def __init__(self, stream, jjb_config, source_path=None, anchors=None):
super().__init__(stream)
self.jjb_config = jjb_config
self.source_path = source_path
self._retain_anchors = jjb_config.yamlparser["retain_anchors"]
if anchors:
# Override default set by super class.
self.anchors = anchors
# Override the default composer to skip resetting the anchors at the
# end of the current document.
def compose_document(self):
# Drop the DOCUMENT-START event.
self.get_event()
# Compose the root node.
node = self.compose_node(None, None)
# Drop the DOCUMENT-END event.
self.get_event()
return node
def _with_stream(self, stream, source_path):
return Loader(stream, self.jjb_config, source_path, self.anchors)
def load_fp(self, fp):
return self.load(fp)
def load_path(self, path):
return self.load(path.read_text(), source_path=path)
def load(self, stream, source_path=None):
loader = self._with_stream(stream, source_path)
try:
return loader.get_single_data()
finally:
loader.dispose()
if self._retain_anchors:
self.anchors.update(loader.anchors)
def load_deprecated_yaml(tag, cls, loader, node):
logger.warning("Tag %r is deprecated, switch to using %r", tag, cls.yaml_tag)
return cls.from_yaml(loader, node)
for cls in yaml_classes_list:
yaml.add_constructor(cls.yaml_tag, cls.from_yaml, Loader)
for tag, cls in deprecated_yaml_tags:
yaml.add_constructor(tag, partial(load_deprecated_yaml, tag, cls), Loader)
def is_stdin(path):
return hasattr(path, "read")
def enum_expanded_paths(path_list):
visited_set = set()
def real(path):
real_path = path.resolve()
if real_path in visited_set:
logger.warning(
"File '%s' is already added as '%s'; ignoring reference to avoid"
" duplicating YAML definitions.",
path,
real_path,
)
else:
yield real_path
visited_set.add(real_path)
for path in path_list:
if is_stdin(path):
yield path
elif path.is_dir():
for p in path.iterdir():
if p.suffix in {".yml", ".yaml"}:
yield from real(p)
else:
yield from real(path)
def load_files(config, roots, path_list):
expander = Expander(config)
params_expander = ParamsExpander(config)
loader = Loader.empty(config)
for path in enum_expanded_paths(path_list):
if is_stdin(path):
data = loader.load_fp(path)
else:
data = loader.load_path(path)
if not isinstance(data, list):
raise JenkinsJobsException(
f"The topmost collection in file '{path}' must be a list,"
f" not a {type(data)}"
)
for item in data:
if not isinstance(item, dict):
raise JenkinsJobsException(
f"{path}: Topmost list should contain single-item dict,"
f" not a {type(item)}. Missing indent?"
)
if len(item) != 1:
raise JenkinsJobsException(
f"{path}: Topmost dict should be single-item,"
f" but have keys {item.keys()}. Missing indent?"
)
kind, contents = next(iter(item.items()))
if kind.startswith("_"):
continue
if isinstance(contents, BaseYamlObject):
contents = contents.expand(expander, params={})
try:
adder = root_adders[kind]
except KeyError:
raise JenkinsJobsException(
f"{path}: Unknown topmost element type : {kind!r},"
f" Known are: {','.join(root_adders)}."
)
adder(config, roots, expander, params_expander, contents)
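A minimal sketch of driving the new loader directly; the helper function and
its arguments are illustrative only, and jjb_config stands for a regular
JJBConfig instance:

    from pathlib import Path

    from jenkins_jobs.loader import load_files
    from jenkins_jobs.roots import Roots

    def load_definitions(jjb_config, yaml_dir):
        roots = Roots(jjb_config)
        load_files(jjb_config, roots, [Path(yaml_dir)])
        return roots.generate_jobs(), roots.generate_views()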

View File

@@ -1,676 +0,0 @@
#!/usr/bin/env python
# Copyright (C) 2013 Hewlett-Packard.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Provides local yaml parsing classes and extend yaml module
"""Custom application specific yamls tags are supported to provide
enhancements when reading yaml configuration.
Action Tags
^^^^^^^^^^^
These allow manipulation of data being stored in one layout in the source
yaml for convenience and/or clarity, to another format to be processed by
the targeted module instead of requiring all modules in JJB being capable
of supporting multiple input formats.
The tag ``!join:`` will treat the first element of the following list as
the delimiter to use, when joining the remaining elements into a string
and returning a single string to be consumed by the specified module option.
This allows users to maintain elements of data in a list structure for ease
of review/maintenance, and have the yaml parser convert it to a string for
consumption as any argument for modules. The main expected use case is to
allow for generic plugin data such as shell properties to be populated from
a list construct which the yaml parser converts to a single string, instead
of trying to support this within the module code which would require a
templating engine similar to Jinja.
Generic Example:
.. literalinclude:: /../../tests/localyaml/fixtures/joinlists.yaml
Environment Inject:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/string_join.yaml
While this mechanism can also be used items where delimiters are supported by
the module, that should be considered a bug that the existing code doesn't
handle being provided a list and delimiter to perform the correct conversion
for you. Should you discover a module that takes arguments with delimiters and
the existing JJB codebase does not handle accepting lists, then this can be
used as a temporary solution in place of using very long strings:
Extended Params Example:
.. literalinclude::
/../../tests/parameters/fixtures/extended-choice-param-full.yaml
Inclusion Tags
^^^^^^^^^^^^^^
These allow inclusion of arbitrary files as a method of having blocks of data
managed separately to the yaml job configurations. A specific usage of this is
inlining scripts contained in separate files, although such tags may also be
used to simplify usage of macros or job templates.
The tag ``!include:`` will treat the following string as file which should be
parsed as yaml configuration data.
Example:
.. literalinclude:: /../../tests/localyaml/fixtures/include001.yaml
contents of include001.yaml.inc:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/include001.yaml.inc
The tag ``!include-raw:`` will treat the given string or list of strings as
filenames to be opened as one or more data blob, which should be read into
the calling yaml construct without any further parsing. Any data in a file
included through this tag, will be treated as string data.
Examples:
.. literalinclude:: /../../tests/localyaml/fixtures/include-raw001.yaml
contents of include-raw001-hello-world.sh:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw001-hello-world.sh
contents of include-raw001-vars.sh:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw001-vars.sh
using a list of files:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw-multi001.yaml
The tag ``!include-raw-escape:`` treats the given string or list of strings as
filenames to be opened as one or more data blobs, which should be escaped
before being read in as string data. This allows job-templates to use this tag
to include scripts from files without needing to escape braces in the original
file.
.. warning::
When used as a macro ``!include-raw-escape:`` should only be used if
parameters are passed into the escaped file and you would like to escape
those parameters. If the file does not have any jjb parameters passed into
it then ``!include-raw:`` should be used instead otherwise you will run
into an interesting issue where ``include-raw-escape:`` actually adds
additional curly braces around existing curly braces. For example
${PROJECT} becomes ${{PROJECT}} which may break bash scripts.
Examples:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw-escaped001.yaml
contents of include-raw001-hello-world.sh:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw001-hello-world.sh
contents of include-raw001-vars.sh:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw001-vars.sh
using a list of files:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw-escaped-multi001.yaml
For all the multi file includes, the files are simply appended using a newline
character.
To allow for job templates to perform substitution on the path names, when a
filename containing a python format placeholder is encountered, lazy loading
support is enabled, where instead of returning the contents back during yaml
parsing, it is delayed until the variable substitution is performed.
Example:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/lazy-load-jobs001.yaml
using a list of files:
.. literalinclude::
/../../tests/yamlparser/job_fixtures/lazy-load-jobs-multi001.yaml
.. note::
Because lazy-loading involves performing the substitution on the file
name, it means that jenkins-job-builder can not call the variable
substitution on the contents of the file. This means that the
``!include-raw:`` tag will behave as though ``!include-raw-escape:`` tag
was used instead whenever name substitution on the filename is to be
performed.
Given the behaviour described above, when substitution is to be performed
on any filename passed via ``!include-raw-escape:`` the tag will be
automatically converted to ``!include-raw:`` and no escaping will be
performed.
The tag ``!include-jinja2:`` will treat the given string or list of strings as
filenames to be opened as Jinja2 templates, which should be rendered to a
string and included in the calling YAML construct. (This is analogous to the
templating that will happen with ``!include-raw``.)
Examples:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/jinja01.yaml
contents of jinja01.yaml.inc:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/jinja01.yaml.inc
The tag ``!j2:`` takes a string and treats it as a Jinja2 template. It will be
rendered (with the variables in that context) and included in the calling YAML
construct.
Examples:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/jinja-string01.yaml
The tag ``!j2-yaml:`` is similar to the ``!j2:`` tag, just that it loads the
Jinja-rendered string as YAML and embeds it in the calling YAML construct. This
provides a very flexible and convenient way of generating pieces of YAML
structures. One of use cases is defining complex YAML structures with much
simpler configuration, without any duplication.
Examples:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/jinja-yaml01.yaml
Another use case is controlling lists dynamically, like conditionally adding
list elements based on project configuration.
Examples:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/jinja-yaml02.yaml
"""
import functools
import io
import logging
import os
import re
import copy
import jinja2
import yaml
from yaml.constructor import BaseConstructor
from yaml.representer import BaseRepresenter
from yaml import YAMLObject
from collections import OrderedDict
logger = logging.getLogger(__name__)
class OrderedConstructor(BaseConstructor):
"""The default constructor class for PyYAML loading uses standard python
dictionaries which can have randomized ordering enabled (default in
CPython from version 3.3). The order of the XML elements being outputted
is both important for tests and for ensuring predictable generation based
on the source. This subclass overrides this behaviour to ensure that all
dict's created make use of OrderedDict to have iteration of keys to always
follow the order in which the keys were inserted/created.
"""
def construct_yaml_map(self, node):
data = OrderedDict()
yield data
value = self.construct_mapping(node)
if isinstance(node, yaml.MappingNode):
self.flatten_mapping(node)
else:
raise yaml.constructor.ConstructorError(
None,
None,
"expected a mapping node, but found %s" % node.id,
node.start_mark,
)
mapping = OrderedDict()
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=False)
try:
hash(key)
except TypeError as exc:
raise yaml.constructor.ConstructorError(
"while constructing a mapping",
node.start_mark,
"found unacceptable key (%s)" % exc,
key_node.start_mark,
)
value = self.construct_object(value_node, deep=False)
mapping[key] = value
data.update(mapping)
class OrderedRepresenter(BaseRepresenter):
def represent_yaml_mapping(self, mapping, flow_style=None):
tag = "tag:yaml.org,2002:map"
node = self.represent_mapping(tag, mapping, flow_style=flow_style)
return node
class LocalAnchorLoader(yaml.Loader):
"""Subclass for yaml.Loader which keeps Alias between calls"""
anchors = {}
def __init__(self, *args, **kwargs):
super(LocalAnchorLoader, self).__init__(*args, **kwargs)
self.anchors = LocalAnchorLoader.anchors
@classmethod
def reset_anchors(cls):
cls.anchors = {}
# override the default composer to skip resetting the anchors at the
# end of the current document
def compose_document(self):
# Drop the DOCUMENT-START event.
self.get_event()
# Compose the root node.
node = self.compose_node(None, None)
# Drop the DOCUMENT-END event.
self.get_event()
return node
class LocalLoader(OrderedConstructor, LocalAnchorLoader):
"""Subclass for yaml.Loader which handles storing the search_path and
escape_callback functions for use by the custom YAML objects to find files
and escape the content where required.
Constructor access a list of search paths to look under for the given
file following each tag, taking the first match found. Search path by
default will include the same directory as the yaml file and the current
working directory.
Loading::
# use the load function provided in this module
import local_yaml
data = local_yaml.load(io.open(fn, 'r', encoding='utf-8'))
# Loading by providing the alternate class to the default yaml load
from local_yaml import LocalLoader
data = yaml.load(io.open(fn, 'r', encoding='utf-8'), LocalLoader)
# Loading with a search path
from local_yaml import LocalLoader
import functools
data = yaml.load(io.open(fn, 'r', encoding='utf-8'),
functools.partial(LocalLoader, search_path=['path']))
"""
def __init__(self, *args, **kwargs):
# make sure to pop off any local settings before passing to
# the parent constructor as any unknown args may cause errors.
self.search_path = list()
if "search_path" in kwargs:
for p in kwargs.pop("search_path"):
logger.debug("Adding '{0}' to search path for include tags".format(p))
self.search_path.append(os.path.normpath(p))
if "escape_callback" in kwargs:
self.escape_callback = kwargs.pop("escape_callback")
else:
self.escape_callback = self._escape
super(LocalLoader, self).__init__(*args, **kwargs)
# constructor to preserve order of maps and ensure that the order of
# keys returned is consistent across multiple python versions
self.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
type(self).construct_yaml_map,
)
if hasattr(self.stream, "name"):
self.search_path.append(os.path.normpath(os.path.dirname(self.stream.name)))
self.search_path.append(os.path.normpath(os.path.curdir))
def _escape(self, data):
return re.sub(r"({|})", r"\1\1", data)
class LocalDumper(OrderedRepresenter, yaml.Dumper):
def __init__(self, *args, **kwargs):
super(LocalDumper, self).__init__(*args, **kwargs)
# representer to ensure conversion back looks like normal
# mapping and hides that we use OrderedDict internally
self.add_representer(OrderedDict, type(self).represent_yaml_mapping)
# convert any tuples to lists as the JJB input is generally
# in list format
self.add_representer(tuple, type(self).represent_list)
class BaseYAMLObject(YAMLObject):
yaml_loader = LocalLoader
yaml_dumper = LocalDumper
class J2Yaml(BaseYAMLObject):
yaml_tag = "!j2-yaml:"
@classmethod
def from_yaml(cls, loader, node):
return Jinja2YamlLoader(node.value, loader.search_path)
class J2String(BaseYAMLObject):
yaml_tag = "!j2:"
@classmethod
def from_yaml(cls, loader, node):
return Jinja2Loader(node.value, loader.search_path)
class YamlListJoin(BaseYAMLObject):
yaml_tag = "!join:"
@classmethod
def from_yaml(cls, loader, node):
if isinstance(node, yaml.SequenceNode):
delimiter = node.value[0].value
if not isinstance(node.value[1], yaml.SequenceNode):
raise yaml.constructor.ConstructorError(
None,
None,
"expected sequence node for join data, but "
"found %s" % node.value[1].id,
node.start_mark,
)
return delimiter.join((v.value for v in node.value[1].value))
else:
raise yaml.constructor.ConstructorError(
None,
None,
"expected sequence node, but found %s" % node.id,
node.start_mark,
)
class YamlInclude(BaseYAMLObject):
yaml_tag = "!include:"
@classmethod
def _find_file(cls, filename, search_path):
for dirname in search_path:
candidate = os.path.expanduser(os.path.join(dirname, filename))
if os.path.isfile(candidate):
logger.debug(
"Including file '{0}' from path '{1}'".format(filename, dirname)
)
return candidate
return filename
@classmethod
def _open_file(cls, loader, node):
node_str = loader.construct_yaml_str(node)
try:
node_str.format()
except KeyError:
return cls._lazy_load(loader, cls.yaml_tag, node)
filename = cls._find_file(node_str, loader.search_path)
try:
with io.open(filename, "r", encoding="utf-8") as f:
return f.read()
except Exception:
logger.error(
"Failed to include file using search path: '{0}'".format(
":".join(loader.search_path)
)
)
raise
@classmethod
def _from_file(cls, loader, node):
contents = cls._open_file(loader, node)
if isinstance(contents, LazyLoader):
return contents
data = yaml.load(
contents, functools.partial(cls.yaml_loader, search_path=loader.search_path)
)
return data
@classmethod
def _lazy_load(cls, loader, tag, node_str):
logger.info("Lazy loading of file template '{0}' enabled".format(node_str))
return LazyLoader((cls, loader, node_str))
@classmethod
def from_yaml(cls, loader, node):
if isinstance(node, yaml.ScalarNode):
return cls._from_file(loader, node)
elif isinstance(node, yaml.SequenceNode):
contents = [
cls._from_file(loader, scalar_node) for scalar_node in node.value
]
if any(isinstance(s, CustomLoader) for s in contents):
return CustomLoaderCollection(contents)
return "\n".join(contents)
else:
raise yaml.constructor.ConstructorError(
None,
None,
"expected either a sequence or scalar node, but " "found %s" % node.id,
node.start_mark,
)
class YamlIncludeRaw(YamlInclude):
yaml_tag = "!include-raw:"
@classmethod
def _from_file(cls, loader, node):
return cls._open_file(loader, node)
class YamlIncludeRawEscape(YamlIncludeRaw):
yaml_tag = "!include-raw-escape:"
@classmethod
def from_yaml(cls, loader, node):
data = YamlIncludeRaw.from_yaml(loader, node)
if isinstance(data, LazyLoader):
logger.warning(
"Replacing %s tag with %s since lazy loading means "
"file contents will not be deep formatted for "
"variable substitution.",
cls.yaml_tag,
YamlIncludeRaw.yaml_tag,
)
return data
else:
return loader.escape_callback(data)
class YamlIncludeJinja2(YamlIncludeRaw):
yaml_tag = "!include-jinja2:"
@classmethod
def _from_file(cls, loader, node):
contents = cls._open_file(loader, node)
if isinstance(contents, LazyLoader):
return contents
return Jinja2Loader(contents, loader.search_path)
class DeprecatedTag(BaseYAMLObject):
@classmethod
def from_yaml(cls, loader, node):
logger.warning(
"tag '%s' is deprecated, switch to using '%s'",
cls.yaml_tag,
cls._new.yaml_tag,
)
return cls._new.from_yaml(loader, node)
class YamlIncludeDeprecated(DeprecatedTag):
yaml_tag = "!include"
_new = YamlInclude
class YamlIncludeRawDeprecated(DeprecatedTag):
yaml_tag = "!include-raw"
_new = YamlIncludeRaw
class YamlIncludeRawEscapeDeprecated(DeprecatedTag):
yaml_tag = "!include-raw-escape"
_new = YamlIncludeRawEscape
class CustomLoader(object):
"""Parent class for non-standard loaders."""
class Jinja2Loader(CustomLoader):
"""A loader for Jinja2-templated files."""
def __init__(self, contents, search_path):
# capture template contents and search paths on loader creation.
self._contents = contents
self._search_path = search_path
self._template = None
self._loader = None
def __deepcopy__(self, memo):
# Jinja 2 templates are not deepcopy-able so just pass around
# the search_path and contents.
return Jinja2Loader(self._contents, self._search_path)
def format(self, **kwargs):
# Wait until first render call to create a template then save
# the template on this instance for faster rendering.
if not self._template:
self._template = jinja2.Template(self._contents)
self._template.environment.undefined = jinja2.StrictUndefined
self._template.environment.loader = jinja2.FileSystemLoader(
self._search_path
)
# Preserve this loader if it hasn't been overwritten
# elsewhere.
self._loader = self._template.environment.loader
self._template.environment.loader = self._loader
return self._template.render(kwargs)
def get_object_to_format(self):
return self
class LateYamlLoader(CustomLoader):
"""A loader for data rendered via Jinja2, to be loaded as YAML and then deep formatted."""
def __init__(self, yaml_str, loader):
self._yaml_str = yaml_str
self._loader = loader
def __deepcopy__(self, memo):
return LateYamlLoader(self._yaml_str, copy.deepcopy(self._loader, memo))
def get_object_to_format(self):
return yaml.load(
self._yaml_str,
functools.partial(LocalLoader, search_path=self._loader._search_path),
)
class Jinja2YamlLoader(Jinja2Loader):
def format(self, **kwargs):
yaml_str = super(Jinja2YamlLoader, self).format(**kwargs)
return LateYamlLoader(yaml_str, self)
def __deepcopy__(self, memo):
return Jinja2YamlLoader(self._contents, self._search_path)
class CustomLoaderCollection(object):
"""Helper class to format a collection of CustomLoader objects"""
def __init__(self, sequence):
self._data = sequence
def format(self, *args, **kwargs):
return "\n".join(item.format(*args, **kwargs) for item in self._data)
class LazyLoader(CustomLoader):
"""Helper class to provide lazy loading of files included using !include*
tags where the path to the given file contains unresolved placeholders.
"""
def __init__(self, data):
# str subclasses can only have one argument, so assume it is a tuple
# being passed and unpack as needed
self._cls, self._loader, self._node = data
def __str__(self):
return "%s %s" % (self._cls.yaml_tag, self._node.value)
def __repr__(self):
return "%s %s" % (self._cls.yaml_tag, self._node.value)
def __deepcopy__(self, memodict={}):
return LazyLoader(
(copy.deepcopy(self._cls), self._loader, copy.deepcopy(self._node))
)
def format(self, *args, **kwargs):
node = yaml.ScalarNode(
tag=self._node.tag, value=self._node.value.format(*args, **kwargs)
)
return self._cls.from_yaml(self._loader, node)
def load(stream, retain_anchors=False, **kwargs):
if not retain_anchors:
LocalAnchorLoader.reset_anchors()
return yaml.load(stream, functools.partial(LocalLoader, **kwargs))
def dump(data, stream=None, **kwargs):
return yaml.dump(data, stream, Dumper=LocalDumper, **kwargs)

jenkins_jobs/macro.py (new file, 56 lines)

@@ -0,0 +1,56 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from dataclasses import dataclass
from functools import partial
from .errors import JenkinsJobsException
macro_specs = [
# type_name, elements_name (aka component_type, component_list_type for Registry).
("parameter", "parameters"),
("property", "properties"),
("builder", "builders"),
("wrapper", "wrappers"),
("trigger", "triggers"),
("publisher", "publishers"),
("scm", "scm"),
("pipeline-scm", "pipeline-scm"),
("reporter", "reporters"),
]
@dataclass
class Macro:
name: str
elements: list
@classmethod
def add(
cls, type_name, elements_name, config, roots, expander, params_expander, data
):
d = {**data}
name = d.pop("name")
elements = d.pop(elements_name)
if d:
raise JenkinsJobsException(
f"Macro {type_name} {name!r}: unexpected elements: {','.join(d.keys())}"
)
macro = cls(name, elements or [])
roots.assign(roots.macros[type_name], name, macro, "macro")
macro_adders = {
macro_type: partial(Macro.add, macro_type, elements_name)
for macro_type, elements_name in macro_specs
}
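A hypothetical builder macro in YAML, as consumed by Macro.add; it is stored
under roots.macros["builder"] keyed by its name:

    - builder:
        name: say-hello
        builders:
          - shell: "echo hello"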

View File

@@ -46,15 +46,15 @@ class Base(object):
def __init__(self, registry):
self.registry = registry
def handle_data(self, job_data):
def amend_job_dict(self, job):
"""This method is called before any XML is generated. By
overriding this method, a module may arbitrarily modify a data
structure which will probably be the JJB YamlParser's intermediate data
overriding this method, a module may arbitrarily modify a job data
structure which will probably be the JJB Job intermediate data dict
representation. If it has changed the data structure at all, it must
return ``True``, otherwise, it must return ``False``.
:arg dict job_data: the intermediate representation of job data
loaded from JJB Yaml files without variables interpolation or other
:arg dict job: the intermediate representation of job data
loaded from JJB Yaml files after variables interpolation and other
yaml expansions.
:rtype: bool

View File

@ -1629,14 +1629,13 @@ class Parameters(jenkins_jobs.modules.base.Base):
# Macro parameter without arguments
param = {param: {}}
param_type = next(iter(param))
component = self.registry.parser_data.get("parameter", {}).get(
param_type
)
if component is None:
macro_dict = self.registry.macros.get("parameter", {})
macro = macro_dict.get(param_type)
if not macro:
self._extend_uno_choice_param_data(param, param_type, data)
else:
# Process macro case.
for macro_param in component.get("parameters", []):
for macro_param in macro.elements:
for macro_param_type in macro_param:
self._extend_uno_choice_param_data(
macro_param, macro_param_type, data

View File

@ -1875,7 +1875,7 @@ def build_result(registry, xml_parent, data):
"BuildResultTriggerInfo",
)
jobs_string = ",".join(group["jobs"])
mapping = [("", "jobNames", jobs_string, group)]
mapping = [("", "jobNames", jobs_string)]
helpers.convert_mapping_to_xml(brti, group, mapping, fail_required=True)
checked_results = XML.SubElement(brti, "checkedResults")
for result in group.get("results", ["success"]):

View File

@ -20,7 +20,6 @@ have. It is entirely optional, Zuul 2.0+ pass the parameters over Gearman.
https://opendev.org/zuul/zuul/src/tag/2.6.0/doc/source/launchers.rst#user-content-zuul-parameters
"""
import itertools
import jenkins_jobs.modules.base
@ -187,27 +186,16 @@ ZUUL_POST_PARAMETERS = [
class Zuul(jenkins_jobs.modules.base.Base):
sequence = 0
def handle_data(self, job_data):
changed = False
jobs = itertools.chain(
job_data.get("job", {}).values(), job_data.get("job-template", {}).values()
)
for job in jobs:
triggers = job.get("triggers")
if not triggers:
continue
def amend_job_dict(self, job):
triggers = job.get("triggers", [])
if "zuul" not in triggers and "zuul-post" not in triggers:
return False
if "zuul" not in job.get("triggers", []) and "zuul-post" not in job.get(
"triggers", []
):
continue
if "parameters" not in job:
job["parameters"] = []
if "zuul" in job.get("triggers", []):
job["parameters"].extend(ZUUL_PARAMETERS)
job["triggers"].remove("zuul")
if "zuul-post" in job.get("triggers", []):
job["parameters"].extend(ZUUL_POST_PARAMETERS)
job["triggers"].remove("zuul-post")
changed = True
return changed
parameters = job.setdefault("parameters", [])
if "zuul" in triggers:
parameters.extend(ZUUL_PARAMETERS)
triggers.remove("zuul")
if "zuul-post" in triggers:
parameters.extend(ZUUL_POST_PARAMETERS)
triggers.remove("zuul-post")
return True

View File

@ -1,590 +0,0 @@
#!/usr/bin/env python
# Copyright (C) 2015 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Manage JJB yaml feature implementation
import copy
import fnmatch
import io
import itertools
import logging
import re
import os
from jenkins_jobs.constants import MAGIC_MANAGE_STRING
from jenkins_jobs.errors import JenkinsJobsException
from jenkins_jobs.formatter import deep_format
import jenkins_jobs.local_yaml as local_yaml
from jenkins_jobs import utils
__all__ = ["YamlParser"]
logger = logging.getLogger(__name__)
def matches(what, glob_patterns):
"""
Checks if the given string, ``what``, matches any of the glob patterns in
the iterable, ``glob_patterns``
:arg str what: String that we want to test if it matches a pattern
:arg iterable glob_patterns: glob patterns to match (list, tuple, set,
etc.)
"""
return any(fnmatch.fnmatch(what, glob_pattern) for glob_pattern in glob_patterns)
def combination_matches(combination, match_combinations):
"""
Checks if the given combination is matches for any of the given combination
globs, being those a set of combinations where if a key is missing, it's
considered matching
(key1=2, key2=3)
would match the combination match:
(key2=3)
but not:
(key1=2, key2=2)
"""
for cmatch in match_combinations:
for key, val in combination.items():
if cmatch.get(key, val) != val:
break
else:
return True
return False
class YamlParser(object):
def __init__(self, jjb_config=None):
self.data = {}
self.jobs = []
self.views = []
self.jjb_config = jjb_config
self.keep_desc = jjb_config.yamlparser["keep_descriptions"]
self.path = jjb_config.yamlparser["include_path"]
def load_files(self, fn):
# handle deprecated behavior, and check that it's not a file like
# object as these may implement the '__iter__' attribute.
if not hasattr(fn, "__iter__") or hasattr(fn, "read"):
logger.warning(
"Passing single elements for the `fn` argument in "
"Builder.load_files is deprecated. Please update your code "
"to use a list as support for automatic conversion will be "
"removed in a future version."
)
fn = [fn]
files_to_process = []
for path in fn:
if not hasattr(path, "read") and os.path.isdir(path):
files_to_process.extend(
[
os.path.join(path, f)
for f in sorted(os.listdir(path))
if (f.endswith(".yml") or f.endswith(".yaml"))
]
)
else:
files_to_process.append(path)
# symlinks used to allow loading of sub-dirs can result in duplicate
# definitions of macros and templates when loading all from top-level
unique_files = []
for f in files_to_process:
if hasattr(f, "read"):
unique_files.append(f)
continue
rpf = os.path.realpath(f)
if rpf not in unique_files:
unique_files.append(rpf)
else:
logger.warning(
"File '%s' already added as '%s', ignoring "
"reference to avoid duplicating yaml "
"definitions." % (f, rpf)
)
for in_file in unique_files:
# use of ask-for-permissions instead of ask-for-forgiveness
# performs better when low use cases.
if hasattr(in_file, "name"):
fname = in_file.name
else:
fname = in_file
logger.debug("Parsing YAML file {0}".format(fname))
if hasattr(in_file, "read"):
self._parse_fp(in_file)
else:
self.parse(in_file)
def _parse_fp(self, fp):
# wrap provided file streams to ensure correct encoding used
data = local_yaml.load(
utils.wrap_stream(fp),
self.jjb_config.yamlparser["retain_anchors"],
search_path=self.path,
)
if data:
if not isinstance(data, list):
raise JenkinsJobsException(
"The topmost collection in file '{fname}' must be a list,"
" not a {cls}".format(fname=getattr(fp, "name", fp), cls=type(data))
)
for item in data:
cls, dfn = next(iter(item.items()))
group = self.data.get(cls, {})
if len(item.items()) > 1:
n = None
for k, v in item.items():
if k == "name":
n = v
break
# Syntax error
raise JenkinsJobsException(
"Syntax error, for item "
"named '{0}'. Missing indent?".format(n)
)
# allow any entry to specify an id that can also be used
_id = dfn.get("id", dfn["name"])
if _id in group:
self._handle_dups(
"Duplicate entry found in '{0}: '{1}' already "
"defined".format(fp.name, _id)
)
group[_id] = dfn
self.data[cls] = group
def parse(self, fn):
with io.open(fn, "r", encoding="utf-8") as fp:
self._parse_fp(fp)
def _handle_dups(self, message):
if not self.jjb_config.yamlparser["allow_duplicates"]:
logger.error(message)
raise JenkinsJobsException(message)
else:
logger.warning(message)
def _getJob(self, name):
job = self.data.get("job", {}).get(name, None)
if not job:
return job
return self._applyDefaults(job)
def _getJobGroup(self, name):
return self.data.get("job-group", {}).get(name, None)
def _getJobTemplate(self, name):
job = self.data.get("job-template", {}).get(name, None)
if not job:
return job
return self._applyDefaults(job)
def _applyDefaults(self, data, override_dict=None):
if override_dict is None:
override_dict = {}
whichdefaults = data.get("defaults", "global")
defaults = copy.deepcopy(self.data.get("defaults", {}).get(whichdefaults, {}))
if defaults == {} and whichdefaults != "global":
raise JenkinsJobsException(
"Unknown defaults set: '{0}'".format(whichdefaults)
)
for key in override_dict.keys():
if key in defaults.keys():
defaults[key] = override_dict[key]
newdata = {}
newdata.update(defaults)
newdata.update(data)
return newdata
def _formatDescription(self, job):
if self.keep_desc:
description = job.get("description", None)
else:
description = job.get("description", "")
if description is not None:
job["description"] = description + self._get_managed_string().lstrip()
def _getfullname(self, data):
if "folder" in data:
return "%s/%s" % (data["folder"], data["name"])
return data["name"]
def expandYaml(self, registry, jobs_glob=None):
changed = True
while changed:
changed = False
for module in registry.modules:
if hasattr(module, "handle_data"):
if module.handle_data(self.data):
changed = True
for job in self.data.get("job", {}).values():
job = self._applyDefaults(job)
job["name"] = self._getfullname(job)
if jobs_glob and not matches(job["name"], jobs_glob):
logger.debug("Ignoring job {0}".format(job["name"]))
continue
logger.debug("Expanding job '{0}'".format(job["name"]))
self._formatDescription(job)
self.jobs.append(job)
for view in self.data.get("view", {}).values():
view["name"] = self._getfullname(view)
if jobs_glob and not matches(view["name"], jobs_glob):
logger.debug("Ignoring view {0}".format(view["name"]))
continue
logger.debug("Expanding view '{0}'".format(view["name"]))
self._formatDescription(view)
self.views.append(view)
for project in self.data.get("project", {}).values():
logger.debug("Expanding project '{0}'".format(project["name"]))
# use a set to check for duplicate job references in projects
seen = set()
for jobspec in project.get("jobs", []):
if isinstance(jobspec, dict):
# Singleton dict containing dict of job-specific params
jobname, jobparams = next(iter(jobspec.items()))
if not isinstance(jobparams, dict):
jobparams = {}
else:
jobname = jobspec
jobparams = {}
job = self._getJob(jobname)
if job:
# Just naming an existing defined job
if jobname in seen:
self._handle_dups(
"Duplicate job '{0}' specified "
"for project '{1}'".format(jobname, project["name"])
)
seen.add(jobname)
continue
# see if it's a job group
group = self._getJobGroup(jobname)
if group:
for group_jobspec in group["jobs"]:
if isinstance(group_jobspec, dict):
group_jobname, group_jobparams = next(
iter(group_jobspec.items())
)
if not isinstance(group_jobparams, dict):
group_jobparams = {}
else:
group_jobname = group_jobspec
group_jobparams = {}
job = self._getJob(group_jobname)
if job:
if group_jobname in seen:
self._handle_dups(
"Duplicate job '{0}' specified for "
"project '{1}'".format(
group_jobname, project["name"]
)
)
seen.add(group_jobname)
continue
template = self._getJobTemplate(group_jobname)
# Allow a group to override parameters set by a project
d = type(project)(project)
d.update(jobparams)
d.update(group)
d.update(group_jobparams)
# Except name, since the group's name is not useful
d["name"] = project["name"]
if template:
self._expandYamlForTemplateJob(d, template, jobs_glob)
continue
# see if it's a template
template = self._getJobTemplate(jobname)
if template:
d = type(project)(project)
d.update(jobparams)
self._expandYamlForTemplateJob(d, template, jobs_glob)
else:
raise JenkinsJobsException(
"Failed to find suitable "
"template named '{0}'".format(jobname)
)
for viewspec in project.get("views", []):
if isinstance(viewspec, dict):
# Singleton dict containing dict of view-specific params
viewname, viewparams = next(iter(viewspec.items()))
if not isinstance(viewparams, dict):
viewparams = {}
else:
viewname = viewspec
viewparams = {}
view = self._getView(viewname)
if view:
# Just naming an existing defined view
if viewname in seen:
self._handle_dups(
"Duplicate view '{0}' specified "
"for project '{1}'".format(viewname, project["name"])
)
seen.add(viewname)
continue
# see if it's a view group
group = self._getViewGroup(viewname)
if group:
for group_viewspec in group["views"]:
if isinstance(group_viewspec, dict):
group_viewname, group_viewparams = next(
iter(group_viewspec.items())
)
if not isinstance(group_viewparams, dict):
group_viewparams = {}
else:
group_viewname = group_viewspec
group_viewparams = {}
view = self._getView(group_viewname)
if view:
if group_viewname in seen:
self._handle_dups(
"Duplicate view '{0}' specified for "
"project '{1}'".format(
group_viewname, project["name"]
)
)
seen.add(group_viewname)
continue
template = self._getViewTemplate(group_viewname)
# Allow a group to override parameters set by a project
d = type(project)(project)
d.update(viewparams)
d.update(group)
d.update(group_viewparams)
# Except name, since the group's name is not useful
d["name"] = project["name"]
if template:
self._expandYamlForTemplateView(d, template, jobs_glob)
continue
# see if it's a template
template = self._getViewTemplate(viewname)
if template:
d = type(project)(project)
d.update(viewparams)
self._expandYamlForTemplateView(d, template, jobs_glob)
else:
raise JenkinsJobsException(
"Failed to find suitable "
"template named '{0}'".format(viewname)
)
# check for duplicate generated jobs
seen = set()
# walk the list in reverse so that last definition wins
for job in self.jobs[::-1]:
if job["name"] in seen:
self._handle_dups(
"Duplicate definitions for job '{0}' "
"specified".format(job["name"])
)
self.jobs.remove(job)
seen.add(job["name"])
# check for duplicate generated views
seen_views = set()
# walk the list in reverse so that last definition wins
for view in self.views[::-1]:
if view["name"] in seen_views:
self._handle_dups(
"Duplicate definitions for view '{0}' "
"specified".format(view["name"])
)
self.views.remove(view)
seen_views.add(view["name"])
return self.jobs, self.views
def _expandYamlForTemplateJob(self, project, template, jobs_glob=None):
dimensions = []
template_name = template["name"]
# reject keys that are not useful during yaml expansion
for k in ["jobs"]:
project.pop(k)
excludes = project.pop("exclude", [])
for (k, v) in project.items():
tmpk = "{{{0}}}".format(k)
if tmpk not in template_name:
continue
if type(v) == list:
dimensions.append(zip([k] * len(v), v))
# XXX somewhat hackish to ensure we actually have a single
# pass through the loop
if len(dimensions) == 0:
dimensions = [(("", ""),)]
for values in itertools.product(*dimensions):
params = copy.deepcopy(project)
params = self._applyDefaults(params, template)
params["template-name"] = re.sub(r"({|})", r"\1\1", template_name)
try:
expanded_values = {}
for (k, v) in values:
if isinstance(v, dict):
inner_key = next(iter(v))
expanded_values[k] = inner_key
expanded_values.update(v[inner_key])
else:
expanded_values[k] = v
except TypeError:
project_name = project.pop("name")
logger.error(
"Exception thrown while expanding template '%s' for "
"project '%s', with expansion arguments of:\n%s\n"
"Original project input variables for template:\n%s\n"
"Most likely the inputs have items indented incorrectly "
"to describe how they should be applied.\n\nNote yaml "
"'null' is mapped to python's 'None'",
template_name,
project_name,
"".join(
local_yaml.dump({k: v}, default_flow_style=False)
for (k, v) in values
),
local_yaml.dump(project, default_flow_style=False),
)
raise
params.update(expanded_values)
try:
params = deep_format(params, params)
except Exception:
logging.error("Failure formatting params '%s' with itself", params)
raise
if combination_matches(params, excludes):
logger.debug("Excluding combination %s", str(params))
continue
for key in template.keys():
if key not in params:
params[key] = template[key]
try:
expanded = deep_format(
template,
params,
self.jjb_config.yamlparser["allow_empty_variables"],
)
except Exception:
logging.error(
"Failure formatting template '%s', containing '%s' with "
"params '%s'",
template_name,
template,
params,
)
raise
expanded["name"] = self._getfullname(expanded)
job_name = expanded.get("name")
if jobs_glob and not matches(job_name, jobs_glob):
continue
self._formatDescription(expanded)
self.jobs.append(expanded)
def _get_managed_string(self):
# The \n\n is not hard coded, because they get stripped if the
# project does not otherwise have a description.
return "\n\n" + MAGIC_MANAGE_STRING
# Views related
def _getView(self, name):
view = self.data.get("view", {}).get(name, None)
if not view:
return view
return self._applyDefaults(view)
def _getViewGroup(self, name):
return self.data.get("view-group", {}).get(name, None)
def _getViewTemplate(self, name):
view = self.data.get("view-template", {}).get(name, None)
if not view:
return view
return self._applyDefaults(view)
def _expandYamlForTemplateView(self, project, template, views_glob=None):
dimensions = []
template_name = template["name"]
# reject keys that are not useful during yaml expansion
for k in ["views"]:
project.pop(k)
excludes = project.pop("exclude", [])
for (k, v) in project.items():
tmpk = "{{{0}}}".format(k)
if tmpk not in template_name:
continue
if type(v) == list:
dimensions.append(zip([k] * len(v), v))
# XXX somewhat hackish to ensure we actually have a single
# pass through the loop
if len(dimensions) == 0:
dimensions = [(("", ""),)]
for values in itertools.product(*dimensions):
params = copy.deepcopy(project)
params = self._applyDefaults(params, template)
expanded_values = {}
for (k, v) in values:
if isinstance(v, dict):
inner_key = next(iter(v))
expanded_values[k] = inner_key
expanded_values.update(v[inner_key])
else:
expanded_values[k] = v
params.update(expanded_values)
params = deep_format(params, params)
if combination_matches(params, excludes):
logger.debug("Excluding combination %s", str(params))
continue
for key in template.keys():
if key not in params:
params[key] = template[key]
params["template-name"] = template_name
expanded = deep_format(
template, params, self.jjb_config.yamlparser["allow_empty_variables"]
)
view_name = expanded.get("name")
if views_glob and not matches(view_name, views_glob):
continue
self._formatDescription(expanded)
self.views.append(expanded)

77
jenkins_jobs/project.py Normal file
View File

@ -0,0 +1,77 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from dataclasses import dataclass
from .root_base import GroupBase
@dataclass
class Project(GroupBase):
_jobs: dict
_job_templates: dict
_job_groups: dict
_views: dict
_view_templates: dict
_view_groups: dict
name: str
defaults_name: str
job_specs: list # list[Spec]
view_specs: list # list[Spec]
params: dict
@classmethod
def add(cls, config, roots, expander, params_expander, data):
d = {**data}
name = d.pop("name")
defaults = d.pop("defaults", None)
job_specs = [
cls._spec_from_dict(item, error_context=f"Project {name}")
for item in d.pop("jobs", [])
]
view_specs = [
cls._spec_from_dict(item, error_context=f"Project {name}")
for item in d.pop("views", [])
]
project = cls(
roots.jobs,
roots.job_templates,
roots.job_groups,
roots.views,
roots.view_templates,
roots.view_groups,
name,
defaults,
job_specs,
view_specs,
params=d,
)
roots.assign(roots.projects, project.name, project, "project")
def __str__(self):
return f"Project {self.name}"
@property
def _my_params(self):
return {"name": self.name}
def generate_jobs(self):
root_dicts = [self._jobs, self._job_templates, self._job_groups]
return self._generate_items(
root_dicts, self.job_specs, self.defaults_name, params={}
)
def generate_views(self):
root_dicts = [self._views, self._view_templates, self._view_groups]
return self._generate_items(
root_dicts, self.view_specs, self.defaults_name, params={}
)

View File

@ -25,8 +25,7 @@ import types
from six import PY2
from jenkins_jobs.errors import JenkinsJobsException
from jenkins_jobs.formatter import deep_format
from jenkins_jobs.local_yaml import Jinja2Loader
from jenkins_jobs.expander import Expander, ParamsExpander
__all__ = ["ModuleRegistry"]
@ -45,6 +44,9 @@ class ModuleRegistry(object):
self.handlers = {}
self.jjb_config = jjb_config
self.masked_warned = {}
self._macros = {}
self._expander = Expander(jjb_config)
self._params_expander = ParamsExpander(jjb_config)
if plugins_list is None:
self.plugins_dict = {}
@ -160,11 +162,21 @@ class ModuleRegistry(object):
return self.handlers[category][name]
@property
def parser_data(self):
return self.__parser_data
def macros(self):
return self._macros
def set_parser_data(self, parser_data):
self.__parser_data = parser_data
def set_macros(self, macros):
self._macros = macros
def amend_job_dicts(self, job_data_list):
while True:
changed = False
for data in job_data_list:
for module in self.modules:
if module.amend_job_dict(data):
changed = True
if not changed:
break
def get_component_list_type(self, entry_point):
if entry_point in self._component_type_cache:
@ -211,26 +223,10 @@ class ModuleRegistry(object):
if isinstance(component, dict):
# The component is a singleton dictionary of name: dict(args)
name, component_data = next(iter(component.items()))
if template_data or isinstance(component_data, Jinja2Loader):
if template_data:
paramdict = {}
paramdict.update(template_data)
paramdict.update(job_data or {})
# Template data contains values that should be interpolated
# into the component definition. To handle Jinja2 templates
# that don't contain any variables, we also deep format those.
try:
component_data = deep_format(
component_data,
paramdict,
self.jjb_config.yamlparser["allow_empty_variables"],
)
except Exception:
logging.error(
"Failure formatting component ('%s') data '%s'",
name,
component_data,
)
raise
else:
# The component is a simple string name, eg "run-tests"
name = component
@ -307,9 +303,9 @@ class ModuleRegistry(object):
self._entry_points_cache[component_list_type] = eps
logger.debug("Cached entry point group %s = %s", component_list_type, eps)
# check for macro first
component = self.parser_data.get(component_type, {}).get(name)
if component:
macro_dict = self.macros.get(component_type, {})
macro = macro_dict.get(name)
if macro:
if name in eps and name not in self.masked_warned:
self.masked_warned[name] = True
logger.warning(
@ -318,12 +314,28 @@ class ModuleRegistry(object):
"definition" % (name, component_type)
)
for b in component[component_list_type]:
# Expand macro strings only if at least one macro parameter is provided.
if component_data:
expander = self._params_expander
else:
expander = self._expander
for b in macro.elements:
try:
element = expander.expand(
b, params={**component_data, **(job_data or {})}
)
except JenkinsJobsException as x:
raise JenkinsJobsException(f"While expanding macro {name!r}: {x}")
# Pass component_data in as template data to this function
# so that if the macro is invoked with arguments,
# the arguments are interpolated into the real defn.
self.dispatch(
component_type, xml_parent, b, component_data, job_data=job_data
component_type,
xml_parent,
element,
component_data,
job_data=job_data,
)
elif name in eps:
func = eps[name]

195
jenkins_jobs/root_base.py Normal file
View File

@ -0,0 +1,195 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import namedtuple
from dataclasses import dataclass
from .constants import MAGIC_MANAGE_STRING
from .errors import JenkinsJobsException
from .formatter import enum_str_format_required_params, enum_str_format_param_defaults
from .expander import Expander, expand_parameters
from .defaults import Defaults
from .dimensions import DimensionsExpander
@dataclass
class RootBase:
"""Base class for YAML root elements - job, view or template"""
_defaults: dict
_expander: Expander
_keep_descriptions: bool
_id: str
name: str
description: str
defaults_name: str
params: dict
contents: dict
@property
def id(self):
if self._id:
return self._id
else:
return self.name
def _format_description(self, params):
if self.description is None:
defaults = self._pick_defaults(self.defaults_name)
description = defaults.params.get("description")
else:
description = self.description
if description is None and self._keep_descriptions:
return {}
expanded_desc = self._expander.expand(description, params)
return {"description": (expanded_desc or "") + MAGIC_MANAGE_STRING}
def _pick_defaults(self, name, merge_global=True):
try:
defaults = self._defaults[name]
except KeyError:
if name == "global":
return Defaults.empty()
raise JenkinsJobsException(
f"Job template {self.name!r} wants defaults {self.defaults_name!r}"
" but it was never defined"
)
if name == "global":
return defaults
if merge_global:
return defaults.merged_with_global(self._pick_defaults("global"))
else:
return defaults
class NonTemplateRootMixin:
def top_level_generate_items(self):
defaults = self._pick_defaults(self.defaults_name, merge_global=False)
description = self._format_description(params={})
data = self._as_dict()
contents = self._expander.expand(data, self.params)
yield {
**defaults.contents,
**contents,
**description,
}
def generate_items(self, defaults_name, params):
# Do not produce jobs/views from under project - they are produced when
# processed directly from roots, by top_level_generate_items.
return []
class TemplateRootMixin:
def generate_items(self, defaults_name, params):
defaults = self._pick_defaults(defaults_name or self.defaults_name)
item_params = {
**defaults.params,
**self.params,
**params,
"template-name": self.name,
}
if self._id:
item_params["id"] = self._id
contents = {
**defaults.contents,
**self._as_dict(),
}
axes = list(enum_str_format_required_params(self.name))
axes_defaults = dict(enum_str_format_param_defaults(self.name))
dim_expander = DimensionsExpander(context=self.name)
for dim_params in dim_expander.enum_dimensions_params(
axes, item_params, axes_defaults
):
instance_params = {
**item_params,
**dim_params,
}
expanded_params = expand_parameters(
self._expander, instance_params, template_name=self.name
)
exclude_list = expanded_params.get("exclude")
if not dim_expander.is_point_included(exclude_list, expanded_params):
continue
description = self._format_description(expanded_params)
expanded_contents = self._expander.expand(contents, expanded_params)
yield {
**expanded_contents,
**description,
}
class GroupBase:
Spec = namedtuple("Spec", "name params")
def __repr__(self):
return f"<{self}>"
@classmethod
def _spec_from_dict(cls, d, error_context):
if isinstance(d, str):
return cls.Spec(d, params={})
if not isinstance(d, dict):
raise JenkinsJobsException(
f"{error_context}: Job/view spec should name or dict,"
f" but is {type(d)}. Missing indent?"
)
if len(d) != 1:
raise JenkinsJobsException(
f"{error_context}: Job/view dict should be single-item,"
f" but have keys {list(d.keys())}. Missing indent?"
)
name, params = next(iter(d.items()))
if params is None:
params = {}
else:
if not isinstance(params, dict):
raise JenkinsJobsException(
f"{error_context}: Job/view {name} params type should be dict,"
f" but is {type(params)} ({params})."
)
return cls.Spec(name, params)
def _generate_items(self, root_dicts, spec_list, defaults_name, params):
for spec in spec_list:
item = self._pick_item(root_dicts, spec.name)
item_params = {
**params,
**self.params,
**self._my_params,
**spec.params,
}
yield from item.generate_items(defaults_name, item_params)
@property
def _my_params(self):
return {}
def _pick_item(self, root_dict_list, name):
for roots_dict in root_dict_list:
try:
return roots_dict[name]
except KeyError:
pass
raise JenkinsJobsException(
f"{self}: Failed to find suitable job/view/template named '{name}'"
)
@dataclass
class Group(GroupBase):
name: str
specs: list # list[Spec]
params: dict
def generate_items(self, defaults_name, params):
return self._generate_items(self._root_dicts, self.specs, defaults_name, params)

95
jenkins_jobs/roots.py Normal file
View File

@ -0,0 +1,95 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from collections import defaultdict
from .errors import JenkinsJobsException
from .defaults import Defaults
from .job import Job, JobTemplate, JobGroup
from .view import View, ViewTemplate, ViewGroup
from .project import Project
from .macro import macro_adders
logger = logging.getLogger(__name__)
root_adders = {
"defaults": Defaults.add,
"job": Job.add,
"job-template": JobTemplate.add,
"job-group": JobGroup.add,
"view": View.add,
"view-template": ViewTemplate.add,
"view-group": ViewGroup.add,
"project": Project.add,
**macro_adders,
}
class Roots:
"""Container for root YAML elements - jobs, views, templates, projects and macros"""
def __init__(self, config):
self._allow_duplicates = config.yamlparser["allow_duplicates"]
self.defaults = {}
self.jobs = {}
self.job_templates = {}
self.job_groups = {}
self.views = {}
self.view_templates = {}
self.view_groups = {}
self.projects = {}
self.macros = defaultdict(dict) # type -> name -> Macro
def generate_jobs(self):
expanded_jobs = []
for job in self.jobs.values():
expanded_jobs += job.top_level_generate_items()
for project in self.projects.values():
expanded_jobs += project.generate_jobs()
return self._remove_duplicates(expanded_jobs)
def generate_views(self):
expanded_views = []
for view in self.views.values():
expanded_views += view.top_level_generate_items()
for project in self.projects.values():
expanded_views += project.generate_views()
return self._remove_duplicates(expanded_views)
def assign(self, container, id, value, title):
if id in container:
self._handle_dups(f"Duplicate {title}: {id}")
container[id] = value
def _remove_duplicates(self, job_list):
seen = set()
unique_list = []
# Last definition wins.
for job in reversed(job_list):
name = job["name"]
if name in seen:
self._handle_dups(
f"Duplicate definitions for job {name!r} specified",
)
else:
unique_list.append(job)
seen.add(name)
return unique_list[::-1]
def _handle_dups(self, message):
if self._allow_duplicates:
logger.warning(message)
else:
logger.error(message)
raise JenkinsJobsException(message)

95
jenkins_jobs/view.py Normal file
View File

@ -0,0 +1,95 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from dataclasses import dataclass
from .root_base import RootBase, NonTemplateRootMixin, TemplateRootMixin, Group
from .defaults import split_contents_params, view_contents_keys
@dataclass
class ViewBase(RootBase):
view_type: str
@classmethod
def from_dict(cls, config, roots, expander, data):
keep_descriptions = config.yamlparser["keep_descriptions"]
d = {**data}
name = d.pop("name")
id = d.pop("id", None)
description = d.pop("description", None)
defaults = d.pop("defaults", "global")
view_type = d.pop("view-type", "list")
contents, params = split_contents_params(d, view_contents_keys)
return cls(
roots.defaults,
expander,
keep_descriptions,
id,
name,
description,
defaults,
params,
contents,
view_type,
)
def _as_dict(self):
return {
"name": self.name,
"view-type": self.view_type,
**self.contents,
}
class View(ViewBase, NonTemplateRootMixin):
@classmethod
def add(cls, config, roots, expander, param_expander, data):
view = cls.from_dict(config, roots, expander, data)
roots.assign(roots.views, view.id, view, "view")
class ViewTemplate(ViewBase, TemplateRootMixin):
@classmethod
def add(cls, config, roots, expander, params_expander, data):
template = cls.from_dict(config, roots, params_expander, data)
roots.assign(roots.view_templates, template.id, template, "view template")
@dataclass
class ViewGroup(Group):
_views: dict
_view_templates: dict
@classmethod
def add(cls, config, roots, expander, params_expander, data):
d = {**data}
name = d.pop("name")
view_specs = [
cls._spec_from_dict(item, error_context=f"View group {name}")
for item in d.pop("views")
]
group = cls(
name,
view_specs,
d,
roots.views,
roots.view_templates,
)
roots.assign(roots.view_groups, group.name, group, "view group")
def __str__(self):
return f"View group {self.name}"
@property
def _root_dicts(self):
return [self._views, self._view_templates]

View File

@ -0,0 +1,470 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Provides local yaml parsing classes and extends yaml module.
"""Custom application specific yamls tags are supported to provide
enhancements when reading yaml configuration.
Action Tags
^^^^^^^^^^^
These allow data stored in one layout in the source yaml (for convenience
and/or clarity) to be converted into another format for processing by the
targeted module, instead of requiring every module in JJB to be capable of
supporting multiple input formats.
The tag ``!join:`` will treat the first element of the following list as
the delimiter to use, when joining the remaining elements into a string
and returning a single string to be consumed by the specified module option.
This allows users to maintain elements of data in a list structure for ease
of review/maintenance, and have the yaml parser convert it to a string for
consumption as any module argument. The main expected use case is to
allow for generic plugin data such as shell properties to be populated from
a list construct which the yaml parser converts to a single string, instead
of trying to support this within the module code which would require a
templating engine similar to Jinja.
Generic Example:
.. literalinclude:: /../../tests/loader/fixtures/joinlists.yaml
Environment Inject:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/string_join.yaml
While this mechanism can also be used for options where the module already
supports delimiters, that should be considered a bug: the existing code ought
to accept a list and a delimiter and perform the correct conversion for you.
Should you discover a module that takes arguments with delimiters and the
existing JJB codebase does not handle accepting lists, then this can be
used as a temporary solution in place of using very long strings:
Extended Params Example:
.. literalinclude::
/../../tests/parameters/fixtures/extended-choice-param-full.yaml
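As a further inline illustration (the job name and commands are hypothetical),
the following joins a list of commands into a single shell string, producing
``make build && make test``:

- job:
    name: join-example
    builders:
      - shell: !join:
          - ' && '
          - - make build
            - make test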
Inclusion Tags
^^^^^^^^^^^^^^
These allow inclusion of arbitrary files as a method of having blocks of data
managed separately from the yaml job configurations. A specific usage of this is
inlining scripts contained in separate files, although such tags may also be
used to simplify usage of macros or job templates.
The tag ``!include:`` will treat the following string as file which should be
parsed as yaml configuration data.
Example:
.. literalinclude:: /../../tests/loader/fixtures/include001.yaml
contents of include001.yaml.inc:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/include001.yaml.inc
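A minimal inline sketch (job and file names are hypothetical); the included
file is parsed as YAML and its content becomes the value of ``builders``, with
the path resolved against the include search path:

- job:
    name: include-example
    builders: !include: my-builders.yaml.inc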
The tag ``!include-raw:`` will treat the given string or list of strings as
filenames to be opened as one or more data blobs, which should be read into
the calling yaml construct without any further parsing. Any data in a file
included through this tag will be treated as string data.
Examples:
.. literalinclude:: /../../tests/loader/fixtures/include-raw001-job.yaml
contents of include-raw001-hello-world.sh:
.. literalinclude::
/../../tests/loader/fixtures/include-raw001-hello-world.sh
contents of include-raw001-vars.sh:
.. literalinclude::
/../../tests/loader/fixtures/include-raw001-vars.sh
using a list of files:
.. literalinclude::
/../../tests/loader/fixtures/include-raw-multi001.yaml
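A minimal inline sketch (job and script names are hypothetical); the file
contents are inserted verbatim as the shell step:

- job:
    name: raw-include-example
    builders:
      - shell: !include-raw: scripts/build.sh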
The tag ``!include-raw-escape:`` treats the given string or list of strings as
filenames to be opened as one or more data blobs, which should be escaped
before being read in as string data. This allows job-templates to use this tag
to include scripts from files without needing to escape braces in the original
file.
.. warning::
When used within a macro, ``!include-raw-escape:`` should only be used if
parameters are passed into the escaped file and you would like to escape
those parameters. If the file does not have any jjb parameters passed into
it, then ``!include-raw:`` should be used instead; otherwise you will run
into an interesting issue where ``!include-raw-escape:`` actually adds
additional curly braces around existing curly braces. For example,
${PROJECT} becomes ${{PROJECT}}, which may break bash scripts.
Examples:
.. literalinclude::
/../../tests/loader/fixtures/include-raw-escaped001-template.yaml
contents of include-raw001-hello-world.sh:
.. literalinclude::
/../../tests/loader/fixtures/include-raw001-hello-world.sh
contents of include-raw001-vars.sh:
.. literalinclude::
/../../tests/loader/fixtures/include-raw001-vars.sh
using a list of files:
.. literalinclude::
/../../tests/loader/fixtures/include-raw-escaped-multi001.yaml
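A minimal inline sketch of the template case (template and script names are
hypothetical); braces inside the included script are escaped, so they survive
the template parameter expansion:

- job-template:
    name: '{name}-deploy'
    builders:
      - shell: !include-raw-escape: scripts/deploy.sh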
For all the multi file includes, the files are simply appended using a newline
character.
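A hypothetical sketch of the list form, where both scripts end up concatenated
into a single shell step:

- job:
    name: raw-multi-example
    builders:
      - shell: !include-raw:
          - scripts/part-one.sh
          - scripts/part-two.sh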
To allow job templates to perform substitution on the path names, lazy loading
support is enabled when a filename containing a python format placeholder is
encountered: instead of returning the contents during yaml parsing, loading is
delayed until the variable substitution is performed.
Example:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/lazy-load-jobs001.yaml
using a list of files:
.. literalinclude::
/../../tests/yamlparser/job_fixtures/lazy-load-jobs-multi001.yaml
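A minimal sketch (template and path are hypothetical); because the path
contains a placeholder, the file is looked up only once the template
parameters have been substituted:

- job-template:
    name: '{name}-build'
    builders:
      - shell: !include-raw: scripts/{name}.sh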
.. note::
Because lazy-loading involves performing the substitution on the file
name, it means that jenkins-job-builder can not call the variable
substitution on the contents of the file. This means that the
``!include-raw:`` tag will behave as though ``!include-raw-escape:`` tag
was used instead whenever name substitution on the filename is to be
performed.
Given the behaviour described above, when substitution is to be performed
on any filename passed via ``!include-raw-escape:`` the tag will be
automatically converted to ``!include-raw:`` and no escaping will be
performed.
The tag ``!include-jinja2:`` will treat the given string or list of strings as
filenames to be opened as Jinja2 templates, which should be rendered to a
string and included in the calling YAML construct. (This is analogous to the
templating that will happen with ``!include-raw``.)
Examples:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/jinja01.yaml
contents of jinja01.yaml.inc:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/jinja01.yaml.inc
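A minimal inline sketch (job and template file names are hypothetical); the
file is rendered as a Jinja2 template and the resulting string becomes the
shell step:

- job:
    name: jinja-include-example
    builders:
      - shell: !include-jinja2: build.sh.j2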
The tag ``!j2:`` takes a string and treats it as a Jinja2 template. It will be
rendered (with the variables in that context) and included in the calling YAML
construct.
Examples:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/jinja-string01.yaml
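A minimal inline sketch (template name is hypothetical); the tagged string is
rendered with the variables available in the template context:

- job-template:
    name: '{name}-echo'
    builders:
      - shell: !j2: echo building {{ name }}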
The tag ``!j2-yaml:`` is similar to the ``!j2:`` tag, except that it loads the
Jinja-rendered string as YAML and embeds it in the calling YAML construct. This
provides a very flexible and convenient way of generating pieces of YAML
structures. One use case is defining complex YAML structures with much
simpler configuration, without any duplication.
Examples:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/jinja-yaml01.yaml
Another use case is controlling lists dynamically, like conditionally adding
list elements based on project configuration.
Examples:
.. literalinclude:: /../../tests/yamlparser/job_fixtures/jinja-yaml02.yaml
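A minimal sketch (all names hypothetical, assuming a ``steps`` list is supplied
as a template parameter); the rendered text is loaded as YAML, so a loop can
emit a whole list of builders:

- job-template:
    name: '{name}-steps'
    builders: !j2-yaml: |
      {% for step in steps %}
      - shell: "echo {{ step }}"
      {% endfor %}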
"""
import abc
import os.path
import logging
import sys
from pathlib import Path
import jinja2
import jinja2.meta
import yaml
from .errors import JenkinsJobsException
from .formatter import CustomFormatter, enum_str_format_required_params
logger = logging.getLogger(__name__)
if sys.version_info >= (3, 8):
from functools import cached_property
else:
from functools import lru_cache
# cached_property was introduced in python 3.8.
# Recipe from https://stackoverflow.com/a/19979379
def cached_property(fn):
return property(lru_cache()(fn))
class BaseYamlObject(metaclass=abc.ABCMeta):
@staticmethod
def path_list_from_node(loader, node):
if isinstance(node, yaml.ScalarNode):
return [loader.construct_yaml_str(node)]
elif isinstance(node, yaml.SequenceNode):
return loader.construct_sequence(node)
else:
raise yaml.constructor.ConstructorError(
None,
None,
f"expected either a sequence or scalar node, but found {node.id}",
node.start_mark,
)
@classmethod
def from_yaml(cls, loader, node):
value = loader.construct_yaml_str(node)
return cls(loader.jjb_config, loader, value)
def __init__(self, jjb_config, loader):
self._search_path = jjb_config.yamlparser["include_path"]
if loader.source_path:
# Loaded from a file, find includes beside it too.
self._search_path.append(os.path.dirname(loader.source_path))
self._loader = loader
allow_empty = jjb_config.yamlparser["allow_empty_variables"]
self._formatter = CustomFormatter(allow_empty)
@abc.abstractmethod
def expand(self, expander, params):
"""Expand object but do not substitute template parameters"""
pass
def subst(self, expander, params):
"""Expand object and substitute template parameters"""
return self.expand(expander, params)
def _find_file(self, rel_path):
search_path = self._search_path
if "." not in search_path:
search_path.append(".")
dir_list = [Path(d).expanduser() for d in self._search_path]
for dir in dir_list:
candidate = dir.joinpath(rel_path)
if candidate.is_file():
logger.debug("Including file %r from path %r", str(rel_path), str(dir))
return candidate
raise JenkinsJobsException(
f"File {rel_path} does not exist on any of include directories:"
f" {','.join([str(d) for d in dir_list])}"
)
class J2BaseYamlObject(BaseYamlObject):
def __init__(self, jjb_config, loader):
super().__init__(jjb_config, loader)
self._jinja2_env = jinja2.Environment(
loader=jinja2.FileSystemLoader(self._search_path),
undefined=jinja2.StrictUndefined,
)
@staticmethod
def _render_template(template_text, template, params):
try:
return template.render(params)
except jinja2.UndefinedError as x:
if len(template_text) > 40:
text = template_text[:40] + "..."
else:
text = template_text
raise JenkinsJobsException(
f"While formatting jinja2 template {text!r}: {x}"
)
class J2Template(J2BaseYamlObject):
def __init__(self, jjb_config, loader, template_text):
super().__init__(jjb_config, loader)
self._template_text = template_text
self._template = self._jinja2_env.from_string(template_text)
@cached_property
def required_params(self):
ast = self._jinja2_env.parse(self._template_text)
return jinja2.meta.find_undeclared_variables(ast)
def _render(self, params):
return self._render_template(self._template_text, self._template, params)
class J2String(J2Template):
yaml_tag = "!j2:"
def expand(self, expander, params):
return self._render(params)
class J2Yaml(J2Template):
yaml_tag = "!j2-yaml:"
def expand(self, expander, params):
text = self._render(params)
data = self._loader.load(text)
return expander.expand(data, params)
class IncludeJinja2(J2BaseYamlObject):
yaml_tag = "!include-jinja2:"
@classmethod
def from_yaml(cls, loader, node):
path_list = cls.path_list_from_node(loader, node)
return cls(loader.jjb_config, loader, path_list)
def __init__(self, jjb_config, loader, path_list):
super().__init__(jjb_config, loader)
self._path_list = path_list
@property
def required_params(self):
return []
def expand(self, expander, params):
return "\n".join(
self._expand_path(expander, params, path) for path in self._path_list
)
def _expand_path(self, expander, params, path_template):
rel_path = self._formatter.format(path_template, **params)
full_path = self._find_file(rel_path)
template_text = full_path.read_text()
template = self._jinja2_env.from_string(template_text)
return self._render_template(template_text, template, params)
class IncludeBaseObject(BaseYamlObject):
@classmethod
def from_yaml(cls, loader, node):
path_list = cls.path_list_from_node(loader, node)
return cls(loader.jjb_config, loader, path_list)
def __init__(self, jjb_config, loader, path_list):
super().__init__(jjb_config, loader)
self._path_list = path_list
@property
def required_params(self):
for path in self._path_list:
yield from enum_str_format_required_params(path)
class YamlInclude(IncludeBaseObject):
yaml_tag = "!include:"
def expand(self, expander, params):
yaml_list = [
self._expand_path(expander, params, path) for path in self._path_list
]
if len(yaml_list) == 1:
return yaml_list[0]
else:
return "\n".join(yaml_list)
def _expand_path(self, expander, params, path_template):
rel_path = self._formatter.format(path_template, **params)
full_path = self._find_file(rel_path)
text = full_path.read_text()
data = self._loader.load(text)
return expander.expand(data, params)
class IncludeRawBase(IncludeBaseObject):
def expand(self, expander, params):
return "\n".join(self._expand_path(path, params) for path in self._path_list)
def subst(self, expander, params):
return "\n".join(self._subst_path(path, params) for path in self._path_list)
class IncludeRaw(IncludeRawBase):
yaml_tag = "!include-raw:"
def _expand_path(self, rel_path_template, params):
rel_path = self._formatter.format(rel_path_template, **params)
full_path = self._find_file(rel_path)
return full_path.read_text()
def _subst_path(self, rel_path_template, params):
rel_path = self._formatter.format(rel_path_template, **params)
full_path = self._find_file(rel_path)
template = full_path.read_text()
return self._formatter.format(template, **params)
class IncludeRawEscape(IncludeRawBase):
yaml_tag = "!include-raw-escape:"
def _expand_path(self, rel_path_template, params):
rel_path = self._formatter.format(rel_path_template, **params)
full_path = self._find_file(rel_path)
text = full_path.read_text()
# Backward compatibility:
# if used inside job or macro without parameters, curly braces are duplicated.
return text.replace("{", "{{").replace("}", "}}")
def _subst_path(self, rel_path_template, params):
rel_path = self._formatter.format(rel_path_template, **params)
full_path = self._find_file(rel_path)
return full_path.read_text()
class YamlListJoin:
yaml_tag = "!join:"
@classmethod
def from_yaml(cls, loader, node):
value = loader.construct_sequence(node, deep=True)
if len(value) != 2:
raise yaml.constructor.ConstructorError(
None,
None,
"Join value should contain 2 elements: delimiter and string list,"
f" but contains {len(value)} elements: {value!r}",
node.start_mark,
)
delimiter, seq = value
return delimiter.join(seq)

View File

@ -26,8 +26,8 @@ def test_delete_single_job(mocker, default_config_file, execute_jenkins_jobs):
Test handling the deletion of a single Jenkins job.
"""
mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_jobs")
mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_views")
mocker.patch("jenkins_jobs.cli.subcommand.base.JenkinsManager.delete_jobs")
mocker.patch("jenkins_jobs.cli.subcommand.base.JenkinsManager.delete_views")
args = ["--conf", default_config_file, "delete", "test_job"]
execute_jenkins_jobs(args)
@ -38,8 +38,8 @@ def test_delete_multiple_jobs(mocker, default_config_file, execute_jenkins_jobs)
Test handling the deletion of multiple Jenkins jobs.
"""
mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_jobs")
mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_views")
mocker.patch("jenkins_jobs.cli.subcommand.base.JenkinsManager.delete_jobs")
mocker.patch("jenkins_jobs.cli.subcommand.base.JenkinsManager.delete_views")
args = ["--conf", default_config_file, "delete", "test_job1", "test_job2"]
execute_jenkins_jobs(args)

View File

@ -25,7 +25,7 @@ def test_delete_all_accept(mocker, default_config_file, execute_jenkins_jobs):
Test handling the deletion of a single Jenkins job.
"""
mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_all_jobs")
mocker.patch("jenkins_jobs.cli.subcommand.base.JenkinsManager.delete_all_jobs")
mocker.patch("jenkins_jobs.builder.JenkinsManager.get_views", return_value=[None])
mocker.patch("jenkins_jobs.utils.input", return_value="y")
@ -38,7 +38,7 @@ def test_delete_all_abort(mocker, default_config_file, execute_jenkins_jobs):
Test handling the deletion of a single Jenkins job.
"""
mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_all_jobs")
mocker.patch("jenkins_jobs.cli.subcommand.base.JenkinsManager.delete_all_jobs")
mocker.patch("jenkins_jobs.utils.input", return_value="n")
args = ["--conf", default_config_file, "delete-all"]

View File

@ -191,8 +191,8 @@ def test_plugins_info_stub_option(mocker, fixtures_dir, execute_jenkins_jobs):
"""
Test handling of plugins_info stub option.
"""
mocker.patch("jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML")
registry_mock = mocker.patch("jenkins_jobs.cli.subcommand.update.ModuleRegistry")
mocker.patch("jenkins_jobs.cli.subcommand.base.XmlJobGenerator.generateXML")
registry_mock = mocker.patch("jenkins_jobs.cli.subcommand.base.ModuleRegistry")
plugins_info_stub_yaml_file = fixtures_dir / "plugins-info.yaml"
args = [
@ -218,8 +218,8 @@ def test_bogus_plugins_info_stub_option(
Verify that a JenkinsJobException is raised if the plugins_info stub
file does not yield a list as its top-level object.
"""
mocker.patch("jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML")
mocker.patch("jenkins_jobs.cli.subcommand.update.ModuleRegistry")
mocker.patch("jenkins_jobs.cli.subcommand.base.XmlJobGenerator.generateXML")
mocker.patch("jenkins_jobs.cli.subcommand.base.ModuleRegistry")
plugins_info_stub_yaml_file = fixtures_dir / "bogus-plugins-info.yaml"
args = [

View File

@ -136,7 +136,7 @@ def test_update_timeout_not_set(mocker, fixtures_dir, default_config_file):
Test that the Jenkins object has the timeout set on it only when
provided via the config option.
"""
jenkins_mock = mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager")
jenkins_mock = mocker.patch("jenkins_jobs.cli.subcommand.base.JenkinsManager")
path = fixtures_dir / "cmd-002.yaml"
args = ["--conf", default_config_file, "update", str(path)]
@ -159,7 +159,7 @@ def test_update_timeout_set(mocker, fixtures_dir):
Test that the Jenkins object has the timeout set on it only when
provided via the config option.
"""
jenkins_mock = mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager")
jenkins_mock = mocker.patch("jenkins_jobs.cli.subcommand.base.JenkinsManager")
path = fixtures_dir / "cmd-002.yaml"
config_file = fixtures_dir / "non-default-timeout.ini"

View File

@ -4,9 +4,11 @@ import xml.etree.ElementTree as XML
from pathlib import Path
import pytest
import yaml
from jenkins_jobs.alphanum import AlphanumSort
from jenkins_jobs.config import JJBConfig
from jenkins_jobs.loader import Loader
from jenkins_jobs.modules import project_externaljob
from jenkins_jobs.modules import project_flow
from jenkins_jobs.modules import project_githuborg
@ -14,10 +16,11 @@ from jenkins_jobs.modules import project_matrix
from jenkins_jobs.modules import project_maven
from jenkins_jobs.modules import project_multibranch
from jenkins_jobs.modules import project_multijob
from jenkins_jobs.parser import YamlParser
from jenkins_jobs.registry import ModuleRegistry
from jenkins_jobs.xml_config import XmlJob, XmlJobGenerator, XmlViewGenerator
import jenkins_jobs.local_yaml as yaml
from jenkins_jobs.roots import Roots
from jenkins_jobs.loader import load_files
# Avoid writing to ~/.cache/jenkins_jobs.
@ -59,15 +62,16 @@ def mock_iter_entry_points():
@pytest.fixture
def input(scenario):
return yaml.load(scenario.in_path.read_text())
def input(scenario, jjb_config):
loader = Loader.empty(jjb_config)
return loader.load_path(scenario.in_path)
@pytest.fixture
def plugins_info(scenario):
if not scenario.plugins_info_path.exists():
return None
return yaml.load(scenario.plugins_info_path.read_text())
return yaml.safe_load(scenario.plugins_info_path.read_text())
@pytest.fixture
@ -117,8 +121,11 @@ def expected_error(scenario):
def check_folder(scenario, jjb_config, input):
if "name" not in input:
return
parser = YamlParser(jjb_config)
*dirs, name = parser._getfullname(input).split("/")
if "folder" in input:
full_name = input["folder"] + "/" + input["name"]
else:
full_name = input["name"]
*dirs, name = full_name.split("/")
input_dir = scenario.in_path.parent
expected_out_dirs = [input_dir.joinpath(*dirs)]
actual_out_dirs = [path.parent for path in scenario.out_paths]
@ -127,8 +134,6 @@ def check_folder(scenario, jjb_config, input):
@pytest.fixture
def check_generator(scenario, input, expected_output, jjb_config, registry, project):
registry.set_parser_data({})
def check(Generator):
if project:
xml = project.root_xml(input)
@ -146,26 +151,27 @@ def check_generator(scenario, input, expected_output, jjb_config, registry, proj
@pytest.fixture
def check_parser(jjb_config, registry):
parser = YamlParser(jjb_config)
def check(in_path):
parser.parse(str(in_path))
registry.set_parser_data(parser.data)
job_data_list, job_view_list = parser.expandYaml(registry)
roots = Roots(jjb_config)
load_files(jjb_config, roots, [in_path])
registry.set_macros(roots.macros)
job_data_list = roots.generate_jobs()
view_data_list = roots.generate_views()
generator = XmlJobGenerator(registry)
_ = generator.generateXML(job_data_list)
_ = generator.generateXML(view_data_list)
return check
@pytest.fixture
def check_job(scenario, expected_output, jjb_config, registry):
parser = YamlParser(jjb_config)
def check():
parser.parse(str(scenario.in_path))
registry.set_parser_data(parser.data)
job_data_list, view_data_list = parser.expandYaml(registry)
roots = Roots(jjb_config)
load_files(jjb_config, roots, [scenario.in_path])
registry.set_macros(roots.macros)
job_data_list = roots.generate_jobs()
registry.amend_job_dicts(job_data_list)
generator = XmlJobGenerator(registry)
job_xml_list = generator.generateXML(job_data_list)
job_xml_list.sort(key=AlphanumSort)
@ -187,12 +193,11 @@ def check_job(scenario, expected_output, jjb_config, registry):
@pytest.fixture
def check_view(scenario, expected_output, jjb_config, registry):
parser = YamlParser(jjb_config)
def check():
parser.parse(str(scenario.in_path))
registry.set_parser_data(parser.data)
job_data_list, view_data_list = parser.expandYaml(registry)
roots = Roots(jjb_config)
load_files(jjb_config, roots, [scenario.in_path])
registry.set_macros(roots.macros)
view_data_list = roots.generate_views()
generator = XmlViewGenerator(registry)
view_xml_list = generator.generateXML(view_data_list)
view_xml_list.sort(key=AlphanumSort)

View File

@ -0,0 +1,215 @@
import pytest
from jinja2 import StrictUndefined
from jenkins_jobs.formatter import (
CustomFormatter,
enum_str_format_required_params,
enum_str_format_param_defaults,
)
class AnObject:
def __init__(self, val):
self.val = val
# Format, kwargs, used kwargs, defaults, result.
cases = [
# Single variable, whole string.
("{abc}", {"abc": "123"}, {"abc"}, {}, "123"),
("{abc:>5}", {"abc": "123"}, {"abc"}, {}, " 123"),
("{abc:d}", {"abc": 123}, {"abc"}, {}, "123"),
("{abc|555}", {"abc": "123"}, {"abc"}, {"abc": "555"}, "123"),
("{abc|555}", {}, {"abc"}, {"abc": "555"}, "555"),
pytest.param(
"{abc|555:d}",
{},
{"abc"},
{"abc": "555"},
"555",
marks=pytest.mark.xfail(reason="Format specifier for default is not supported"),
),
# Retain original object type when whole string is a variable template.
("{obj:abc}", {"abc": "123"}, {"abc"}, {}, "123"),
("{obj:abc}", {"abc": 123}, {"abc"}, {}, 123),
("{abc}", {"abc": 123}, {"abc"}, {}, 123),
("{obj:abc|555}", {"abc": 123}, {"abc"}, {"abc": "555"}, 123),
("{abc|555}", {"abc": 123}, {"abc"}, {"abc": "555"}, 123),
("{obj:abc}", {"abc": [1, 2, 3]}, {"abc"}, {}, [1, 2, 3]),
("{abc}", {"abc": [1, 2, 3]}, {"abc"}, {}, [1, 2, 3]),
("{obj:abc|555}", {}, {"abc"}, {"abc": "555"}, "555"),
# Single variable.
(" {abc} ", {"abc": "123"}, {"abc"}, {}, " 123 "),
(" {abc:<5} ", {"abc": "123"}, {"abc"}, {}, " 123 "),
(" {abc|555} ", {"abc": "123"}, {"abc"}, {"abc": "555"}, " 123 "),
(" {abc|555} ", {}, {"abc"}, {"abc": "555"}, " 555 "),
("x{abc}y", {"abc": "123"}, {"abc"}, {}, "x123y"),
("x {abc} y", {"abc": "123"}, {"abc"}, {}, "x 123 y"),
("x {abc|555} y", {"abc": "123"}, {"abc"}, {"abc": "555"}, "x 123 y"),
    # Quoted single variable, whole string.
("{{abc}}", {"abc": "123"}, {}, {}, "{abc}"),
("{{abc|555}}", {"abc": "123"}, {}, {}, "{abc|555}"),
("{{obj:abc}}", {"abc": "123"}, {}, {}, "{obj:abc}"),
("{{obj:abc|555}}", {"abc": "123"}, {}, {}, "{obj:abc|555}"),
# Quoted single variable.
(" {{abc}} ", {"abc": "123"}, {}, {}, " {abc} "),
("x{{abc}}y", {"abc": "123"}, {}, {}, "x{abc}y"),
("x {{abc}} y", {"abc": "123"}, {}, {}, "x {abc} y"),
# Multiple variables.
("{a}{b}", {"a": "12", "b": "34"}, {"a", "b"}, {}, "1234"),
(" {a} {b} ", {"a": "12", "b": "34"}, {"a", "b"}, {}, " 12 34 "),
(" {a|555} {b} ", {"a": "12", "b": "34"}, {"a", "b"}, {"a": "555"}, " 12 34 "),
(" {a|555} {b} ", {"b": "34"}, {"a", "b"}, {"a": "555"}, " 555 34 "),
("x{a}y{b}z", {"a": "12", "b": "34"}, {"a", "b"}, {}, "x12y34z"),
("x {a} y {b} z", {"a": "12", "b": "34"}, {"a", "b"}, {}, "x 12 y 34 z"),
("x {a:<4} y {b} z", {"a": "12", "b": "34"}, {"a", "b"}, {}, "x 12 y 34 z"),
# Quoted multiple variables
("{{a}}{{b}}", {"a": "12", "b": "34"}, {}, {}, "{a}{b}"),
(" {{a}} {{b}} ", {"a": "12", "b": "34"}, {}, {}, " {a} {b} "),
("x{{a}}y{{b}}z", {"a": "12", "b": "34"}, {}, {}, "x{a}y{b}z"),
("x {{a}} y {{b}} z", {"a": "12", "b": "34"}, {}, {}, "x {a} y {b} z"),
("x {{a}} y {{b|555}} z", {"a": "12", "b": "34"}, {}, {}, "x {a} y {b|555} z"),
# Multiple-quoted.
pytest.param(
"{{{abc}}}",
{"abc": "123"},
{},
{},
"{123}",
marks=pytest.mark.xfail(reason="Bug"),
), # Actual result: "123".
(" {{{abc}}} ", {"abc": "123"}, {"abc"}, {}, " {123} "),
("x{{{abc}}}y", {"abc": "123"}, {"abc"}, {}, "x{123}y"),
("{{{{abc}}}}", {"abc": "123"}, {}, {}, "{{abc}}"),
(" {{{{abc}}}} ", {"abc": "123"}, {}, {}, " {{abc}} "),
("x{{{{abc}}}}y", {"abc": "123"}, {}, {}, "x{{abc}}y"),
("x{{{{abc:30}}}}y", {"abc": "123"}, {}, {}, "x{{abc:30}}y"),
# With attribute/item getters.
("{abc.val}", {"abc": AnObject("123")}, {"abc"}, {}, "123"),
("x{abc.val}y", {"abc": AnObject("123")}, {"abc"}, {}, "x123y"),
pytest.param(
"{abc.val|xy}",
{},
{"abc"},
{"abc": "xy"},
"xy",
marks=pytest.mark.xfail(reason="Default for complex values is not supported"),
),
("{abc[1]}", {"abc": ["12", "34", "56"]}, {"abc"}, {}, "34"),
("x{abc[1]}y", {"abc": ["12", "34", "56"]}, {"abc"}, {}, "x34y"),
pytest.param(
"{abc[1]|xy}",
{},
{"abc"},
{"abc": "xy"},
"xy",
marks=pytest.mark.xfail(reason="Default for complex values is not supported"),
),
# Quoted with attribute/item getters.
("{{abc.val}}", {"abc": AnObject("123")}, {}, {}, "{abc.val}"),
("x{{abc.val}}y", {"abc": AnObject("123")}, {}, {}, "x{abc.val}y"),
("{{abc.val|xy}}", {}, {}, {}, "{abc.val|xy}"),
("{{abc[1]}}", {"abc": ["12", "34", "56"]}, {}, {}, "{abc[1]}"),
("x{{abc[1]}}y", {"abc": ["12", "34", "56"]}, {}, {}, "x{abc[1]}y"),
("{{abc[1]|xy}}", {}, {}, {}, "{abc[1]|xy}"),
# With formatters.
("{abc!r}", {"abc": "123"}, {"abc"}, {}, "'123'"),
("x{abc!r}y", {"abc": "123"}, {"abc"}, {}, "x'123'y"),
# Quoted with formatters.
("{{abc!r}}", {"abc": "123"}, {}, {}, "{abc!r}"),
("x{{abc!r}}y", {"abc": "123"}, {}, {}, "x{abc!r}y"),
# Multiple defaults
(
" {a|555} {b|666} {c|} ",
{},
{"a", "b", "c"},
{"a": "555", "b": "666", "c": ""},
" 555 666 ",
),
]
@pytest.mark.parametrize(
"format,vars,used_vars,expected_defaults,expected_result", cases
)
def test_format(format, vars, used_vars, expected_defaults, expected_result):
formatter = CustomFormatter(allow_empty=False)
result = formatter.format(format, **vars)
assert result == expected_result
@pytest.mark.parametrize(
"format,vars,expected_used_vars,expected_defaults,expected_result", cases
)
def test_used_params(
format, vars, expected_used_vars, expected_defaults, expected_result
):
used_vars = set(enum_str_format_required_params(format))
assert used_vars == set(expected_used_vars)
@pytest.mark.parametrize(
"format,vars,expected_used_vars,expected_defaults,expected_result", cases
)
def test_defaults(format, vars, expected_used_vars, expected_defaults, expected_result):
defaults = dict(enum_str_format_param_defaults(format))
assert defaults == expected_defaults
positional_cases = [
"{}",
"{:d}",
"{!r}",
"{[1]}",
"{[1]:d}",
"{[1]!r}",
"{.abc}",
"{.abc:d}",
"{.abc!r}",
"{0}",
"{2}",
"{2:<5}",
"{2!r}",
"{2.abc}",
"{2.abc!r}",
"{1[2]}",
"{1[2]!r}",
" {} ",
" {1} ",
"x{}y",
"x{2}y",
"x {} y",
"x {0} y",
"{abc}{}",
" {abc} {1} ",
" {abc} {1!r} ",
"x{abc}y{}z",
"x{abc} y {1} z",
"x{abc} y {1.abc} z",
"x{abc} y {1.abc:d} z",
]
@pytest.mark.parametrize("format", positional_cases)
def test_positional_args(format):
formatter = CustomFormatter(allow_empty=False)
with pytest.raises(RuntimeError) as excinfo:
list(formatter.enum_required_params(format))
message = f"Positional format arguments are not supported: {format!r}"
assert str(excinfo.value) == message
def test_undefined_with_default_whole():
formatter = CustomFormatter(allow_empty=False)
format = "{missing|default_value}"
params = {"missing": StrictUndefined(name="missing")}
result = formatter.format(format, **params)
assert result == "default_value"
def test_undefined_with_default():
formatter = CustomFormatter(allow_empty=False)
format = "[{missing|default_value}]"
params = {"missing": StrictUndefined(name="missing")}
result = formatter.format(format, **params)
assert result == "[default_value]"

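The cases above document the '{name|default}' syntax that CustomFormatter layers on top of str.format. A minimal usage sketch, restricted to the functions imported at the top of this test file:
    from jenkins_jobs.formatter import (
        CustomFormatter,
        enum_str_format_param_defaults,
        enum_str_format_required_params,
    )
    formatter = CustomFormatter(allow_empty=False)
    # The default after '|' is used only when the parameter is not supplied.
    assert formatter.format("{abc|555}", abc="123") == "123"
    assert formatter.format("{abc|555}") == "555"
    # Required parameters and their defaults can be enumerated without formatting.
    assert set(enum_str_format_required_params("{abc|555}")) == {"abc"}
    assert dict(enum_str_format_param_defaults("{abc|555}")) == {"abc": "555"}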
View File

@ -0,0 +1,21 @@
from unittest.mock import Mock
import pytest
from jenkins_jobs.config import JJBConfig
from jenkins_jobs.yaml_objects import J2String
cases = [
("{{ abc }}", {"abc"}),
("{% if cond %} {{ x }} {% else %} {{ y }} {% endif %}", {"cond", "x", "y"}),
("{# {{ abc }} #}", {}),
]
@pytest.mark.parametrize("format,expected_used_params", cases)
def test_jinja2_required_params(format, expected_used_params):
config = JJBConfig()
loader = Mock(source_path=None)
template = J2String(config, loader, format)
assert template.required_params == set(expected_used_params)

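J2String appears to be the object behind the '!j2:' tag used in later fixtures; its required_params property lists the parameters a template references. A minimal sketch mirroring the first case above, with the same mocked loader:
    from unittest.mock import Mock
    from jenkins_jobs.config import JJBConfig
    from jenkins_jobs.yaml_objects import J2String
    config = JJBConfig()
    loader = Mock(source_path=None)  # stand-in loader, as in the test above
    template = J2String(config, loader, "{{ abc }}")
    assert template.required_params == {"abc"}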
View File

@ -1,10 +1,10 @@
- retain_anchors_wrapper_defaults: &retain_anchors_wrapper_defaults
- _retain_anchors_wrapper_defaults: &retain_anchors_wrapper_defaults
name: 'retain_anchors_wrapper_defaults'
wrappers:
- timeout:
timeout: 180
fail: true
- retain_anchors_defaults: &retain_anchors_defaults
- _retain_anchors_defaults: &retain_anchors_defaults
name: 'retain_anchors_defaults'
<<: *retain_anchors_wrapper_defaults

View File

@ -1,3 +1,3 @@
- globals:
- defaults:
name: globals
docker-image: &docker-image "ubuntu:latest"

View File

@ -1,4 +1,4 @@
- builders:
- builder:
name: custom-copytarball1
builders:
- copyartifact: &custom-copytarball

View File

@ -1,4 +1,4 @@
- builders:
- builder:
name: custom-copytarball2
builders:
- copyartifact: &custom-copytarball

View File

@ -0,0 +1,14 @@
[
{
"name": "test-job-include-raw-1",
"builders": [
{
"shell": "#!/bin/bash\n#\n# Sample script showing how the yaml include-raw tag can be used\n# to inline scripts that are maintained outside of the jenkins\n# job yaml configuration.\n\necho \"hello world\"\n\nexit 0\n"
},
{
"shell": "#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"hello\"\nVAR2=\"world\"\nVAR3=\"${VAR1} ${VAR2}\"\n\n[[ -n \"${VAR3}\" ]] && {\n # this next section is executed as one\n echo \"${VAR3}\"\n exit 0\n}\n"
}
],
"description": "<!-- Managed by Jenkins Job Builder -->"
}
]

View File

@ -0,0 +1,45 @@
<?xml version="1.0" encoding="utf-8"?>
<project>
<actions/>
<description>&lt;!-- Managed by Jenkins Job Builder --&gt;</description>
<keepDependencies>false</keepDependencies>
<blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
<blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
<concurrentBuild>false</concurrentBuild>
<canRoam>true</canRoam>
<properties/>
<scm class="hudson.scm.NullSCM"/>
<builders>
<hudson.tasks.Shell>
<command>#!/bin/bash
#
# Sample script showing how the yaml include-raw tag can be used
# to inline scripts that are maintained outside of the jenkins
# job yaml configuration.
echo &quot;hello world&quot;
exit 0
</command>
</hudson.tasks.Shell>
<hudson.tasks.Shell>
<command>#!/bin/bash
#
# sample script to check that brackets aren't escaped
# when using the include-raw application yaml tag
VAR1=&quot;hello&quot;
VAR2=&quot;world&quot;
VAR3=&quot;${VAR1} ${VAR2}&quot;
[[ -n &quot;${VAR3}&quot; ]] &amp;&amp; {
# this next section is executed as one
echo &quot;${VAR3}&quot;
exit 0
}
</command>
</hudson.tasks.Shell>
</builders>
<publishers/>
<buildWrappers/>
</project>

View File

@ -0,0 +1,14 @@
[
{
"name": "test-job-include-raw-1",
"builders": [
{
"shell": "#!/bin/bash\n#\n# Sample script showing how the yaml include-raw tag can be used\n# to inline scripts that are maintained outside of the jenkins\n# job yaml configuration.\n\necho \"hello world\"\n\nexit 0\n"
},
{
"shell": "#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"hello\"\nVAR2=\"world\"\nVAR3=\"${VAR1} ${VAR2}\"\n\n[[ -n \"${VAR3}\" ]] && {\n # this next section is executed as one\n echo \"${VAR3}\"\n exit 0\n}\n"
}
],
"description": "<!-- Managed by Jenkins Job Builder -->"
}
]

View File

@ -0,0 +1,45 @@
[
{
"name": "test-job-1",
"builders": [
{
"copyartifact": {
"project": "foo",
"filter": "*.tar.gz",
"target": "/home/foo",
"which-build": "last-successful",
"optional": true,
"flatten": true,
"do-not-fingerprint": true,
"parameter-filters": "PUBLISH=true"
}
},
{
"copyartifact": {
"project": "bar",
"filter": "*.tar.gz",
"target": "/home/foo",
"which-build": "specific-build",
"optional": true,
"flatten": true,
"do-not-fingerprint": true,
"parameter-filters": "PUBLISH=true",
"build-number": 123
}
},
{
"copyartifact": {
"project": "baz",
"filter": "*.tar.gz",
"target": "/home/foo",
"which-build": "upstream-build",
"optional": true,
"flatten": true,
"do-not-fingerprint": true,
"parameter-filters": "PUBLISH=true"
}
}
],
"description": "<!-- Managed by Jenkins Job Builder -->"
}
]

View File

@ -1,5 +1,7 @@
- builders:
- builder:
name: sample-builder
builders:
- copyartifact: &copytarball
project: foo
filter: "*.tar.gz"

View File

@ -0,0 +1,11 @@
[
{
"name": "test-job-include-raw-1",
"builders": [
{
"shell": "#!/bin/bash\n#\n# Sample script showing how the yaml include-raw tag can be used\n# to inline scripts that are maintained outside of the jenkins\n# job yaml configuration.\n\necho \"hello world\"\n\nexit 0\n\n#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"hello\"\nVAR2=\"world\"\nVAR3=\"${VAR1} ${VAR2}\"\n\n[[ -n \"${VAR3}\" ]] && {\n # this next section is executed as one\n echo \"${VAR3}\"\n exit 0\n}\n"
}
],
"description": "<!-- Managed by Jenkins Job Builder -->"
}
]

View File

@ -0,0 +1,43 @@
<?xml version="1.0" encoding="utf-8"?>
<project>
<actions/>
<description>&lt;!-- Managed by Jenkins Job Builder --&gt;</description>
<keepDependencies>false</keepDependencies>
<blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
<blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
<concurrentBuild>false</concurrentBuild>
<canRoam>true</canRoam>
<properties/>
<scm class="hudson.scm.NullSCM"/>
<builders>
<hudson.tasks.Shell>
<command>#!/bin/bash
#
# Sample script showing how the yaml include-raw tag can be used
# to inline scripts that are maintained outside of the jenkins
# job yaml configuration.
echo &quot;hello world&quot;
exit 0
#!/bin/bash
#
# sample script to check that brackets aren't escaped
# when using the include-raw application yaml tag
VAR1=&quot;hello&quot;
VAR2=&quot;world&quot;
VAR3=&quot;${VAR1} ${VAR2}&quot;
[[ -n &quot;${VAR3}&quot; ]] &amp;&amp; {
# this next section is executed as one
echo &quot;${VAR3}&quot;
exit 0
}
</command>
</hudson.tasks.Shell>
</builders>
<publishers/>
<buildWrappers/>
</project>

View File

@ -0,0 +1,14 @@
[
{
"name": "test-job-include-raw",
"builders": [
{
"shell": "#!/bin/bash\n#\n# Sample script showing how the yaml include-raw tag can be used\n# to inline scripts that are maintained outside of the jenkins\n# job yaml configuration.\n\necho \"hello world\"\n\nexit 0\n"
},
{
"shell": "#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"hello\"\nVAR2=\"world\"\nVAR3=\"${{VAR1}} ${{VAR2}}\"\n\n[[ -n \"${{VAR3}}\" ]] && {{\n # this next section is executed as one\n echo \"${{VAR3}}\"\n exit 0\n}}\n"
}
],
"description": "<!-- Managed by Jenkins Job Builder -->"
}
]

View File

@ -0,0 +1,45 @@
<?xml version="1.0" encoding="utf-8"?>
<project>
<actions/>
<description>&lt;!-- Managed by Jenkins Job Builder --&gt;</description>
<keepDependencies>false</keepDependencies>
<blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
<blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
<concurrentBuild>false</concurrentBuild>
<canRoam>true</canRoam>
<properties/>
<scm class="hudson.scm.NullSCM"/>
<builders>
<hudson.tasks.Shell>
<command>#!/bin/bash
#
# Sample script showing how the yaml include-raw tag can be used
# to inline scripts that are maintained outside of the jenkins
# job yaml configuration.
echo &quot;hello world&quot;
exit 0
</command>
</hudson.tasks.Shell>
<hudson.tasks.Shell>
<command>#!/bin/bash
#
# sample script to check that brackets aren't escaped
# when using the include-raw application yaml tag
VAR1=&quot;hello&quot;
VAR2=&quot;world&quot;
VAR3=&quot;${{VAR1}} ${{VAR2}}&quot;
[[ -n &quot;${{VAR3}}&quot; ]] &amp;&amp; {{
# this next section is executed as one
echo &quot;${{VAR3}}&quot;
exit 0
}}
</command>
</hudson.tasks.Shell>
</builders>
<publishers/>
<buildWrappers/>
</project>

View File

@ -0,0 +1,8 @@
# Using include-raw-escape inside a job causes double braces in the included file, like: {{VAR1}}.
- job:
name: test-job-include-raw
builders:
- shell:
!include-raw-escape: include-raw001-hello-world.sh
- shell:
!include-raw-escape: include-raw001-vars.sh

View File

@ -0,0 +1,14 @@
[
{
"name": "test-job-include-raw-1",
"builders": [
{
"shell": "#!/bin/bash\n#\n# Sample script showing how the yaml include-raw tag can be used\n# to inline scripts that are maintained outside of the jenkins\n# job yaml configuration.\n\necho \"hello world\"\n\nexit 0\n"
},
{
"shell": "#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"hello\"\nVAR2=\"world\"\nVAR3=\"${VAR1} ${VAR2}\"\n\n[[ -n \"${VAR3}\" ]] && {\n # this next section is executed as one\n echo \"${VAR3}\"\n exit 0\n}\n"
}
],
"description": "<!-- Managed by Jenkins Job Builder -->"
}
]

View File

@ -0,0 +1,45 @@
<?xml version="1.0" encoding="utf-8"?>
<project>
<actions/>
<description>&lt;!-- Managed by Jenkins Job Builder --&gt;</description>
<keepDependencies>false</keepDependencies>
<blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
<blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
<concurrentBuild>false</concurrentBuild>
<canRoam>true</canRoam>
<properties/>
<scm class="hudson.scm.NullSCM"/>
<builders>
<hudson.tasks.Shell>
<command>#!/bin/bash
#
# Sample script showing how the yaml include-raw tag can be used
# to inline scripts that are maintained outside of the jenkins
# job yaml configuration.
echo &quot;hello world&quot;
exit 0
</command>
</hudson.tasks.Shell>
<hudson.tasks.Shell>
<command>#!/bin/bash
#
# sample script to check that brackets aren't escaped
# when using the include-raw application yaml tag
VAR1=&quot;hello&quot;
VAR2=&quot;world&quot;
VAR3=&quot;${VAR1} ${VAR2}&quot;
[[ -n &quot;${VAR3}&quot; ]] &amp;&amp; {
# this next section is executed as one
echo &quot;${VAR3}&quot;
exit 0
}
</command>
</hudson.tasks.Shell>
</builders>
<publishers/>
<buildWrappers/>
</project>

View File

@ -0,0 +1,11 @@
[
{
"name": "test-job-include-raw-1",
"builders": [
{
"shell": "#!/bin/bash\n#\n# Sample script showing how the yaml include-raw tag can be used\n# to inline scripts that are maintained outside of the jenkins\n# job yaml configuration.\n\necho \"hello world\"\n\nexit 0\n\n#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"hello\"\nVAR2=\"world\"\nVAR3=\"${VAR1} ${VAR2}\"\n\n[[ -n \"${VAR3}\" ]] && {\n # this next section is executed as one\n echo \"${VAR3}\"\n exit 0\n}\n"
}
],
"description": "<!-- Managed by Jenkins Job Builder -->"
}
]

View File

@ -0,0 +1,43 @@
<?xml version="1.0" encoding="utf-8"?>
<project>
<actions/>
<description>&lt;!-- Managed by Jenkins Job Builder --&gt;</description>
<keepDependencies>false</keepDependencies>
<blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
<blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
<concurrentBuild>false</concurrentBuild>
<canRoam>true</canRoam>
<properties/>
<scm class="hudson.scm.NullSCM"/>
<builders>
<hudson.tasks.Shell>
<command>#!/bin/bash
#
# Sample script showing how the yaml include-raw tag can be used
# to inline scripts that are maintained outside of the jenkins
# job yaml configuration.
echo &quot;hello world&quot;
exit 0
#!/bin/bash
#
# sample script to check that brackets aren't escaped
# when using the include-raw application yaml tag
VAR1=&quot;hello&quot;
VAR2=&quot;world&quot;
VAR3=&quot;${VAR1} ${VAR2}&quot;
[[ -n &quot;${VAR3}&quot; ]] &amp;&amp; {
# this next section is executed as one
echo &quot;${VAR3}&quot;
exit 0
}
</command>
</hudson.tasks.Shell>
</builders>
<publishers/>
<buildWrappers/>
</project>

View File

@ -0,0 +1,14 @@
[
{
"name": "test-job-include-raw-1",
"builders": [
{
"shell": "#!/bin/bash\n#\n# Sample script showing how the yaml include-raw tag can be used\n# to inline scripts that are maintained outside of the jenkins\n# job yaml configuration.\n\necho \"hello world\"\n\nexit 0\n"
},
{
"shell": "#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"hello\"\nVAR2=\"world\"\nVAR3=\"${VAR1} ${VAR2}\"\n\n[[ -n \"${VAR3}\" ]] && {\n # this next section is executed as one\n echo \"${VAR3}\"\n exit 0\n}\n"
}
],
"description": "<!-- Managed by Jenkins Job Builder -->"
}
]

View File

@ -0,0 +1,45 @@
<?xml version="1.0" encoding="utf-8"?>
<project>
<actions/>
<description>&lt;!-- Managed by Jenkins Job Builder --&gt;</description>
<keepDependencies>false</keepDependencies>
<blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
<blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
<concurrentBuild>false</concurrentBuild>
<canRoam>true</canRoam>
<properties/>
<scm class="hudson.scm.NullSCM"/>
<builders>
<hudson.tasks.Shell>
<command>#!/bin/bash
#
# Sample script showing how the yaml include-raw tag can be used
# to inline scripts that are maintained outside of the jenkins
# job yaml configuration.
echo &quot;hello world&quot;
exit 0
</command>
</hudson.tasks.Shell>
<hudson.tasks.Shell>
<command>#!/bin/bash
#
# sample script to check that brackets aren't escaped
# when using the include-raw application yaml tag
VAR1=&quot;hello&quot;
VAR2=&quot;world&quot;
VAR3=&quot;${VAR1} ${VAR2}&quot;
[[ -n &quot;${VAR3}&quot; ]] &amp;&amp; {
# this next section is executed as one
echo &quot;${VAR3}&quot;
exit 0
}
</command>
</hudson.tasks.Shell>
</builders>
<publishers/>
<buildWrappers/>
</project>

View File

@ -0,0 +1,14 @@
#!/bin/bash
#
# sample script to check that brackets aren't escaped
# when using the include-raw application yaml tag
VAR1="{num}"
VAR2="world"
VAR3="${{VAR1}} ${{VAR2}}"
[[ -n "${{VAR3}}" ]] && {{
# this next section is executed as one
echo "${{VAR3}}"
exit 0
}}

View File

@ -0,0 +1,11 @@
[
{
"name": "test-job-include-raw-1",
"builders": [
{
"shell": "#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"1\"\nVAR2=\"world\"\nVAR3=\"${VAR1} ${VAR2}\"\n\n[[ -n \"${VAR3}\" ]] && {\n # this next section is executed as one\n echo \"${VAR3}\"\n exit 0\n}\n"
}
],
"description": "<!-- Managed by Jenkins Job Builder -->"
}
]

View File

@ -0,0 +1,33 @@
<?xml version="1.0" encoding="utf-8"?>
<project>
<actions/>
<description>&lt;!-- Managed by Jenkins Job Builder --&gt;</description>
<keepDependencies>false</keepDependencies>
<blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
<blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
<concurrentBuild>false</concurrentBuild>
<canRoam>true</canRoam>
<properties/>
<scm class="hudson.scm.NullSCM"/>
<builders>
<hudson.tasks.Shell>
<command>#!/bin/bash
#
# sample script to check that brackets aren't escaped
# when using the include-raw application yaml tag
VAR1=&quot;1&quot;
VAR2=&quot;world&quot;
VAR3=&quot;${VAR1} ${VAR2}&quot;
[[ -n &quot;${VAR3}&quot; ]] &amp;&amp; {
# this next section is executed as one
echo &quot;${VAR3}&quot;
exit 0
}
</command>
</hudson.tasks.Shell>
</builders>
<publishers/>
<buildWrappers/>
</project>

View File

@ -0,0 +1,11 @@
- job-template:
name: test-job-include-raw-{num}
builders:
- shell:
!include-raw: include-raw001-parameterized.sh
- project:
name: test-job-template
num: 1
jobs:
- 'test-job-include-raw-{num}'

View File

@ -0,0 +1,45 @@
[
{
"name": "test-job-1",
"builders": [
{
"copyartifact": {
"project": "foo",
"filter": "*.tar.gz",
"target": "/home/foo",
"which-build": "last-successful",
"optional": true,
"flatten": true,
"do-not-fingerprint": true,
"parameter-filters": "PUBLISH=true"
}
},
{
"copyartifact": {
"project": "bar",
"filter": "*.tar.gz",
"target": "/home/foo",
"which-build": "specific-build",
"optional": true,
"flatten": true,
"do-not-fingerprint": true,
"parameter-filters": "PUBLISH=true",
"build-number": 123
}
},
{
"copyartifact": {
"project": "baz",
"filter": "*.tar.gz",
"target": "/home/foo",
"which-build": "upstream-build",
"optional": true,
"flatten": true,
"do-not-fingerprint": true,
"parameter-filters": "PUBLISH=true"
}
}
],
"description": "<!-- Managed by Jenkins Job Builder -->"
}
]

View File

@ -0,0 +1,18 @@
[
{
"name": "sample-job",
"builders": [
{
"shell": [
{
"string-with-comma": "item1,item2,item3"
},
{
"string-with-space": "item1 item2 item3"
}
]
}
],
"description": "<!-- Managed by Jenkins Job Builder -->"
}
]

View File

@ -0,0 +1,17 @@
- job:
name: sample-job
builders:
- shell:
- string-with-comma: !join:
- ','
-
- item1
- item2
- item3
- string-with-space: !join:
- ' '
-
- item1
- item2
- item3

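In the '!join:' form above, the first element of the tagged list is the separator and the second is the list of strings to join; after expansion the job contains the single joined strings shown in the expected JSON just before this fixture.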
View File

@ -22,9 +22,10 @@ import json
import pytest
from yaml.composer import ComposerError
import jenkins_jobs.local_yaml as yaml
from jenkins_jobs.config import JJBConfig
from jenkins_jobs.parser import YamlParser
from jenkins_jobs.roots import Roots
from jenkins_jobs.loader import Loader, load_files
from jenkins_jobs.registry import ModuleRegistry
from tests.enum_scenarios import scenario_list
@ -33,12 +34,14 @@ fixtures_dir = Path(__file__).parent / "fixtures"
@pytest.fixture
def read_input(scenario):
def read_input(scenario, jjb_config):
def read():
return yaml.load(
loader = Loader(
scenario.in_path.read_text(),
search_path=[str(fixtures_dir)],
jjb_config=jjb_config,
source_path=scenario.in_path,
)
return loader.get_single_data()
return read
@ -51,15 +54,30 @@ def read_input(scenario):
if not s.name.startswith(("custom_", "exception_"))
],
)
def test_include(read_input, expected_output):
def test_include(scenario, jjb_config, expected_output):
"""
Verify application specific tags independently of any changes to
modules XML parsing behaviour
"""
input = read_input()
pretty_json = json.dumps(input, indent=4, separators=(",", ": "))
assert expected_output.rstrip() == pretty_json
roots = Roots(jjb_config)
load_files(jjb_config, roots, [scenario.in_path])
job_data_list = roots.generate_jobs()
pretty_json = json.dumps(job_data_list, indent=4)
print(pretty_json)
assert pretty_json == expected_output.strip()
@pytest.mark.parametrize(
"scenario",
[
pytest.param(s, id=s.name)
for s in scenario_list(fixtures_dir)
if not s.name.startswith(("custom_", "exception_")) and s.out_paths
],
)
def test_include_job(check_job):
check_job()
@pytest.mark.parametrize(
@ -70,9 +88,9 @@ def test_include(read_input, expected_output):
if s.name.startswith("exception_")
],
)
def test_include_error(read_input, expected_output):
def test_include_error(check_job):
with pytest.raises(ComposerError) as excinfo:
_ = read_input()
check_job()
assert str(excinfo.value).startswith("found duplicate anchor ")
@ -116,9 +134,9 @@ def test_include_anchors():
"custom_same_anchor-001-part2.yaml",
]
parser = YamlParser(config)
roots = Roots(config)
# Should not raise ComposerError.
parser.load_files([str(fixtures_dir / name) for name in files])
load_files(config, roots, [fixtures_dir / name for name in files])
def test_retain_anchor_default():
@ -134,9 +152,9 @@ def test_retain_anchor_default():
"custom_retain_anchors.yaml",
]
parser = YamlParser(config)
roots = Roots(config)
with pytest.raises(ComposerError) as excinfo:
parser.load_files([str(fixtures_dir / name) for name in files])
load_files(config, roots, [fixtures_dir / name for name in files])
assert "found undefined alias" in str(excinfo.value)
@ -155,9 +173,9 @@ def test_retain_anchors_enabled():
"custom_retain_anchors.yaml",
]
parser = YamlParser(config)
roots = Roots(config)
# Should not raise ComposerError.
parser.load_files([str(fixtures_dir / name) for name in files])
load_files(config, roots, [fixtures_dir / name for name in files])
def test_retain_anchors_enabled_j2_yaml():
@ -174,9 +192,10 @@ def test_retain_anchors_enabled_j2_yaml():
"custom_retain_anchors_j2_yaml.yaml",
]
parser = YamlParser(config)
parser.load_files([str(fixtures_dir / name) for name in files])
roots = Roots(config)
load_files(config, roots, [fixtures_dir / name for name in files])
registry = ModuleRegistry(config, None)
jobs, _ = parser.expandYaml(registry)
registry.set_macros(roots.macros)
jobs = roots.generate_jobs()
assert "docker run ubuntu:latest" == jobs[0]["builders"][0]["shell"]

View File

@ -1,24 +0,0 @@
[
{
"job-template": {
"name": "test-job-include-raw-{num}",
"builders": [
{
"shell": "#!/bin/bash\n#\n# Sample script showing how the yaml include-raw tag can be used\n# to inline scripts that are maintained outside of the jenkins\n# job yaml configuration.\n\necho \"hello world\"\n\nexit 0\n"
},
{
"shell": "#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"hello\"\nVAR2=\"world\"\nVAR3=\"${{VAR1}} ${{VAR2}}\"\n\n[[ -n \"${{VAR3}}\" ]] && {{\n # this next section is executed as one\n echo \"${{VAR3}}\"\n exit 0\n}}\n"
}
]
}
},
{
"project": {
"name": "test-job-template-1",
"num": 1,
"jobs": [
"test-job-include-raw-{num}"
]
}
}
]

View File

@ -1,15 +0,0 @@
[
{
"job": {
"name": "test-job-include-raw-1",
"builders": [
{
"shell": "#!/bin/bash\n#\n# Sample script showing how the yaml include-raw tag can be used\n# to inline scripts that are maintained outside of the jenkins\n# job yaml configuration.\n\necho \"hello world\"\n\nexit 0\n"
},
{
"shell": "#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"hello\"\nVAR2=\"world\"\nVAR3=\"${VAR1} ${VAR2}\"\n\n[[ -n \"${VAR3}\" ]] && {\n # this next section is executed as one\n echo \"${VAR3}\"\n exit 0\n}\n"
}
]
}
}
]

View File

@ -1,46 +0,0 @@
[
{
"job": {
"name": "test-job-1",
"builders": [
{
"copyartifact": {
"project": "foo",
"filter": "*.tar.gz",
"target": "/home/foo",
"which-build": "last-successful",
"optional": true,
"flatten": true,
"do-not-fingerprint": true,
"parameter-filters": "PUBLISH=true"
}
},
{
"copyartifact": {
"project": "bar",
"filter": "*.tar.gz",
"target": "/home/foo",
"which-build": "specific-build",
"optional": true,
"flatten": true,
"do-not-fingerprint": true,
"parameter-filters": "PUBLISH=true",
"build-number": 123
}
},
{
"copyartifact": {
"project": "baz",
"filter": "*.tar.gz",
"target": "/home/foo",
"which-build": "upstream-build",
"optional": true,
"flatten": true,
"do-not-fingerprint": true,
"parameter-filters": "PUBLISH=true"
}
}
]
}
}
]

View File

@ -1,21 +0,0 @@
[
{
"job-template": {
"name": "test-job-include-raw-{num}",
"builders": [
{
"shell": "#!/bin/bash\n#\n# Sample script showing how the yaml include-raw tag can be used\n# to inline scripts that are maintained outside of the jenkins\n# job yaml configuration.\n\necho \"hello world\"\n\nexit 0\n\n#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"hello\"\nVAR2=\"world\"\nVAR3=\"${{VAR1}} ${{VAR2}}\"\n\n[[ -n \"${{VAR3}}\" ]] && {{\n # this next section is executed as one\n echo \"${{VAR3}}\"\n exit 0\n}}\n"
}
]
}
},
{
"project": {
"name": "test-job-template-1",
"num": 1,
"jobs": [
"test-job-include-raw-{num}"
]
}
}
]

View File

@ -1,24 +0,0 @@
[
{
"job-template": {
"name": "test-job-include-raw-{num}",
"builders": [
{
"shell": "#!/bin/bash\n#\n# Sample script showing how the yaml include-raw tag can be used\n# to inline scripts that are maintained outside of the jenkins\n# job yaml configuration.\n\necho \"hello world\"\n\nexit 0\n"
},
{
"shell": "#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"hello\"\nVAR2=\"world\"\nVAR3=\"${{VAR1}} ${{VAR2}}\"\n\n[[ -n \"${{VAR3}}\" ]] && {{\n # this next section is executed as one\n echo \"${{VAR3}}\"\n exit 0\n}}\n"
}
]
}
},
{
"project": {
"name": "test-job-template-1",
"num": 1,
"jobs": [
"test-job-include-raw-{num}"
]
}
}
]

View File

@ -1,12 +0,0 @@
[
{
"job": {
"name": "test-job-include-raw-1",
"builders": [
{
"shell": "#!/bin/bash\n#\n# Sample script showing how the yaml include-raw tag can be used\n# to inline scripts that are maintained outside of the jenkins\n# job yaml configuration.\n\necho \"hello world\"\n\nexit 0\n\n#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"hello\"\nVAR2=\"world\"\nVAR3=\"${VAR1} ${VAR2}\"\n\n[[ -n \"${VAR3}\" ]] && {\n # this next section is executed as one\n echo \"${VAR3}\"\n exit 0\n}\n"
}
]
}
}
]

View File

@ -1,15 +0,0 @@
[
{
"job": {
"name": "test-job-include-raw-1",
"builders": [
{
"shell": "#!/bin/bash\n#\n# Sample script showing how the yaml include-raw tag can be used\n# to inline scripts that are maintained outside of the jenkins\n# job yaml configuration.\n\necho \"hello world\"\n\nexit 0\n"
},
{
"shell": "#!/bin/bash\n#\n# sample script to check that brackets aren't escaped\n# when using the include-raw application yaml tag\n\nVAR1=\"hello\"\nVAR2=\"world\"\nVAR3=\"${VAR1} ${VAR2}\"\n\n[[ -n \"${VAR3}\" ]] && {\n # this next section is executed as one\n echo \"${VAR3}\"\n exit 0\n}\n"
}
]
}
}
]

View File

@ -1,46 +0,0 @@
[
{
"job": {
"name": "test-job-1",
"builders": [
{
"copyartifact": {
"project": "foo",
"filter": "*.tar.gz",
"target": "/home/foo",
"which-build": "last-successful",
"optional": true,
"flatten": true,
"do-not-fingerprint": true,
"parameter-filters": "PUBLISH=true"
}
},
{
"copyartifact": {
"project": "bar",
"filter": "*.tar.gz",
"target": "/home/foo",
"which-build": "specific-build",
"optional": true,
"flatten": true,
"do-not-fingerprint": true,
"parameter-filters": "PUBLISH=true",
"build-number": 123
}
},
{
"copyartifact": {
"project": "baz",
"filter": "*.tar.gz",
"target": "/home/foo",
"which-build": "upstream-build",
"optional": true,
"flatten": true,
"do-not-fingerprint": true,
"parameter-filters": "PUBLISH=true"
}
}
]
}
}
]

View File

@ -1,8 +0,0 @@
[
{
"string-with-comma": "item1,item2,item3"
},
{
"string-with-space": "item1 item2 item3"
}
]

View File

@ -1,13 +0,0 @@
- string-with-comma: !join:
- ','
-
- item1
- item2
- item3
- string-with-space: !join:
- ' '
-
- item1
- item2
- item3

View File

@ -0,0 +1 @@
echo "Parameter branch={{ branches }} is passed as macro parameter to this script"

View File

@ -0,0 +1,18 @@
- builder:
name: a-builder
builders:
- shell:
!include-jinja2: missing_include_j2.yaml.inc
- project:
name: missing_params_for_component
jobs:
- 'template-requiring-component-param-{os}':
os: 'ubuntu-xenial'
- job-template:
name: 'template-requiring-component-param-{os}'
disabled: true
builders:
- a-builder:
branch: master

View File

@ -0,0 +1,20 @@
- scm:
name: default-git-scm
scm:
- git:
url: https://github.com/openstack-infra/jenkins-job-builder.git
branches: !j2: '{{ branches }}'
clean: true
- project:
name: missing_params_for_component
jobs:
- 'template-requiring-component-param-{os}':
os: 'ubuntu-xenial'
- job-template:
name: 'template-requiring-component-param-{os}'
disabled: true
scm:
- default-git-scm:
branch: master

View File

@ -18,35 +18,41 @@ import pytest
from jenkins_jobs.config import JJBConfig
from jenkins_jobs.errors import JenkinsJobsException
from jenkins_jobs.parser import YamlParser
from jenkins_jobs.registry import ModuleRegistry
from jenkins_jobs.xml_config import XmlJobGenerator, XmlViewGenerator
from jenkins_jobs.roots import Roots
from jenkins_jobs.loader import load_files
fixtures_dir = Path(__file__).parent / "exceptions"
# Override jjb_config and plugins_info so that scenarios won't be used.
@pytest.fixture
def config():
def jjb_config():
config = JJBConfig()
config.validate()
return config
@pytest.fixture
def parser(config):
return YamlParser(config)
def plugins_info():
return None
@pytest.fixture
def registry(config):
return ModuleRegistry(config)
def parser(jjb_config, registry):
def parse(fname):
roots = Roots(jjb_config)
load_files(jjb_config, roots, [fixtures_dir / fname])
registry.set_macros(roots.macros)
return roots
return parse
def test_invalid_project(parser, registry):
parser.parse(str(fixtures_dir / "invalid_project.yaml"))
jobs, views = parser.expandYaml(registry)
roots = parser("invalid_project.yaml")
jobs = roots.generate_jobs()
generator = XmlJobGenerator(registry)
with pytest.raises(JenkinsJobsException) as excinfo:
@ -55,9 +61,8 @@ def test_invalid_project(parser, registry):
def test_invalid_view(parser, registry):
parser.parse(str(fixtures_dir / "invalid_view.yaml"))
jobs, views = parser.expandYaml(registry)
roots = parser("invalid_view.yaml")
views = roots.generate_views()
generator = XmlViewGenerator(registry)
with pytest.raises(JenkinsJobsException) as excinfo:
@ -65,14 +70,44 @@ def test_invalid_view(parser, registry):
assert "Unrecognized view-type:" in str(excinfo.value)
def test_template_params(caplog, parser, registry):
parser.parse(str(fixtures_dir / "failure_formatting_component.yaml"))
registry.set_parser_data(parser.data)
jobs, views = parser.expandYaml(registry)
def test_template_params(parser, registry):
roots = parser("failure_formatting_component.yaml")
jobs = roots.generate_jobs()
generator = XmlJobGenerator(registry)
with pytest.raises(Exception):
with pytest.raises(Exception) as excinfo:
generator.generateXML(jobs)
assert "Failure formatting component" in caplog.text
assert "Problem formatting with args" in caplog.text
message = (
"While expanding macro 'default-git-scm':"
" While formatting string '{branches}': Missing parameter: 'branches'"
)
assert str(excinfo.value) == message
def test_missing_j2_param(parser, registry):
roots = parser("missing_j2_parameter.yaml")
jobs = roots.generate_jobs()
generator = XmlJobGenerator(registry)
with pytest.raises(Exception) as excinfo:
generator.generateXML(jobs)
message = (
"While expanding macro 'default-git-scm':"
" While formatting jinja2 template '{{ branches }}': 'branches' is undefined"
)
assert str(excinfo.value) == message
def test_missing_include_j2_param(parser, registry):
roots = parser("missing_include_j2_parameter.yaml")
jobs = roots.generate_jobs()
generator = XmlJobGenerator(registry)
with pytest.raises(Exception) as excinfo:
generator.generateXML(jobs)
message = (
"While expanding macro 'a-builder':"
" While formatting jinja2 template 'echo \"Parameter branch={{ branches }} is...':"
" 'branches' is undefined"
)
assert str(excinfo.value) == message

View File

@ -0,0 +1 @@
Project missing_params_for_params: Job/view dict should be single-item, but have keys ['template-requiring-param-{os}', 'os']. Missing indent?

View File

@ -0,0 +1,18 @@
- defaults:
name: global
date: 20161015
- project:
name: missing_params_for_params
# deliberately missing value for 'bdate' to trigger
# problem formatting params with default
flavor:
- trusty-{date}
- xenial-{bdate}
jobs:
- 'template-requiring-param-{os}':
os: 'ubuntu-{flavour}'
- job-template:
name: 'template-requiring-param-{os}'
disabled: true

View File

@ -0,0 +1 @@
While expanding 'flavor', used by , used by template 'template-requiring-param-{os}': While formatting string 'xenial-{bdate}': 'bdate' is undefined

View File

@ -0,0 +1 @@
While formatting string 'template-requiring-param-{os}': Missing parameter: 'os'

View File

@ -0,0 +1 @@
Invalid parameter 'stream' definition for template 'template-incorrect-args-{stream}-{os}': Expected a value or a dict with single element, but got: {'current': None, 'branch': 'current'}

View File

@ -0,0 +1 @@
Job group group-1: Failed to find suitable job/view/template named 'job-2'

View File

@ -1,4 +1,4 @@
# When job group includes job which is never declared, it's just ignored.
# When a job group includes a job which is never declared, it issues an error.
- job-template:
name: job-1

View File

@ -1 +1 @@
'missing_param' is undefined
While expanding macro 'sample-builder': While formatting jinja2 template 'echo {{ missing_param }} {{ other_param ...': 'missing_param' is undefined

Some files were not shown because too many files have changed in this diff