Rewrite YAML parser
Rewrite the YAML parser, YAML objects and parameter expansion logic to allow better control over expansion.

Backward compatibility is broken in the following ways:

* More aggressive parameter expansion. Parameters may now be expanded in places where they were not expanded before.

* Top-level elements that are not known to the parser (the known ones being 'job', 'view', 'project' etc.) now lead to parse failures. Prepend them with an underscore to have the parser ignore them.

* Files included using '!include-raw:' elements whose paths contain format placeholders ('lazy-loaded' in the previous implementation) are now expanded too. Use '!include-raw-escape:' for them instead. See the changes in these tests for examples:
  tests/yamlparser/job_fixtures/lazy-load-jobs-multi001.yaml
  tests/yamlparser/job_fixtures/lazy-load-jobs-multi002.yaml
  tests/yamlparser/job_fixtures/lazy-load-jobs001.yaml

* Parameters whose template value referenced the parameter itself used to be substituted as-is. For example, "timer: '{timer}'" was expanded to "{timer}". This now raises a recursive parameter error. See the changes in this test for an example:
  tests/yamlparser/job_fixtures/parameter_name_reuse_default.* -> tests/yamlparser/error_fixtures/parameter_name_reuse_default.*

* When a job group included a job that was never declared, the missing job was silently ignored. This now fails with a missing-job error. See the changes in this test for an example:
  tests/yamlparser/job_fixtures/job_group_includes_missing_job.* -> tests/yamlparser/error_fixtures/job_group_includes_missing_job.*

Change-Id: Ief4e515f065a1b9e0f74fe06d7e94fa77d69f273
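For illustration only (this example is not part of the commit, and the element and parameter names are hypothetical), the second and fourth items translate to YAML like this:

    # Unknown top-level elements now fail the parse; prefix them with an
    # underscore so the loader skips them.
    - _shared-settings:
        owner: infra-team

    # A template parameter whose value references itself was previously kept
    # as the literal string '{timer}'; it now raises a recursive parameter
    # error.
    - job-template:
        name: nightly-build
        timer: '{timer}'
        triggers:
          - timed: '{timer}'

Definitions like the second one now need the self-reference removed, for example by renaming one of the two parameters.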
parent a47e4ee896
commit af9e03ec08
@@ -659,12 +659,12 @@ the same anchor name in included files without collisions.

 A simple example can be seen in the specs `full length example`_ with the
 following being more representative of usage within JJB:

-.. literalinclude:: /../../tests/localyaml/fixtures/anchors_aliases.iyaml
+.. literalinclude:: /../../tests/loader/fixtures/anchors_aliases.iyaml

 Which will be expanded to the following yaml before being processed:

-.. literalinclude:: /../../tests/localyaml/fixtures/anchors_aliases.oyaml
+.. literalinclude:: /../../tests/loader/fixtures/anchors_aliases.oyaml

 .. _full length example: https://yaml.org/spec/1.2.2/#25-full-length-example

@@ -674,7 +674,7 @@ Which will be expanded to the following yaml before being processed:

 Custom Yaml Tags
 ----------------

-.. automodule:: jenkins_jobs.local_yaml
+.. automodule:: jenkins_jobs.yaml_objects


 Modules
@@ -17,6 +17,7 @@ import io
 import os
 import logging
 import platform
+from pathlib import Path

 from stevedore import extension
 import yaml
@@ -126,7 +127,9 @@ class JenkinsJobs(object):
             self.options.path = [self.options.path]
         else:
             # take list of paths
-            self.options.path = self.options.path.split(os.pathsep)
+            self.options.path = [
+                Path(p) for p in self.options.path.split(os.pathsep)
+            ]

         do_recurse = (
             getattr(self.options, "recursive", False)
@@ -142,7 +145,7 @@ class JenkinsJobs(object):
                 paths.extend(utils.recurse_path(path, excludes))
             else:
                 paths.append(path)
-        self.options.path = paths
+        self.options.path = [Path(p) for p in paths]

     def execute(self):
@ -14,11 +14,39 @@
|
||||
# under the License.
|
||||
|
||||
import abc
|
||||
import six
|
||||
import fnmatch
|
||||
import logging
|
||||
import time
|
||||
|
||||
from jenkins_jobs.builder import JenkinsManager
|
||||
from jenkins_jobs.registry import ModuleRegistry
|
||||
from jenkins_jobs.roots import Roots
|
||||
from jenkins_jobs.xml_config import XmlJobGenerator
|
||||
from jenkins_jobs.xml_config import XmlViewGenerator
|
||||
from jenkins_jobs.loader import load_files
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class BaseSubCommand(object):
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def matches(name, glob_list):
|
||||
"""
|
||||
Checks if the given string, ``name``, matches any of the glob patterns in
|
||||
the iterable, ``glob_list``
|
||||
|
||||
:arg str name: String (job or view name) to test if it matches a pattern
|
||||
:arg iterable glob_list: glob patterns to match (list, tuple, set, etc.)
|
||||
"""
|
||||
return any(fnmatch.fnmatch(name, glob) for glob in glob_list)
|
||||
|
||||
|
||||
def filter_matching(item_list, glob_list):
|
||||
if not glob_list:
|
||||
return item_list
|
||||
return [item for item in item_list if matches(item["name"], glob_list)]
|
||||
|
||||
|
||||
class BaseSubCommand(metaclass=abc.ABCMeta):
|
||||
"""Base class for Jenkins Job Builder subcommands, intended to allow
|
||||
subcommands to be loaded as stevedore extensions by third party users.
|
||||
"""
|
||||
@ -69,3 +97,39 @@ class BaseSubCommand(object):
|
||||
help="paths to exclude when using recursive search, "
|
||||
"uses standard globbing.",
|
||||
)
|
||||
|
||||
|
||||
class JobsSubCommand(BaseSubCommand):
|
||||
"""Base class for Jenkins Job Builder subcommands which generates jobs."""
|
||||
|
||||
def load_roots(self, jjb_config, path_list):
|
||||
roots = Roots(jjb_config)
|
||||
load_files(jjb_config, roots, path_list)
|
||||
return roots
|
||||
|
||||
def make_jobs_and_views_xml(self, jjb_config, path_list, glob_list):
|
||||
logger.info("Updating jobs in {0} ({1})".format(path_list, glob_list))
|
||||
orig = time.time()
|
||||
|
||||
roots = self.load_roots(jjb_config, path_list)
|
||||
|
||||
builder = JenkinsManager(jjb_config)
|
||||
|
||||
registry = ModuleRegistry(jjb_config, builder.plugins_list)
|
||||
registry.set_macros(roots.macros)
|
||||
|
||||
jobs = filter_matching(roots.generate_jobs(), glob_list)
|
||||
views = filter_matching(roots.generate_views(), glob_list)
|
||||
|
||||
registry.amend_job_dicts(jobs)
|
||||
|
||||
xml_job_generator = XmlJobGenerator(registry)
|
||||
xml_view_generator = XmlViewGenerator(registry)
|
||||
|
||||
xml_jobs = xml_job_generator.generateXML(jobs)
|
||||
xml_views = xml_view_generator.generateXML(views)
|
||||
|
||||
step = time.time()
|
||||
logging.debug("%d XML files generated in %ss", len(jobs), str(step - orig))
|
||||
|
||||
return builder, xml_jobs, xml_views
|
||||
|
@ -16,12 +16,10 @@
|
||||
|
||||
from jenkins_jobs.builder import JenkinsManager
|
||||
from jenkins_jobs.errors import JenkinsJobsException
|
||||
from jenkins_jobs.parser import YamlParser
|
||||
from jenkins_jobs.registry import ModuleRegistry
|
||||
import jenkins_jobs.cli.subcommand.base as base
|
||||
|
||||
|
||||
class DeleteSubCommand(base.BaseSubCommand):
|
||||
class DeleteSubCommand(base.JobsSubCommand):
|
||||
def parse_args(self, subparser):
|
||||
delete = subparser.add_parser("delete")
|
||||
|
||||
@ -59,23 +57,20 @@ class DeleteSubCommand(base.BaseSubCommand):
|
||||
'"--views-only" and "--jobs-only" cannot be used together.'
|
||||
)
|
||||
|
||||
fn = options.path
|
||||
registry = ModuleRegistry(jjb_config, builder.plugins_list)
|
||||
parser = YamlParser(jjb_config)
|
||||
|
||||
if fn:
|
||||
parser.load_files(fn)
|
||||
parser.expandYaml(registry, options.name)
|
||||
jobs = [j["name"] for j in parser.jobs]
|
||||
views = [v["name"] for v in parser.views]
|
||||
if options.path:
|
||||
roots = self.load_roots(jjb_config, options.path)
|
||||
jobs = base.filter_matching(roots.generate_jobs(), options.name)
|
||||
views = base.filter_matching(roots.generate_views(), options.name)
|
||||
job_names = [j["name"] for j in jobs]
|
||||
view_names = [v["name"] for v in views]
|
||||
else:
|
||||
jobs = options.name
|
||||
views = options.name
|
||||
job_names = options.name
|
||||
view_names = options.name
|
||||
|
||||
if options.del_jobs:
|
||||
builder.delete_jobs(jobs)
|
||||
builder.delete_jobs(job_names)
|
||||
elif options.del_views:
|
||||
builder.delete_views(views)
|
||||
builder.delete_views(view_names)
|
||||
else:
|
||||
builder.delete_jobs(jobs)
|
||||
builder.delete_views(views)
|
||||
builder.delete_jobs(job_names)
|
||||
builder.delete_views(view_names)
|
||||
|
@ -14,11 +14,10 @@
|
||||
# under the License.
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from jenkins_jobs.builder import JenkinsManager
|
||||
import jenkins_jobs.cli.subcommand.base as base
|
||||
import jenkins_jobs.utils as utils
|
||||
import jenkins_jobs.builder as builder
|
||||
import jenkins_jobs.parser as parser
|
||||
import jenkins_jobs.registry as registry
|
||||
|
||||
|
||||
def list_duplicates(seq):
|
||||
@ -26,7 +25,7 @@ def list_duplicates(seq):
|
||||
return set(x for x in seq if x in seen or seen.add(x))
|
||||
|
||||
|
||||
class ListSubCommand(base.BaseSubCommand):
|
||||
class ListSubCommand(base.JobsSubCommand):
|
||||
def parse_args(self, subparser):
|
||||
list = subparser.add_parser("list", help="List jobs")
|
||||
|
||||
@ -38,10 +37,7 @@ class ListSubCommand(base.BaseSubCommand):
|
||||
)
|
||||
|
||||
def execute(self, options, jjb_config):
|
||||
self.jjb_config = jjb_config
|
||||
self.jenkins = builder.JenkinsManager(jjb_config)
|
||||
|
||||
jobs = self.get_jobs(options.names, options.path)
|
||||
jobs = self.get_jobs(jjb_config, options.path, options.names)
|
||||
|
||||
logging.info("Matching jobs: %d", len(jobs))
|
||||
stdout = utils.wrap_stream(sys.stdout)
|
||||
@ -49,24 +45,23 @@ class ListSubCommand(base.BaseSubCommand):
|
||||
for job in jobs:
|
||||
stdout.write((job + "\n").encode("utf-8"))
|
||||
|
||||
def get_jobs(self, jobs_glob=None, fn=None):
|
||||
if fn:
|
||||
r = registry.ModuleRegistry(self.jjb_config, self.jenkins.plugins_list)
|
||||
p = parser.YamlParser(self.jjb_config)
|
||||
p.load_files(fn)
|
||||
p.expandYaml(r, jobs_glob)
|
||||
jobs = [j["name"] for j in p.jobs]
|
||||
def get_jobs(self, jjb_config, path_list, glob_list):
|
||||
if path_list:
|
||||
roots = self.load_roots(jjb_config, path_list)
|
||||
jobs = base.filter_matching(roots.generate_jobs(), glob_list)
|
||||
job_names = [j["name"] for j in jobs]
|
||||
else:
|
||||
jobs = [
|
||||
jenkins = JenkinsManager(jjb_config)
|
||||
job_names = [
|
||||
j["fullname"]
|
||||
for j in self.jenkins.get_jobs()
|
||||
if not jobs_glob or parser.matches(j["fullname"], jobs_glob)
|
||||
for j in jenkins.get_jobs()
|
||||
if not glob_list or base.matches(j["fullname"], glob_list)
|
||||
]
|
||||
|
||||
jobs = sorted(jobs)
|
||||
for duplicate in list_duplicates(jobs):
|
||||
job_names = sorted(job_names)
|
||||
for duplicate in list_duplicates(job_names):
|
||||
logging.warning("Found duplicate job name '%s', likely bug.", duplicate)
|
||||
|
||||
logging.debug("Builder.get_jobs: returning %r", jobs)
|
||||
logging.debug("Builder.get_jobs: returning %r", job_names)
|
||||
|
||||
return jobs
|
||||
return job_names
|
||||
|
@@ -51,7 +51,7 @@ class TestSubCommand(update.UpdateSubCommand):

     def execute(self, options, jjb_config):
         if not options.config_xml:
-            logger.warn(
+            logger.warning(
                 "(Deprecated) The default output behavior of"
                 " `jenkins-jobs test` when given the --output"
                 " flag will change in JJB 3.0."
@@ -61,7 +61,9 @@ class TestSubCommand(update.UpdateSubCommand):
                 " `--config-xml` parameter."
             )

-        builder, xml_jobs, xml_views = self._generate_xmljobs(options, jjb_config)
+        builder, xml_jobs, xml_views = self.make_jobs_and_views_xml(
+            jjb_config, options.path, options.names
+        )

         builder.update_jobs(
             xml_jobs,
@ -15,13 +15,7 @@
|
||||
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
|
||||
from jenkins_jobs.builder import JenkinsManager
|
||||
from jenkins_jobs.parser import YamlParser
|
||||
from jenkins_jobs.registry import ModuleRegistry
|
||||
from jenkins_jobs.xml_config import XmlJobGenerator
|
||||
from jenkins_jobs.xml_config import XmlViewGenerator
|
||||
from jenkins_jobs.errors import JenkinsJobsException
|
||||
import jenkins_jobs.cli.subcommand.base as base
|
||||
|
||||
@ -29,7 +23,7 @@ import jenkins_jobs.cli.subcommand.base as base
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class UpdateSubCommand(base.BaseSubCommand):
|
||||
class UpdateSubCommand(base.JobsSubCommand):
|
||||
def parse_arg_path(self, parser):
|
||||
parser.add_argument(
|
||||
"path",
|
||||
@ -107,39 +101,15 @@ class UpdateSubCommand(base.BaseSubCommand):
|
||||
help="update only views",
|
||||
)
|
||||
|
||||
def _generate_xmljobs(self, options, jjb_config=None):
|
||||
builder = JenkinsManager(jjb_config)
|
||||
|
||||
logger.info("Updating jobs in {0} ({1})".format(options.path, options.names))
|
||||
orig = time.time()
|
||||
|
||||
# Generate XML
|
||||
parser = YamlParser(jjb_config)
|
||||
registry = ModuleRegistry(jjb_config, builder.plugins_list)
|
||||
xml_job_generator = XmlJobGenerator(registry)
|
||||
xml_view_generator = XmlViewGenerator(registry)
|
||||
|
||||
parser.load_files(options.path)
|
||||
registry.set_parser_data(parser.data)
|
||||
|
||||
job_data_list, view_data_list = parser.expandYaml(registry, options.names)
|
||||
|
||||
xml_jobs = xml_job_generator.generateXML(job_data_list)
|
||||
xml_views = xml_view_generator.generateXML(view_data_list)
|
||||
|
||||
jobs = parser.jobs
|
||||
step = time.time()
|
||||
logging.debug("%d XML files generated in %ss", len(jobs), str(step - orig))
|
||||
|
||||
return builder, xml_jobs, xml_views
|
||||
|
||||
def execute(self, options, jjb_config):
|
||||
if options.n_workers < 0:
|
||||
raise JenkinsJobsException(
|
||||
"Number of workers must be equal or greater than 0"
|
||||
)
|
||||
|
||||
builder, xml_jobs, xml_views = self._generate_xmljobs(options, jjb_config)
|
||||
builder, xml_jobs, xml_views = self.make_jobs_and_views_xml(
|
||||
jjb_config, options.path, options.names
|
||||
)
|
||||
|
||||
if options.enabled_only:
|
||||
# filter out jobs which are disabled
|
||||
|
jenkins_jobs/defaults.py (new file, 187 lines)
@@ -0,0 +1,187 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
job_contents_keys = {
|
||||
# Same as for macros.
|
||||
"parameters",
|
||||
"properties",
|
||||
"builders",
|
||||
"wrappers",
|
||||
"triggers",
|
||||
"publishers",
|
||||
"scm",
|
||||
"pipeline-scm",
|
||||
"reporters",
|
||||
# General.
|
||||
"project-type",
|
||||
"folder",
|
||||
"node",
|
||||
"jdk",
|
||||
"actions",
|
||||
"disabled",
|
||||
"display-name",
|
||||
"block-downstream",
|
||||
"block-upstream",
|
||||
"auth-token",
|
||||
"concurrent",
|
||||
"workspace",
|
||||
"child-workspace",
|
||||
"quiet-period",
|
||||
"retry-count",
|
||||
"logrotate",
|
||||
"raw",
|
||||
# Builders.
|
||||
"prebuilders",
|
||||
"postbuilders",
|
||||
# HipChat.
|
||||
"hipchat",
|
||||
# Notifications.
|
||||
"notifications",
|
||||
# project Flow.
|
||||
"dsl",
|
||||
"needs-workspace",
|
||||
"dsl-file",
|
||||
# GithubOrganization.
|
||||
"prune-dead-branches",
|
||||
"days-to-keep",
|
||||
"number-to-keep",
|
||||
"periodic-folder-trigger",
|
||||
"github-org",
|
||||
"script-path",
|
||||
# Matrix.
|
||||
"execution-strategy",
|
||||
"yaml-strategy",
|
||||
"p4-strategy",
|
||||
"axes",
|
||||
# Maven.
|
||||
"maven",
|
||||
"per-module-email",
|
||||
# WorkflowMultiBranch.
|
||||
"sandbox",
|
||||
"script-id",
|
||||
"script-path",
|
||||
"prune-dead-branches",
|
||||
"days-to-keep",
|
||||
"number-to-keep",
|
||||
"periodic-folder-trigger",
|
||||
# Pipeline.
|
||||
"dsl",
|
||||
"sandbox",
|
||||
# project Workflow.
|
||||
"dsl",
|
||||
"sandbox",
|
||||
}
|
||||
|
||||
view_contents_keys = {
|
||||
# Common.
|
||||
"filter-executors",
|
||||
"filter-queue",
|
||||
# All
|
||||
# <nothing>
|
||||
# List.
|
||||
"job-name",
|
||||
"job-filters",
|
||||
"width",
|
||||
"alignment",
|
||||
"columns",
|
||||
"regex",
|
||||
"recurse",
|
||||
# Sectioned.
|
||||
"sections",
|
||||
# SectionedText.
|
||||
"width",
|
||||
"alignment",
|
||||
"text",
|
||||
"style",
|
||||
# DeliveryPipeline.
|
||||
"aggregated-changes-grouping-pattern",
|
||||
"allow-abort",
|
||||
"allow-manual-triggers",
|
||||
"allow-pipeline-start",
|
||||
"allow-rebuild",
|
||||
"link-relative",
|
||||
"link-to-console-log",
|
||||
"max-number-of-visible-pipelines",
|
||||
"name",
|
||||
"no-of-columns",
|
||||
"no-of-pipelines",
|
||||
"paging-enabled",
|
||||
"show-absolute-date-time",
|
||||
"show-aggregated-changes",
|
||||
"show-aggregated-pipeline",
|
||||
"show-avatars",
|
||||
"show-changes",
|
||||
"show-description",
|
||||
"show-promotions",
|
||||
"show-static-analysis-results",
|
||||
"show-test-results",
|
||||
"show-total-build-time",
|
||||
"update-interval",
|
||||
"sorting",
|
||||
"components",
|
||||
"regexps",
|
||||
# Nested.
|
||||
"views",
|
||||
"default-view",
|
||||
"columns",
|
||||
# Pipeline.
|
||||
"first-job",
|
||||
"name",
|
||||
"no-of-displayed-builds",
|
||||
"title",
|
||||
"link-style",
|
||||
"css-Url",
|
||||
"latest-job-only",
|
||||
"manual-trigger",
|
||||
"show-parameters",
|
||||
"parameters-in-headers",
|
||||
"start-with-parameters",
|
||||
"refresh-frequency",
|
||||
"definition-header",
|
||||
}
|
||||
|
||||
|
||||
def split_contents_params(data, contents_keys):
|
||||
contents = {key: value for key, value in data.items() if key in contents_keys}
|
||||
params = {key: value for key, value in data.items() if key not in contents_keys}
|
||||
return (contents, params)
|
||||
|
||||
|
||||
@dataclass
|
||||
class Defaults:
|
||||
name: str
|
||||
params: dict
|
||||
contents: dict # Values that go to job contents.
|
||||
|
||||
@classmethod
|
||||
def add(cls, config, roots, expander, params_expander, data):
|
||||
d = {**data}
|
||||
name = d.pop("name")
|
||||
contents, params = split_contents_params(
|
||||
d, job_contents_keys | view_contents_keys
|
||||
)
|
||||
defaults = cls(name, params, contents)
|
||||
roots.defaults[name] = defaults
|
||||
|
||||
@classmethod
|
||||
def empty(cls):
|
||||
return Defaults("empty", params={}, contents={})
|
||||
|
||||
def merged_with_global(self, global_):
|
||||
return Defaults(
|
||||
name=f"{self.name}-merged-with-global",
|
||||
params={**global_.params, **self.params},
|
||||
contents={**global_.contents, **self.contents},
|
||||
)
|
jenkins_jobs/dimensions.py (new file, 89 lines)
@@ -0,0 +1,89 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import itertools
|
||||
|
||||
from .errors import JenkinsJobsException
|
||||
|
||||
|
||||
def merge_dicts(dict_list):
|
||||
result = {}
|
||||
for d in dict_list:
|
||||
result.update(d)
|
||||
return result
|
||||
|
||||
|
||||
class DimensionsExpander:
|
||||
def __init__(self, context):
|
||||
self._context = context
|
||||
|
||||
def enum_dimensions_params(self, axes, params, defaults):
|
||||
if not axes:
|
||||
# No axes - instantiate one job/view.
|
||||
yield {}
|
||||
return
|
||||
dim_values = []
|
||||
for axis in axes:
|
||||
try:
|
||||
value = params[axis]
|
||||
except KeyError:
|
||||
try:
|
||||
value = defaults[axis]
|
||||
except KeyError:
|
||||
continue  # Maybe the value will be provided by the values of another axis.
|
||||
value = self._decode_axis_value(axis, value)
|
||||
dim_values.append(value)
|
||||
for values in itertools.product(*dim_values):
|
||||
yield merge_dicts(values)
|
||||
|
||||
def _decode_axis_value(self, axis, value):
|
||||
if not isinstance(value, list):
|
||||
yield {axis: value}
|
||||
return
|
||||
for item in value:
|
||||
if not isinstance(item, dict):
|
||||
yield {axis: item}
|
||||
continue
|
||||
if len(item.items()) != 1:
|
||||
raise JenkinsJobsException(
|
||||
f"Invalid parameter {axis!r} definition for template {self._context!r}:"
|
||||
f" Expected a value or a dict with single element, but got: {item!r}"
|
||||
)
|
||||
value, p = next(iter(item.items()))
|
||||
yield {
|
||||
axis: value, # Point axis value.
|
||||
**p,  # Point-specific parameters. May override axis value.
|
||||
}
|
||||
|
||||
def is_point_included(self, exclude_list, params):
|
||||
return not any(self._match_exclude(params, el) for el in exclude_list or [])
|
||||
|
||||
def _match_exclude(self, params, exclude):
|
||||
if not isinstance(exclude, dict):
|
||||
raise JenkinsJobsException(
|
||||
f"Template {self._context!r}: Exclude element should be dict, but is: {exclude!r}"
|
||||
)
|
||||
if not exclude:
|
||||
raise JenkinsJobsException(
|
||||
f"Template {self._context!r}: Exclude element should be dict, but is empty: {exclude!r}"
|
||||
)
|
||||
for axis, value in exclude.items():
|
||||
try:
|
||||
v = params[axis]
|
||||
except KeyError:
|
||||
raise JenkinsJobsException(
|
||||
f"Template {self._context!r}: Unknown axis {axis!r} for exclude element: {exclude!r}"
|
||||
)
|
||||
if value != v:
|
||||
return False
|
||||
# All required exclude values are matched.
|
||||
return True
|
jenkins_jobs/expander.py (new file, 214 lines)
@@ -0,0 +1,214 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from functools import partial
|
||||
|
||||
from jinja2 import StrictUndefined
|
||||
|
||||
from .errors import JenkinsJobsException
|
||||
from .formatter import CustomFormatter, enum_str_format_required_params
|
||||
from .yaml_objects import (
|
||||
J2String,
|
||||
J2Yaml,
|
||||
YamlInclude,
|
||||
YamlListJoin,
|
||||
IncludeJinja2,
|
||||
IncludeRaw,
|
||||
IncludeRawEscape,
|
||||
)
|
||||
|
||||
|
||||
def expand_dict(expander, obj, params):
|
||||
result = {}
|
||||
for key, value in obj.items():
|
||||
expanded_key = expander.expand(key, params)
|
||||
expanded_value = expander.expand(value, params)
|
||||
result[expanded_key] = expanded_value
|
||||
return result
|
||||
|
||||
|
||||
def expand_list(expander, obj, params):
|
||||
return [expander.expand(item, params) for item in obj]
|
||||
|
||||
|
||||
def expand_tuple(expander, obj, params):
|
||||
return tuple(expander.expand(item, params) for item in obj)
|
||||
|
||||
|
||||
class StrExpander:
|
||||
def __init__(self, config):
|
||||
allow_empty = config.yamlparser["allow_empty_variables"]
|
||||
self._formatter = CustomFormatter(allow_empty)
|
||||
|
||||
def __call__(self, obj, params):
|
||||
return self._formatter.format(obj, **params)
|
||||
|
||||
|
||||
def call_expand(expander, obj, params):
|
||||
return obj.expand(expander, params)
|
||||
|
||||
|
||||
def call_subst(expander, obj, params):
|
||||
return obj.subst(expander, params)
|
||||
|
||||
|
||||
def dont_expand(obj, params):
|
||||
return obj
|
||||
|
||||
|
||||
yaml_classes_list = [
|
||||
J2String,
|
||||
J2Yaml,
|
||||
YamlInclude,
|
||||
YamlListJoin,
|
||||
IncludeJinja2,
|
||||
IncludeRaw,
|
||||
IncludeRawEscape,
|
||||
]
|
||||
|
||||
deprecated_yaml_tags = [
|
||||
("!include", YamlInclude),
|
||||
("!include-raw", IncludeRaw),
|
||||
("!include-raw-escape", IncludeRawEscape),
|
||||
]
|
||||
|
||||
|
||||
# Does not expand string formats. Used in jobs and macros without parameters.
|
||||
class Expander:
|
||||
def __init__(self, config):
|
||||
_yaml_object_expanders = {
|
||||
cls: partial(call_expand, self) for cls in yaml_classes_list
|
||||
}
|
||||
self.expanders = {
|
||||
dict: partial(expand_dict, self),
|
||||
list: partial(expand_list, self),
|
||||
tuple: partial(expand_tuple, self),
|
||||
str: dont_expand,
|
||||
bool: dont_expand,
|
||||
int: dont_expand,
|
||||
float: dont_expand,
|
||||
type(None): dont_expand,
|
||||
**_yaml_object_expanders,
|
||||
}
|
||||
|
||||
def expand(self, obj, params):
|
||||
t = type(obj)
|
||||
try:
|
||||
expander = self.expanders[t]
|
||||
except KeyError:
|
||||
raise RuntimeError(f"Do not know how to expand type: {t!r}")
|
||||
return expander(obj, params)
|
||||
|
||||
|
||||
# Also expands string formats. Used in job templates and macros with parameters.
|
||||
class ParamsExpander(Expander):
|
||||
def __init__(self, config):
|
||||
super().__init__(config)
|
||||
_yaml_object_expanders = {
|
||||
cls: partial(call_subst, self) for cls in yaml_classes_list
|
||||
}
|
||||
self.expanders.update(
|
||||
{
|
||||
str: StrExpander(config),
|
||||
**_yaml_object_expanders,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def call_required_params(obj):
|
||||
yield from obj.required_params
|
||||
|
||||
|
||||
def enum_dict_params(obj):
|
||||
for key, value in obj.items():
|
||||
yield from enum_required_params(key)
|
||||
yield from enum_required_params(value)
|
||||
|
||||
|
||||
def enum_seq_params(obj):
|
||||
for value in obj:
|
||||
yield from enum_required_params(value)
|
||||
|
||||
|
||||
def no_parameters(obj):
|
||||
return []
|
||||
|
||||
|
||||
yaml_classes_enumers = {cls: call_required_params for cls in yaml_classes_list}
|
||||
|
||||
param_enumers = {
|
||||
str: enum_str_format_required_params,
|
||||
dict: enum_dict_params,
|
||||
list: enum_seq_params,
|
||||
tuple: enum_seq_params,
|
||||
bool: no_parameters,
|
||||
int: no_parameters,
|
||||
float: no_parameters,
|
||||
type(None): no_parameters,
|
||||
**yaml_classes_enumers,
|
||||
}
|
||||
|
||||
# Do not expand these.
|
||||
disable_expand_for = {"template-name"}
|
||||
|
||||
|
||||
def enum_required_params(obj):
|
||||
t = type(obj)
|
||||
try:
|
||||
enumer = param_enumers[t]
|
||||
except KeyError:
|
||||
raise RuntimeError(
|
||||
f"Do not know how to enumerate required parameters for type: {t!r}"
|
||||
)
|
||||
return enumer(obj)
|
||||
|
||||
|
||||
def expand_parameters(expander, param_dict, template_name):
|
||||
expanded_params = {}
|
||||
deps = {} # Using dict as ordered set.
|
||||
|
||||
def expand(name):
|
||||
try:
|
||||
return expanded_params[name]
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
format = param_dict[name]
|
||||
except KeyError:
|
||||
return StrictUndefined(name=name)
|
||||
if name in deps:
|
||||
raise RuntimeError(
|
||||
f"While expanding {name!r} for template {template_name!r}:"
|
||||
f" Recursive parameters usage: {name} <- {' <- '.join(deps)}"
|
||||
)
|
||||
if name in disable_expand_for:
|
||||
value = format
|
||||
else:
|
||||
required_params = list(enum_required_params(format))
|
||||
deps[name] = None
|
||||
try:
|
||||
params = {n: expand(n) for n in required_params}
|
||||
finally:
|
||||
deps.popitem()
|
||||
try:
|
||||
value = expander.expand(format, params)
|
||||
except JenkinsJobsException as x:
|
||||
used_by_deps = ", used by".join(f"{d!r}" for d in deps)
|
||||
raise RuntimeError(
|
||||
f"While expanding {name!r}, used by {used_by_deps}, used by template {template_name!r}: {x}"
|
||||
)
|
||||
expanded_params[name] = value
|
||||
return value
|
||||
|
||||
for name in param_dict:
|
||||
expand(name)
|
||||
return expanded_params
|
@ -15,82 +15,19 @@
|
||||
|
||||
# Manage interpolation of JJB variables into template strings.
|
||||
|
||||
import _string
|
||||
import logging
|
||||
from pprint import pformat
|
||||
import re
|
||||
from string import Formatter
|
||||
|
||||
from jenkins_jobs.errors import JenkinsJobsException
|
||||
from jenkins_jobs.local_yaml import CustomLoader
|
||||
from jinja2 import Undefined
|
||||
from jinja2.exceptions import UndefinedError
|
||||
|
||||
from .errors import JenkinsJobsException
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def deep_format(obj, paramdict, allow_empty=False):
|
||||
"""Deep format configuration.
|
||||
|
||||
Apply the paramdict via str.format() to all string objects found within
|
||||
the supplied obj. Lists and dicts are traversed recursively.
|
||||
"""
|
||||
# YAML serialisation was originally used to achieve this, but that places
|
||||
# limitations on the values in paramdict - the post-format result must
|
||||
# still be valid YAML (so substituting-in a string containing quotes, for
|
||||
# example, is problematic).
|
||||
if hasattr(obj, "format"):
|
||||
try:
|
||||
ret = CustomFormatter(allow_empty).format(obj, **paramdict)
|
||||
except KeyError as exc:
|
||||
missing_key = exc.args[0]
|
||||
desc = "%s parameter missing to format %s\nGiven:\n%s" % (
|
||||
missing_key,
|
||||
obj,
|
||||
pformat(paramdict),
|
||||
)
|
||||
raise JenkinsJobsException(desc)
|
||||
except Exception:
|
||||
logging.error(
|
||||
"Problem formatting with args:\nallow_empty:"
|
||||
"%s\nobj: %s\nparamdict: %s" % (allow_empty, obj, paramdict)
|
||||
)
|
||||
raise
|
||||
|
||||
elif isinstance(obj, list):
|
||||
ret = type(obj)()
|
||||
for item in obj:
|
||||
ret.append(deep_format(item, paramdict, allow_empty))
|
||||
elif isinstance(obj, dict):
|
||||
ret = type(obj)()
|
||||
for item in obj:
|
||||
try:
|
||||
ret[deep_format(item, paramdict, allow_empty)] = deep_format(
|
||||
obj[item], paramdict, allow_empty
|
||||
)
|
||||
except KeyError as exc:
|
||||
missing_key = exc.args[0]
|
||||
desc = "%s parameter missing to format %s\nGiven:\n%s" % (
|
||||
missing_key,
|
||||
obj,
|
||||
pformat(paramdict),
|
||||
)
|
||||
raise JenkinsJobsException(desc)
|
||||
except Exception:
|
||||
logging.error(
|
||||
"Problem formatting with args:\nallow_empty:"
|
||||
"%s\nobj: %s\nparamdict: %s" % (allow_empty, obj, paramdict)
|
||||
)
|
||||
raise
|
||||
else:
|
||||
ret = obj
|
||||
if isinstance(ret, CustomLoader):
|
||||
# If we have a CustomLoader here, we've lazily-loaded a template
|
||||
# or rendered a template to a piece of YAML;
|
||||
# attempt to format it.
|
||||
ret = deep_format(
|
||||
ret.get_object_to_format(), paramdict, allow_empty=allow_empty
|
||||
)
|
||||
return ret
|
||||
|
||||
|
||||
class CustomFormatter(Formatter):
|
||||
"""
|
||||
Custom formatter to allow non-existing key references when formatting a
|
||||
@ -104,25 +41,25 @@ class CustomFormatter(Formatter):
|
||||
(?:\|(?P<default>[^}]*))? # default fallback
|
||||
}(}})*(?!}) # non-pair closing }
|
||||
"""
|
||||
_matcher = re.compile(_expr, re.VERBOSE)
|
||||
_whole_matcher = re.compile(f"^{_expr}$", re.VERBOSE)
|
||||
|
||||
def __init__(self, allow_empty=False):
|
||||
super(CustomFormatter, self).__init__()
|
||||
super().__init__()
|
||||
self.allow_empty = allow_empty
|
||||
|
||||
def vformat(self, format_string, args, kwargs):
|
||||
matcher = re.compile(self._expr, re.VERBOSE)
|
||||
|
||||
# special case of returning the object if the entire string
|
||||
# matches a single parameter
|
||||
try:
|
||||
result = re.match("^%s$" % self._expr, format_string, re.VERBOSE)
|
||||
except TypeError:
|
||||
return format_string.format(**kwargs)
|
||||
# Special case of returning the object, preserving its type, if the entire string
|
||||
# matches a single parameter.
|
||||
result = self._whole_matcher.match(format_string)
|
||||
if result is not None:
|
||||
try:
|
||||
return kwargs[result.group("key")]
|
||||
value = kwargs[result.group("key")]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
if not isinstance(value, Undefined):
|
||||
return value
|
||||
|
||||
# handle multiple fields within string via a callback to re.sub()
|
||||
def re_replace(match):
|
||||
@ -130,23 +67,65 @@ class CustomFormatter(Formatter):
|
||||
default = match.group("default")
|
||||
|
||||
if default is not None:
|
||||
if key not in kwargs:
|
||||
if key not in kwargs or isinstance(kwargs[key], Undefined):
|
||||
return default
|
||||
else:
|
||||
return "{%s}" % key
|
||||
return match.group(0)
|
||||
|
||||
format_string = matcher.sub(re_replace, format_string)
|
||||
format_string = self._matcher.sub(re_replace, format_string)
|
||||
|
||||
return Formatter.vformat(self, format_string, args, kwargs)
|
||||
try:
|
||||
return super().vformat(format_string, args, kwargs)
|
||||
except (JenkinsJobsException, UndefinedError) as x:
|
||||
if len(format_string) > 40:
|
||||
short_fmt = format_string[:80] + "..."
|
||||
else:
|
||||
short_fmt = format_string
|
||||
raise JenkinsJobsException(f"While formatting string {short_fmt!r}: {x}")
|
||||
|
||||
def enum_required_params(self, format_string):
|
||||
def re_replace(match):
|
||||
key = match.group("key")
|
||||
return "{%s}" % key
|
||||
|
||||
prepared_format_string = self._matcher.sub(re_replace, format_string)
|
||||
for literal_text, field_name, format_spec, conversion in self.parse(
|
||||
prepared_format_string
|
||||
):
|
||||
if field_name is None:
|
||||
continue
|
||||
arg_used, rest = _string.formatter_field_name_split(field_name)
|
||||
if arg_used == "" or type(arg_used) is int:
|
||||
raise RuntimeError(
|
||||
f"Positional format arguments are not supported: {format_string!r}"
|
||||
)
|
||||
yield arg_used
|
||||
|
||||
def enum_param_defaults(self, format_string):
|
||||
for match in self._matcher.finditer(format_string):
|
||||
key = match.group("key")
|
||||
default = match.group("default")
|
||||
if default is not None:
|
||||
yield (key, default)
|
||||
|
||||
def get_value(self, key, args, kwargs):
|
||||
try:
|
||||
return Formatter.get_value(self, key, args, kwargs)
|
||||
return super().get_value(key, args, kwargs)
|
||||
except KeyError:
|
||||
if self.allow_empty:
|
||||
logger.debug(
|
||||
"Found uninitialized key %s, replaced with empty string", key
|
||||
)
|
||||
return ""
|
||||
raise
|
||||
raise JenkinsJobsException(f"Missing parameter: {key!r}")
|
||||
|
||||
|
||||
def enum_str_format_required_params(format):
|
||||
formatter = CustomFormatter()
|
||||
yield from formatter.enum_required_params(format)
|
||||
|
||||
|
||||
def enum_str_format_param_defaults(format):
|
||||
formatter = CustomFormatter()
|
||||
yield from formatter.enum_param_defaults(format)
|
||||
|
jenkins_jobs/job.py (new file, 107 lines)
@@ -0,0 +1,107 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from .root_base import RootBase, NonTemplateRootMixin, TemplateRootMixin, Group
|
||||
from .defaults import split_contents_params, job_contents_keys
|
||||
|
||||
|
||||
@dataclass
|
||||
class JobBase(RootBase):
|
||||
project_type: str
|
||||
folder: str
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, config, roots, expander, data):
|
||||
keep_descriptions = config.yamlparser["keep_descriptions"]
|
||||
d = {**data}
|
||||
name = d.pop("name")
|
||||
id = d.pop("id", None)
|
||||
description = d.pop("description", None)
|
||||
defaults = d.pop("defaults", "global")
|
||||
project_type = d.pop("project-type", None)
|
||||
folder = d.pop("folder", None)
|
||||
contents, params = split_contents_params(d, job_contents_keys)
|
||||
return cls(
|
||||
roots.defaults,
|
||||
expander,
|
||||
keep_descriptions,
|
||||
id,
|
||||
name,
|
||||
description,
|
||||
defaults,
|
||||
params,
|
||||
contents,
|
||||
project_type,
|
||||
folder,
|
||||
)
|
||||
|
||||
def _as_dict(self):
|
||||
data = {
|
||||
"name": self._full_name,
|
||||
**self.contents,
|
||||
}
|
||||
if self.project_type:
|
||||
data["project-type"] = self.project_type
|
||||
return data
|
||||
|
||||
@property
|
||||
def _full_name(self):
|
||||
if self.folder:
|
||||
return f"{self.folder}/{self.name}"
|
||||
else:
|
||||
return self.name
|
||||
|
||||
|
||||
class Job(JobBase, NonTemplateRootMixin):
|
||||
@classmethod
|
||||
def add(cls, config, roots, expander, param_expander, data):
|
||||
job = cls.from_dict(config, roots, expander, data)
|
||||
roots.assign(roots.jobs, job.id, job, "job")
|
||||
|
||||
|
||||
class JobTemplate(JobBase, TemplateRootMixin):
|
||||
@classmethod
|
||||
def add(cls, config, roots, expander, params_expander, data):
|
||||
template = cls.from_dict(config, roots, params_expander, data)
|
||||
roots.assign(roots.job_templates, template.id, template, "job template")
|
||||
|
||||
|
||||
@dataclass
|
||||
class JobGroup(Group):
|
||||
_jobs: dict
|
||||
_job_templates: dict
|
||||
|
||||
@classmethod
|
||||
def add(cls, config, roots, expander, params_expander, data):
|
||||
d = {**data}
|
||||
name = d.pop("name")
|
||||
job_specs = [
|
||||
cls._spec_from_dict(item, error_context=f"Job group {name}")
|
||||
for item in d.pop("jobs", [])
|
||||
]
|
||||
group = cls(
|
||||
name,
|
||||
job_specs,
|
||||
d,
|
||||
roots.jobs,
|
||||
roots.job_templates,
|
||||
)
|
||||
roots.assign(roots.job_groups, group.name, group, "job group")
|
||||
|
||||
def __str__(self):
|
||||
return f"Job group {self.name}"
|
||||
|
||||
@property
|
||||
def _root_dicts(self):
|
||||
return [self._jobs, self._job_templates]
|
jenkins_jobs/loader.py (new file, 151 lines)
@@ -0,0 +1,151 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import io
|
||||
import logging
|
||||
from functools import partial
|
||||
|
||||
import yaml
|
||||
|
||||
from .errors import JenkinsJobsException
|
||||
from .yaml_objects import BaseYamlObject
|
||||
from .expander import Expander, ParamsExpander, deprecated_yaml_tags, yaml_classes_list
|
||||
from .roots import root_adders
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Loader(yaml.Loader):
|
||||
@classmethod
|
||||
def empty(cls, jjb_config):
|
||||
return cls(io.StringIO(), jjb_config)
|
||||
|
||||
def __init__(self, stream, jjb_config, source_path=None, anchors=None):
|
||||
super().__init__(stream)
|
||||
self.jjb_config = jjb_config
|
||||
self.source_path = source_path
|
||||
self._retain_anchors = jjb_config.yamlparser["retain_anchors"]
|
||||
if anchors:
|
||||
# Override default set by super class.
|
||||
self.anchors = anchors
|
||||
|
||||
# Override the default composer to skip resetting the anchors at the
|
||||
# end of the current document.
|
||||
def compose_document(self):
|
||||
# Drop the DOCUMENT-START event.
|
||||
self.get_event()
|
||||
# Compose the root node.
|
||||
node = self.compose_node(None, None)
|
||||
# Drop the DOCUMENT-END event.
|
||||
self.get_event()
|
||||
return node
|
||||
|
||||
def _with_stream(self, stream, source_path):
|
||||
return Loader(stream, self.jjb_config, source_path, self.anchors)
|
||||
|
||||
def load_fp(self, fp):
|
||||
return self.load(fp)
|
||||
|
||||
def load_path(self, path):
|
||||
return self.load(path.read_text(), source_path=path)
|
||||
|
||||
def load(self, stream, source_path=None):
|
||||
loader = self._with_stream(stream, source_path)
|
||||
try:
|
||||
return loader.get_single_data()
|
||||
finally:
|
||||
loader.dispose()
|
||||
if self._retain_anchors:
|
||||
self.anchors.update(loader.anchors)
|
||||
|
||||
|
||||
def load_deprecated_yaml(tag, cls, loader, node):
|
||||
logger.warning("Tag %r is deprecated, switch to using %r", tag, cls.yaml_tag)
|
||||
return cls.from_yaml(loader, node)
|
||||
|
||||
|
||||
for cls in yaml_classes_list:
|
||||
yaml.add_constructor(cls.yaml_tag, cls.from_yaml, Loader)
|
||||
|
||||
for tag, cls in deprecated_yaml_tags:
|
||||
yaml.add_constructor(tag, partial(load_deprecated_yaml, tag, cls), Loader)
|
||||
|
||||
|
||||
def is_stdin(path):
|
||||
return hasattr(path, "read")
|
||||
|
||||
|
||||
def enum_expanded_paths(path_list):
|
||||
visited_set = set()
|
||||
|
||||
def real(path):
|
||||
real_path = path.resolve()
|
||||
if real_path in visited_set:
|
||||
logger.warning(
|
||||
"File '%s' is already added as '%s'; ignoring reference to avoid"
|
||||
" duplicating YAML definitions.",
|
||||
path,
|
||||
real_path,
|
||||
)
|
||||
else:
|
||||
yield real_path
|
||||
visited_set.add(real_path)
|
||||
|
||||
for path in path_list:
|
||||
if is_stdin(path):
|
||||
yield path
|
||||
elif path.is_dir():
|
||||
for p in path.iterdir():
|
||||
if p.suffix in {".yml", ".yaml"}:
|
||||
yield from real(p)
|
||||
else:
|
||||
yield from real(path)
|
||||
|
||||
|
||||
def load_files(config, roots, path_list):
|
||||
expander = Expander(config)
|
||||
params_expander = ParamsExpander(config)
|
||||
loader = Loader.empty(config)
|
||||
for path in enum_expanded_paths(path_list):
|
||||
if is_stdin(path):
|
||||
data = loader.load_fp(path)
|
||||
else:
|
||||
data = loader.load_path(path)
|
||||
if not isinstance(data, list):
|
||||
raise JenkinsJobsException(
|
||||
f"The topmost collection in file '{path}' must be a list,"
|
||||
f" not a {type(data)}"
|
||||
)
|
||||
for item in data:
|
||||
if not isinstance(item, dict):
|
||||
raise JenkinsJobsException(
|
||||
f"{path}: Topmost list should contain single-item dict,"
|
||||
f" not a {type(item)}. Missing indent?"
|
||||
)
|
||||
if len(item) != 1:
|
||||
raise JenkinsJobsException(
|
||||
f"{path}: Topmost dict should be single-item,"
|
||||
f" but have keys {item.keys()}. Missing indent?"
|
||||
)
|
||||
kind, contents = next(iter(item.items()))
|
||||
if kind.startswith("_"):
|
||||
continue
|
||||
if isinstance(contents, BaseYamlObject):
|
||||
contents = contents.expand(expander, params={})
|
||||
try:
|
||||
adder = root_adders[kind]
|
||||
except KeyError:
|
||||
raise JenkinsJobsException(
|
||||
f"{path}: Unknown topmost element type : {kind!r},"
|
||||
f" Known are: {','.join(root_adders)}."
|
||||
)
|
||||
adder(config, roots, expander, params_expander, contents)
|
@ -1,676 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (C) 2013 Hewlett-Packard.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# Provides local yaml parsing classes and extend yaml module
|
||||
|
||||
"""Custom application specific yamls tags are supported to provide
|
||||
enhancements when reading yaml configuration.
|
||||
|
||||
Action Tags
|
||||
^^^^^^^^^^^
|
||||
|
||||
These allow manipulation of data being stored in one layout in the source
|
||||
yaml for convenience and/or clarity, to another format to be processed by
|
||||
the targeted module instead of requiring all modules in JJB being capable
|
||||
of supporting multiple input formats.
|
||||
|
||||
The tag ``!join:`` will treat the first element of the following list as
|
||||
the delimiter to use, when joining the remaining elements into a string
|
||||
and returning a single string to be consumed by the specified module option.
|
||||
|
||||
This allows users to maintain elements of data in a list structure for ease
|
||||
of review/maintenance, and have the yaml parser convert it to a string for
|
||||
consumption as any argument for modules. The main expected use case is to
|
||||
allow for generic plugin data such as shell properties to be populated from
|
||||
a list construct which the yaml parser converts to a single string, instead
|
||||
of trying to support this within the module code which would require a
|
||||
templating engine similar to Jinja.
|
||||
|
||||
Generic Example:
|
||||
|
||||
.. literalinclude:: /../../tests/localyaml/fixtures/joinlists.yaml
|
||||
|
||||
|
||||
Environment Inject:
|
||||
|
||||
.. literalinclude:: /../../tests/yamlparser/job_fixtures/string_join.yaml
|
||||
|
||||
|
||||
While this mechanism can also be used items where delimiters are supported by
|
||||
the module, that should be considered a bug that the existing code doesn't
|
||||
handle being provided a list and delimiter to perform the correct conversion
|
||||
for you. Should you discover a module that takes arguments with delimiters and
|
||||
the existing JJB codebase does not handle accepting lists, then this can be
|
||||
used as a temporary solution in place of using very long strings:
|
||||
|
||||
Extended Params Example:
|
||||
|
||||
.. literalinclude::
|
||||
/../../tests/parameters/fixtures/extended-choice-param-full.yaml
|
||||
|
||||
|
||||
Inclusion Tags
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
These allow inclusion of arbitrary files as a method of having blocks of data
|
||||
managed separately to the yaml job configurations. A specific usage of this is
|
||||
inlining scripts contained in separate files, although such tags may also be
|
||||
used to simplify usage of macros or job templates.
|
||||
|
||||
The tag ``!include:`` will treat the following string as file which should be
|
||||
parsed as yaml configuration data.
|
||||
|
||||
Example:
|
||||
|
||||
.. literalinclude:: /../../tests/localyaml/fixtures/include001.yaml
|
||||
|
||||
contents of include001.yaml.inc:
|
||||
|
||||
.. literalinclude:: /../../tests/yamlparser/job_fixtures/include001.yaml.inc
|
||||
|
||||
|
||||
The tag ``!include-raw:`` will treat the given string or list of strings as
|
||||
filenames to be opened as one or more data blob, which should be read into
|
||||
the calling yaml construct without any further parsing. Any data in a file
|
||||
included through this tag, will be treated as string data.
|
||||
|
||||
Examples:
|
||||
|
||||
.. literalinclude:: /../../tests/localyaml/fixtures/include-raw001.yaml
|
||||
|
||||
contents of include-raw001-hello-world.sh:
|
||||
|
||||
.. literalinclude::
|
||||
/../../tests/localyaml/fixtures/include-raw001-hello-world.sh
|
||||
|
||||
contents of include-raw001-vars.sh:
|
||||
|
||||
.. literalinclude::
|
||||
/../../tests/localyaml/fixtures/include-raw001-vars.sh
|
||||
|
||||
using a list of files:
|
||||
|
||||
.. literalinclude::
|
||||
/../../tests/localyaml/fixtures/include-raw-multi001.yaml
|
||||
|
||||
The tag |