Move tests to pytest

Pytest makes each scenario into an individual, selectable test.
Being able to run each scenario separately is very useful for development.

Change-Id: I4b1c990a1fd839ce327cd7faa27159a9b9632fed
This commit is contained in:
Vsevolod Fedorov 2022-06-15 11:12:23 +03:00
parent 248a2bddb7
commit f4d64f9f66
49 changed files with 2042 additions and 1953 deletions

View File

@ -90,6 +90,10 @@ execute the command::
tox -e py38 tox -e py38
Unit tests could be run in parallel, using pytest-parallel pytest plugin::
tox -e py38 -- --workers=auto
* Note: View ``tox.ini`` to run tests on other versions of Python, * Note: View ``tox.ini`` to run tests on other versions of Python,
generating the documentation and additionally for any special notes generating the documentation and additionally for any special notes
on running the test to validate documentation external URLs from behind on running the test to validate documentation external URLs from behind

View File

@ -3,13 +3,11 @@
# process, which may cause wedges in the gate later. # process, which may cause wedges in the gate later.
coverage>=4.0 # Apache-2.0 coverage>=4.0 # Apache-2.0
fixtures>=3.0.0 # Apache-2.0/BSD
python-subunit>=0.0.18 # Apache-2.0/BSD python-subunit>=0.0.18 # Apache-2.0/BSD
sphinx>=4.4.0 sphinx>=4.4.0
testscenarios>=0.4 # Apache-2.0/BSD
testtools>=1.4.0 # MIT testtools>=1.4.0 # MIT
stestr>=2.0.0,!=3.0.0 # Apache-2.0/BSD
tox>=2.9.1 # MIT tox>=2.9.1 # MIT
mock>=2.0; python_version < '3.0' # BSD
sphinxcontrib-programoutput sphinxcontrib-programoutput
pluggy<1.0.0 # the last version that supports Python 2 pytest==7.1.2
pytest-mock==3.7.0
pytest-parallel==0.1.1

View File

View File

@ -1,409 +0,0 @@
#!/usr/bin/env python
#
# Joint copyright:
# - Copyright 2012,2013 Wikimedia Foundation
# - Copyright 2012,2013 Antoine "hashar" Musso
# - Copyright 2013 Arnaud Fabre
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import doctest
import configparser
import io
import json
import logging
import os
import pkg_resources
import re
import xml.etree.ElementTree as XML
import fixtures
import six
from six.moves import StringIO
import testtools
from testtools.content import text_content
import testscenarios
from yaml import safe_dump
from jenkins_jobs.config import JJBConfig
from jenkins_jobs.errors import InvalidAttributeError
import jenkins_jobs.local_yaml as yaml
from jenkins_jobs.alphanum import AlphanumSort
from jenkins_jobs.modules import project_externaljob
from jenkins_jobs.modules import project_flow
from jenkins_jobs.modules import project_githuborg
from jenkins_jobs.modules import project_matrix
from jenkins_jobs.modules import project_maven
from jenkins_jobs.modules import project_multibranch
from jenkins_jobs.modules import project_multijob
from jenkins_jobs.modules import view_all
from jenkins_jobs.modules import view_delivery_pipeline
from jenkins_jobs.modules import view_list
from jenkins_jobs.modules import view_nested
from jenkins_jobs.modules import view_pipeline
from jenkins_jobs.modules import view_sectioned
from jenkins_jobs.parser import YamlParser
from jenkins_jobs.registry import ModuleRegistry
from jenkins_jobs.xml_config import XmlJob
from jenkins_jobs.xml_config import XmlJobGenerator
# This dance deals with the fact that we want unittest.mock if
# we're on Python 3.4 and later, and non-stdlib mock otherwise.
try:
from unittest import mock # noqa
except ImportError:
import mock # noqa
def get_scenarios(
    fixtures_path,
    in_ext="yaml",
    out_ext="xml",
    plugins_info_ext="plugins_info.yaml",
    filter_func=None,
):
    """Collect test scenarios from the fixture files under *fixtures_path*.

    Returns a list of ``(name, params)`` pairs where ``params`` carries
    ``in_filename`` (the input fixture), ``out_filenames`` (list of expected
    output files, or ``None`` when no reference file exists),
    ``conf_filename`` and ``plugins_info_filename``.
    """
    # Index every file in the tree by basename -> list of full paths.
    paths_by_name = {}
    for dirpath, _, names in os.walk(fixtures_path):
        for name in names:
            paths_by_name.setdefault(name, []).append(os.path.join(dirpath, name))

    input_pattern = re.compile(r".*\.{0}$".format(in_ext))
    input_files = [
        paths[0] for name, paths in paths_by_name.items() if input_pattern.match(name)
    ]

    scenarios = []
    for in_path in input_files:
        # Plugins-info files are auxiliary inputs, never scenarios themselves.
        if in_path.endswith(plugins_info_ext):
            continue
        if callable(filter_func) and filter_func(in_path):
            continue

        out_candidate = re.sub(
            r"\.{0}$".format(in_ext), ".{0}".format(out_ext), in_path
        )
        # A missing output fixture means "expect empty output" downstream.
        out_paths = paths_by_name.get(os.path.basename(out_candidate))

        plugins_info_path = re.sub(
            r"\.{0}$".format(in_ext), ".{0}".format(plugins_info_ext), in_path
        )
        if os.path.basename(plugins_info_path) not in paths_by_name:
            plugins_info_path = None

        conf_candidate = re.sub(r"\.yaml$|\.json$", ".conf", in_path)
        conf_paths = paths_by_name.get(os.path.basename(conf_candidate))
        if conf_paths:
            conf_path = conf_paths[0]
        else:
            # For testing purposes we want to avoid using user config files.
            conf_path = os.devnull

        scenarios.append(
            (
                in_path,
                {
                    "in_filename": in_path,
                    "out_filenames": out_paths,
                    "conf_filename": conf_path,
                    "plugins_info_filename": plugins_info_path,
                },
            )
        )
    return scenarios
class BaseTestCase(testtools.TestCase):
    """Common helpers shared by all fixture-driven test cases."""

    # testtools/unittest knobs: always dump the full text difference and keep
    # the standard failure message when a custom one is supplied.
    maxDiff = None
    longMessage = True

    def setUp(self):
        super(BaseTestCase, self).setUp()
        # Capture every log record so failing tests can expose it.
        self.logger = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))

    def _read_utf8_content(self):
        """Return the expected output as one unicode string.

        ``out_filenames`` being falsy is treated as an empty expected file.
        """
        if not self.out_filenames:
            return ""
        # Read each reference file as UTF-8 and concatenate in sorted order.
        chunks = []
        for path in sorted(self.out_filenames):
            with io.open(path, "r", encoding="utf-8") as xml_file:
                chunks.append(xml_file.read())
        return "".join(chunks)

    def _read_yaml_content(self, filename):
        """Parse *filename* with JJB's local YAML loader."""
        with io.open(filename, "r", encoding="utf-8") as yaml_file:
            return yaml.load(yaml_file)

    def _get_config(self):
        """Build and validate a JJBConfig from the scenario's conf file."""
        jjb_config = JJBConfig(self.conf_filename)
        jjb_config.validate()
        return jjb_config
class BaseScenariosTestCase(testscenarios.TestWithScenarios, BaseTestCase):
    """Run one generated test per fixture scenario.

    Subclasses provide ``scenarios`` (built with ``get_scenarios``),
    ``fixtures_path`` and ``klass`` — the module class whose XML generation
    is under test.
    """

    scenarios = []
    fixtures_path = None

    @mock.patch("pkg_resources.iter_entry_points")
    def test_yaml_snippet(self, iter_entry_points_mock):
        # Renamed from ``mock`` so the parameter no longer shadows the
        # ``mock`` module used by the decorator above.
        if not self.in_filename:
            return

        jjb_config = self._get_config()
        expected_xml = self._read_utf8_content()
        yaml_content = self._read_yaml_content(self.in_filename)
        plugins_info = None
        if self.plugins_info_filename:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail(
                "plugins-info-filename", text_content(self.plugins_info_filename)
            )
            self.addDetail("plugins-info", text_content(str(plugins_info)))

        parser = YamlParser(jjb_config)
        e = pkg_resources.EntryPoint.parse
        d = pkg_resources.Distribution()
        # Read entry points straight from setup.cfg so the tests do not
        # depend on the package being installed.
        config = configparser.ConfigParser()
        config.read(os.path.dirname(__file__) + "/../setup.cfg")
        groups = {}
        for key in config["entry_points"]:
            groups[key] = list()
            for line in config["entry_points"][key].split("\n"):
                if "" == line.strip():
                    continue
                groups[key].append(e(line, dist=d))

        def mock_iter_entry_points(group, name=None):
            return (
                entry for entry in groups[group] if name is None or name == entry.name
            )

        iter_entry_points_mock.side_effect = mock_iter_entry_points

        registry = ModuleRegistry(jjb_config, plugins_info)
        registry.set_parser_data(parser.data)
        pub = self.klass(registry)

        # Pick the project/view implementation declared by the fixture.
        project = None
        if "project-type" in yaml_content:
            if yaml_content["project-type"] == "maven":
                project = project_maven.Maven(registry)
            elif yaml_content["project-type"] == "matrix":
                project = project_matrix.Matrix(registry)
            elif yaml_content["project-type"] == "flow":
                project = project_flow.Flow(registry)
            elif yaml_content["project-type"] == "githuborg":
                project = project_githuborg.GithubOrganization(registry)
            elif yaml_content["project-type"] == "multijob":
                project = project_multijob.MultiJob(registry)
            elif yaml_content["project-type"] == "multibranch":
                project = project_multibranch.WorkflowMultiBranch(registry)
            elif yaml_content["project-type"] == "multibranch-defaults":
                project = project_multibranch.WorkflowMultiBranchDefaults(registry)
            elif yaml_content["project-type"] == "externaljob":
                project = project_externaljob.ExternalJob(registry)

        if "view-type" in yaml_content:
            if yaml_content["view-type"] == "all":
                project = view_all.All(registry)
            elif yaml_content["view-type"] == "delivery_pipeline":
                project = view_delivery_pipeline.DeliveryPipeline(registry)
            elif yaml_content["view-type"] == "list":
                project = view_list.List(registry)
            elif yaml_content["view-type"] == "nested":
                project = view_nested.Nested(registry)
            elif yaml_content["view-type"] == "pipeline":
                project = view_pipeline.Pipeline(registry)
            elif yaml_content["view-type"] == "sectioned":
                project = view_sectioned.Sectioned(registry)
            else:
                raise InvalidAttributeError("view-type", yaml_content["view-type"])

        if project:
            xml_project = project.root_xml(yaml_content)
        else:
            xml_project = XML.Element("project")

        # Generate the XML tree directly with modules/general.
        pub.gen_xml(xml_project, yaml_content)

        # Check the output file is under the correct path.
        if "name" in yaml_content:
            prefix = os.path.dirname(self.in_filename)
            # Split using '/' since fullname uses the URL path separator.
            expected_folders = [
                os.path.normpath(
                    os.path.join(
                        prefix,
                        "/".join(parser._getfullname(yaml_content).split("/")[:-1]),
                    )
                )
            ]
            actual_folders = [os.path.dirname(f) for f in self.out_filenames]
            # assertEqual: assertEquals is a deprecated alias.
            self.assertEqual(
                expected_folders,
                actual_folders,
                "Output file under wrong path, was '%s', should be '%s'"
                % (
                    self.out_filenames[0],
                    os.path.join(
                        expected_folders[0], os.path.basename(self.out_filenames[0])
                    ),
                ),
            )

        # Prettify generated XML and compare doctest-style (ELLIPSIS allowed).
        pretty_xml = XmlJob(xml_project, "fixturejob").output().decode("utf-8")
        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(
                expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF
            ),
        )
class SingleJobTestCase(BaseScenariosTestCase):
    """Scenario test driving the full parse/expand/generate pipeline."""

    def test_yaml_snippet(self):
        config = self._get_config()

        # Reference fixtures are doctest-formatted; normalize whitespace.
        expected_xml = (
            self._read_utf8_content()
            .strip()
            .replace("<BLANKLINE>", "")
            .replace("\n\n", "\n")
        )

        parser = YamlParser(config)
        parser.parse(self.in_filename)

        plugins_info = None
        if self.plugins_info_filename:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail(
                "plugins-info-filename", text_content(self.plugins_info_filename)
            )
            self.addDetail("plugins-info", text_content(str(plugins_info)))

        registry = ModuleRegistry(config, plugins_info)
        registry.set_parser_data(parser.data)
        job_data_list, view_data_list = parser.expandYaml(registry)

        # Generate the XML tree in a stable (alphanumeric) order.
        xml_generator = XmlJobGenerator(registry)
        xml_jobs = xml_generator.generateXML(job_data_list)
        xml_jobs.sort(key=AlphanumSort)

        # Check reference files are under the correct path for folders.
        prefix = os.path.dirname(self.in_filename)
        # Split using '/' since fullname uses the URL path separator.
        expected_folders = list(
            {
                os.path.normpath(
                    os.path.join(prefix, "/".join(job_data["name"].split("/")[:-1]))
                )
                for job_data in job_data_list
            }
        )
        actual_folders = [os.path.dirname(f) for f in self.out_filenames]
        # Python-3-only codebase: call unittest's assertCountEqual directly
        # instead of going through the six compatibility shim.
        self.assertCountEqual(
            expected_folders,
            actual_folders,
            "Output file under wrong path, was '%s', should be '%s'"
            % (
                self.out_filenames[0],
                os.path.join(
                    expected_folders[0], os.path.basename(self.out_filenames[0])
                ),
            ),
        )

        # Prettify generated XML and compare doctest-style (ELLIPSIS allowed).
        pretty_xml = (
            "\n".join(job.output().decode("utf-8") for job in xml_jobs)
            .strip()
            .replace("\n\n", "\n")
        )
        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(
                expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF
            ),
        )
class JsonTestCase(BaseScenariosTestCase):
    """Check that a YAML fixture serializes to the expected JSON."""

    def test_yaml_snippet(self):
        expected = self._read_utf8_content()
        parsed = self._read_yaml_content(self.in_filename)
        actual = json.dumps(parsed, indent=4, separators=(",", ": "))
        matcher = testtools.matchers.DocTestMatches(
            expected, doctest.ELLIPSIS | doctest.REPORT_NDIFF
        )
        self.assertThat(actual, matcher)
class YamlTestCase(BaseScenariosTestCase):
    """Check that a YAML fixture round-trips to the expected YAML."""

    def test_yaml_snippet(self):
        expected = self._read_utf8_content()
        parsed = self._read_yaml_content(self.in_filename)
        # Round-tripping through JSON forces expansion of YAML anchors and
        # aliases, so the dumped YAML shows their net effect rather than
        # echoing the input verbatim.
        buffer = StringIO(json.dumps(parsed))
        actual = safe_dump(json.load(buffer), default_flow_style=False)
        matcher = testtools.matchers.DocTestMatches(
            expected, doctest.ELLIPSIS | doctest.REPORT_NDIFF
        )
        self.assertThat(actual, matcher)

View File

@ -1,3 +1,5 @@
#!/usr/bin/env python
#
# Joint copyright: # Joint copyright:
# - Copyright 2012,2013 Wikimedia Foundation # - Copyright 2012,2013 Wikimedia Foundation
# - Copyright 2012,2013 Antoine "hashar" Musso # - Copyright 2012,2013 Antoine "hashar" Musso
@ -15,13 +17,25 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import builders from jenkins_jobs.modules import builders
from tests import base
class TestCaseModuleBuilders(base.BaseScenariosTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = builders.Builders @pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(builders.Builders)

View File

@ -13,33 +13,40 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os import os.path
import pytest
import jenkins_jobs import jenkins_jobs
from tests import base
from tests.base import mock
class TestCaseJobCache(base.BaseTestCase): # Override fixture - do not use this mock.
@mock.patch("jenkins_jobs.builder.JobCache.get_cache_dir", lambda x: "/bad/file") @pytest.fixture(autouse=True)
def test_save_on_exit(self): def job_cache_mocked(mocker):
""" pass
Test that the cache is saved on normal object deletion
"""
with mock.patch("jenkins_jobs.builder.JobCache.save") as save_mock:
with mock.patch("os.path.isfile", return_value=False):
with mock.patch("jenkins_jobs.builder.JobCache._lock"):
jenkins_jobs.builder.JobCache("dummy")
save_mock.assert_called_with()
@mock.patch("jenkins_jobs.builder.JobCache.get_cache_dir", lambda x: "/bad/file") def test_save_on_exit(mocker):
def test_cache_file(self): """
""" Test that the cache is saved on normal object deletion
Test providing a cachefile. """
""" mocker.patch("jenkins_jobs.builder.JobCache.get_cache_dir", lambda x: "/bad/file")
test_file = os.path.abspath(__file__)
with mock.patch("os.path.join", return_value=test_file): save_mock = mocker.patch("jenkins_jobs.builder.JobCache.save")
with mock.patch("yaml.safe_load"): mocker.patch("os.path.isfile", return_value=False)
with mock.patch("jenkins_jobs.builder.JobCache._lock"): mocker.patch("jenkins_jobs.builder.JobCache._lock")
jenkins_jobs.builder.JobCache("dummy").data = None jenkins_jobs.builder.JobCache("dummy")
save_mock.assert_called_with()
def test_cache_file(mocker):
"""
Test providing a cachefile.
"""
mocker.patch("jenkins_jobs.builder.JobCache.get_cache_dir", lambda x: "/bad/file")
test_file = os.path.abspath(__file__)
mocker.patch("os.path.join", return_value=test_file)
mocker.patch("yaml.safe_load")
mocker.patch("jenkins_jobs.builder.JobCache._lock")
jenkins_jobs.builder.JobCache("dummy").data = None

View File

24
tests/cmd/conftest.py Normal file
View File

@ -0,0 +1,24 @@
from pathlib import Path
import pytest
from jenkins_jobs.cli import entry
@pytest.fixture
def fixtures_dir():
    # Directory holding the fixture files used by the cmd tests.
    return Path(__file__).parent / "fixtures"
@pytest.fixture
def default_config_file(fixtures_dir):
    # Path (as str) to a minimal builder config, for tests needing --conf.
    return str(fixtures_dir / "empty_builder.ini")
@pytest.fixture
def execute_jenkins_jobs():
    # Provide a callable that runs the jenkins-jobs CLI with the given args.
    def execute(args):
        jenkins_jobs = entry.JenkinsJobs(args)
        jenkins_jobs.execute()

    return execute

View File

@ -0,0 +1,3 @@
[jenkins]
url=http://test-jenkins.with.non.default.url:8080/
query_plugins_info=True

View File

@ -18,55 +18,57 @@
# of actions by the JJB library, usually through interaction with the # of actions by the JJB library, usually through interaction with the
# python-jenkins library. # python-jenkins library.
import os from unittest import mock
from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock) def test_delete_single_job(mocker, default_config_file, execute_jenkins_jobs):
class DeleteTests(CmdTestsBase): """
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_jobs") Test handling the deletion of a single Jenkins job.
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_views") """
def test_delete_single_job(self, delete_job_mock, delete_view_mock):
"""
Test handling the deletion of a single Jenkins job.
"""
args = ["--conf", self.default_config_file, "delete", "test_job"] mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_jobs")
self.execute_jenkins_jobs_with_args(args) mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_views")
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_jobs") args = ["--conf", default_config_file, "delete", "test_job"]
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_views") execute_jenkins_jobs(args)
def test_delete_multiple_jobs(self, delete_job_mock, delete_view_mock):
"""
Test handling the deletion of multiple Jenkins jobs.
"""
args = ["--conf", self.default_config_file, "delete", "test_job1", "test_job2"]
self.execute_jenkins_jobs_with_args(args)
@mock.patch("jenkins_jobs.builder.JenkinsManager.delete_job") def test_delete_multiple_jobs(mocker, default_config_file, execute_jenkins_jobs):
def test_delete_using_glob_params(self, delete_job_mock): """
""" Test handling the deletion of multiple Jenkins jobs.
Test handling the deletion of multiple Jenkins jobs using the glob """
parameters feature.
"""
args = [ mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_jobs")
"--conf", mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_views")
self.default_config_file,
"delete", args = ["--conf", default_config_file, "delete", "test_job1", "test_job2"]
"--path", execute_jenkins_jobs(args)
os.path.join(self.fixtures_path, "cmd-002.yaml"),
"*bar*",
] def test_delete_using_glob_params(
self.execute_jenkins_jobs_with_args(args) mocker, fixtures_dir, default_config_file, execute_jenkins_jobs
calls = [mock.call("bar001"), mock.call("bar002")] ):
delete_job_mock.assert_has_calls(calls, any_order=True) """
self.assertEqual( Test handling the deletion of multiple Jenkins jobs using the glob
delete_job_mock.call_count, parameters feature.
len(calls), """
"Jenkins.delete_job() was called '%s' times when "
"expected '%s'" % (delete_job_mock.call_count, len(calls)), delete_job_mock = mocker.patch("jenkins_jobs.builder.JenkinsManager.delete_job")
)
args = [
"--conf",
default_config_file,
"delete",
"--path",
str(fixtures_dir / "cmd-002.yaml"),
"*bar*",
]
execute_jenkins_jobs(args)
calls = [mock.call("bar001"), mock.call("bar002")]
delete_job_mock.assert_has_calls(calls, any_order=True)
assert delete_job_mock.call_count == len(
calls
), "Jenkins.delete_job() was called '%s' times when " "expected '%s'" % (
delete_job_mock.call_count,
len(calls),
)

View File

@ -17,31 +17,30 @@
# of actions by the JJB library, usually through interaction with the # of actions by the JJB library, usually through interaction with the
# python-jenkins library. # python-jenkins library.
from tests.base import mock import pytest
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock) def test_delete_all_accept(mocker, default_config_file, execute_jenkins_jobs):
class DeleteAllTests(CmdTestsBase): """
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_all_jobs") Test handling the deletion of a single Jenkins job.
def test_delete_all_accept(self, delete_job_mock): """
"""
Test handling the deletion of a single Jenkins job.
"""
args = ["--conf", self.default_config_file, "delete-all"] mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_all_jobs")
with mock.patch( mocker.patch("jenkins_jobs.builder.JenkinsManager.get_views", return_value=[None])
"jenkins_jobs.builder.JenkinsManager.get_views", return_value=[None] mocker.patch("jenkins_jobs.utils.input", return_value="y")
):
with mock.patch("jenkins_jobs.utils.input", return_value="y"):
self.execute_jenkins_jobs_with_args(args)
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_all_jobs") args = ["--conf", default_config_file, "delete-all"]
def test_delete_all_abort(self, delete_job_mock): execute_jenkins_jobs(args)
"""
Test handling the deletion of a single Jenkins job.
"""
args = ["--conf", self.default_config_file, "delete-all"]
with mock.patch("jenkins_jobs.utils.input", return_value="n"): def test_delete_all_abort(mocker, default_config_file, execute_jenkins_jobs):
self.assertRaises(SystemExit, self.execute_jenkins_jobs_with_args, args) """
Test handling the deletion of a single Jenkins job.
"""
mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_all_jobs")
mocker.patch("jenkins_jobs.utils.input", return_value="n")
args = ["--conf", default_config_file, "delete-all"]
with pytest.raises(SystemExit):
execute_jenkins_jobs(args)

View File

@ -12,87 +12,83 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import io
import os
from testscenarios.testcase import TestWithScenarios from collections import namedtuple
from tests.base import mock import pytest
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock) JobsScenario = namedtuple("JobsScnenario", "name jobs globs found")
class ListFromJenkinsTests(TestWithScenarios, CmdTestsBase):
scenarios = [ jobs_scenarios = [
("single", dict(jobs=["job1"], globs=[], found=["job1"])), JobsScenario("single", jobs=["job1"], globs=[], found=["job1"]),
("multiple", dict(jobs=["job1", "job2"], globs=[], found=["job1", "job2"])), JobsScenario("multiple", jobs=["job1", "job2"], globs=[], found=["job1", "job2"]),
( JobsScenario(
"multiple_with_folder", "multiple_with_folder",
dict( jobs=["folder1", "folder1/job1", "folder1/job2"],
jobs=["folder1", "folder1/job1", "folder1/job2"], globs=[],
globs=[], found=["folder1", "folder1/job1", "folder1/job2"],
found=["folder1", "folder1/job1", "folder1/job2"], ),
), JobsScenario(
), "multiple_with_glob",
( jobs=["job1", "job2", "job3"],
"multiple_with_glob", globs=["job[1-2]"],
dict( found=["job1", "job2"],
jobs=["job1", "job2", "job3"], ),
globs=["job[1-2]"], JobsScenario(
found=["job1", "job2"], "multiple_with_multi_glob",
), jobs=["job1", "job2", "job3", "job4"],
), globs=["job1", "job[24]"],
( found=["job1", "job2", "job4"],
"multiple_with_multi_glob", ),
dict( ]
jobs=["job1", "job2", "job3", "job4"],
globs=["job1", "job[24]"],
found=["job1", "job2", "job4"],
),
),
]
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_jobs")
def test_list(self, get_jobs_mock):
def _get_jobs():
return [{"fullname": fullname} for fullname in self.jobs]
get_jobs_mock.side_effect = _get_jobs
console_out = io.BytesIO()
args = ["--conf", self.default_config_file, "list"] + self.globs
with mock.patch("sys.stdout", console_out):
self.execute_jenkins_jobs_with_args(args)
self.assertEqual(
console_out.getvalue().decode("utf-8").rstrip(), ("\n".join(self.found))
)
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock) @pytest.mark.parametrize(
class ListFromYamlTests(TestWithScenarios, CmdTestsBase): "scenario",
[pytest.param(s, id=s.name) for s in jobs_scenarios],
)
def test_from_jenkins_tests(
capsys, mocker, default_config_file, execute_jenkins_jobs, scenario
):
def get_jobs():
return [{"fullname": fullname} for fullname in scenario.jobs]
scenarios = [ mocker.patch("jenkins_jobs.builder.JenkinsManager.get_jobs", side_effect=get_jobs)
("all", dict(globs=[], found=["bam001", "bar001", "bar002", "baz001"])),
(
"some",
dict(
globs=["*am*", "*002", "bar001"], found=["bam001", "bar001", "bar002"]
),
),
]
def test_list(self): args = ["--conf", default_config_file, "list"] + scenario.globs
path = os.path.join(self.fixtures_path, "cmd-002.yaml") execute_jenkins_jobs(args)
console_out = io.BytesIO() expected_out = "\n".join(scenario.found)
with mock.patch("sys.stdout", console_out): captured = capsys.readouterr()
self.execute_jenkins_jobs_with_args( assert captured.out.rstrip() == expected_out
["--conf", self.default_config_file, "list", "-p", path] + self.globs
)
self.assertEqual(
console_out.getvalue().decode("utf-8").rstrip(), ("\n".join(self.found)) YamlScenario = namedtuple("YamlScnenario", "name globs found")
)
yaml_scenarios = [
YamlScenario("all", globs=[], found=["bam001", "bar001", "bar002", "baz001"]),
YamlScenario(
"some",
globs=["*am*", "*002", "bar001"],
found=["bam001", "bar001", "bar002"],
),
]
@pytest.mark.parametrize(
"scenario",
[pytest.param(s, id=s.name) for s in yaml_scenarios],
)
def test_from_yaml_tests(
capsys, fixtures_dir, default_config_file, execute_jenkins_jobs, scenario
):
path = fixtures_dir / "cmd-002.yaml"
execute_jenkins_jobs(
["--conf", default_config_file, "list", "-p", str(path)] + scenario.globs
)
expected_out = "\n".join(scenario.found)
captured = capsys.readouterr()
assert captured.out.rstrip() == expected_out

View File

@ -18,295 +18,294 @@
# of actions by the JJB library, usually through interaction with the # of actions by the JJB library, usually through interaction with the
# python-jenkins library. # python-jenkins library.
import difflib
import filecmp import filecmp
import io import io
import difflib
import os import os
import shutil
import tempfile
import yaml import yaml
from unittest import mock
import jenkins import jenkins
from six.moves import StringIO import pytest
import testtools from testtools.assertions import assert_that
from jenkins_jobs.cli import entry from jenkins_jobs.cli import entry
from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock) def test_non_existing_job(fixtures_dir, default_config_file, execute_jenkins_jobs):
class TestTests(CmdTestsBase): """
def test_non_existing_job(self): Run test mode and pass a non-existing job name
""" (probably better to fail here)
Run test mode and pass a non-existing job name """
(probably better to fail here) args = [
""" "--conf",
args = [ default_config_file,
"--conf", "test",
self.default_config_file, str(fixtures_dir / "cmd-001.yaml"),
"test", "invalid",
os.path.join(self.fixtures_path, "cmd-001.yaml"), ]
"invalid", execute_jenkins_jobs(args)
]
self.execute_jenkins_jobs_with_args(args)
def test_valid_job(self):
"""
Run test mode and pass a valid job name
"""
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
"foo-job",
]
console_out = io.BytesIO()
with mock.patch("sys.stdout", console_out):
self.execute_jenkins_jobs_with_args(args)
def test_console_output(self):
"""
Run test mode and verify that resulting XML gets sent to the console.
"""
console_out = io.BytesIO()
with mock.patch("sys.stdout", console_out):
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
self.execute_jenkins_jobs_with_args(args)
xml_content = io.open(
os.path.join(self.fixtures_path, "cmd-001.xml"), "r", encoding="utf-8"
).read()
self.assertEqual(console_out.getvalue().decode("utf-8"), xml_content)
def test_output_dir(self):
"""
Run test mode with output to directory and verify that output files are
generated.
"""
tmpdir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmpdir)
args = ["test", os.path.join(self.fixtures_path, "cmd-001.yaml"), "-o", tmpdir]
self.execute_jenkins_jobs_with_args(args)
self.expectThat(
os.path.join(tmpdir, "foo-job"), testtools.matchers.FileExists()
)
def test_output_dir_config_xml(self):
"""
Run test mode with output to directory in "config.xml" mode and verify
that output files are generated.
"""
tmpdir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmpdir)
args = [
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
"-o",
tmpdir,
"--config-xml",
]
self.execute_jenkins_jobs_with_args(args)
self.expectThat(
os.path.join(tmpdir, "foo-job", "config.xml"),
testtools.matchers.FileExists(),
)
def test_stream_input_output_no_encoding_exceed_recursion(self):
"""
Test that we don't have issues processing large number of jobs and
outputting the result if the encoding is not set.
"""
console_out = io.BytesIO()
input_file = os.path.join(self.fixtures_path, "large-number-of-jobs-001.yaml")
with io.open(input_file, "r") as f:
with mock.patch("sys.stdout", console_out):
console_out.encoding = None
with mock.patch("sys.stdin", f):
args = ["test"]
self.execute_jenkins_jobs_with_args(args)
def test_stream_input_output_utf8_encoding(self):
"""
Run test mode simulating using pipes for input and output using
utf-8 encoding
"""
console_out = io.BytesIO()
input_file = os.path.join(self.fixtures_path, "cmd-001.yaml")
with io.open(input_file, "r") as f:
with mock.patch("sys.stdout", console_out):
with mock.patch("sys.stdin", f):
args = ["--conf", self.default_config_file, "test"]
self.execute_jenkins_jobs_with_args(args)
xml_content = io.open(
os.path.join(self.fixtures_path, "cmd-001.xml"), "r", encoding="utf-8"
).read()
value = console_out.getvalue().decode("utf-8")
self.assertEqual(value, xml_content)
def test_stream_input_output_ascii_encoding(self):
"""
Run test mode simulating using pipes for input and output using
ascii encoding with unicode input
"""
console_out = io.BytesIO()
console_out.encoding = "ascii"
input_file = os.path.join(self.fixtures_path, "cmd-001.yaml")
with io.open(input_file, "r") as f:
with mock.patch("sys.stdout", console_out):
with mock.patch("sys.stdin", f):
args = ["--conf", self.default_config_file, "test"]
self.execute_jenkins_jobs_with_args(args)
xml_content = io.open(
os.path.join(self.fixtures_path, "cmd-001.xml"), "r", encoding="utf-8"
).read()
value = console_out.getvalue().decode("ascii")
self.assertEqual(value, xml_content)
def test_stream_output_ascii_encoding_invalid_char(self):
"""
Run test mode simulating using pipes for input and output using
ascii encoding for output with include containing a character
that cannot be converted.
"""
console_out = io.BytesIO()
console_out.encoding = "ascii"
input_file = os.path.join(self.fixtures_path, "unicode001.yaml")
with io.open(input_file, "r", encoding="utf-8") as f:
with mock.patch("sys.stdout", console_out):
with mock.patch("sys.stdin", f):
args = ["--conf", self.default_config_file, "test"]
jenkins_jobs = entry.JenkinsJobs(args)
e = self.assertRaises(UnicodeError, jenkins_jobs.execute)
self.assertIn("'ascii' codec can't encode character", str(e))
@mock.patch("jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML")
@mock.patch("jenkins_jobs.cli.subcommand.update.ModuleRegistry")
def test_plugins_info_stub_option(self, registry_mock, generateXML_mock):
    """
    Test handling of plugins_info stub option.

    Running the 'test' subcommand with "-p <stub file>" must construct
    the module registry with the plugin list parsed from that stub file.
    """
    stub_path = os.path.join(self.fixtures_path, "plugins-info.yaml")
    conf_path = os.path.join(self.fixtures_path, "cmd-001.conf")
    job_path = os.path.join(self.fixtures_path, "cmd-001.yaml")
    self.execute_jenkins_jobs_with_args(
        ["--conf", conf_path, "test", "-p", stub_path, job_path]
    )
    # Parse the stub ourselves to obtain the list the registry should
    # have been handed.
    with io.open(stub_path, "r", encoding="utf-8") as yaml_file:
        expected_plugins = yaml.safe_load(yaml_file)
    registry_mock.assert_called_with(mock.ANY, expected_plugins)
@mock.patch("jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML")
@mock.patch("jenkins_jobs.cli.subcommand.update.ModuleRegistry")
def test_bogus_plugins_info_stub_option(self, registry_mock, generateXML_mock):
    """
    Verify that a JenkinsJobException is raised if the plugins_info stub
    file does not yield a list as its top-level object.

    Startup must abort with SystemExit and print an explanatory message
    to stderr.
    """
    bogus_stub = os.path.join(self.fixtures_path, "bogus-plugins-info.yaml")
    cli_args = [
        "--conf",
        os.path.join(self.fixtures_path, "cmd-001.conf"),
        "test",
        "-p",
        bogus_stub,
        os.path.join(self.fixtures_path, "cmd-001.yaml"),
    ]
    captured_err = StringIO()
    # Capture stderr so the error message emitted on startup failure can
    # be asserted on.
    with mock.patch("sys.stderr", captured_err):
        self.assertRaises(SystemExit, entry.JenkinsJobs, cli_args)
    self.assertIn("must contain a Yaml list", captured_err.getvalue())
class TestJenkinsGetPluginInfoError(CmdTestsBase): def test_valid_job(fixtures_dir, default_config_file, execute_jenkins_jobs):
"""Test without mocking get_plugins_info. """
Run test mode and pass a valid job name
"""
args = [
"--conf",
default_config_file,
"test",
str(fixtures_dir / "cmd-001.yaml"),
"foo-job",
]
execute_jenkins_jobs(args)
This test class is used for testing the 'test' subcommand when we want
to validate its behavior without mocking def test_console_output(
jenkins_jobs.builder.JenkinsManager.get_plugins_info capsys, fixtures_dir, default_config_file, execute_jenkins_jobs
):
"""
Run test mode and verify that resulting XML gets sent to the console.
""" """
@mock.patch("jenkins.Jenkins.get_plugins") args = [
def test_console_output_jenkins_connection_failure_warning(self, get_plugins_mock): "--conf",
""" default_config_file,
Run test mode and verify that failed Jenkins connection attempt "test",
exception does not bubble out of cmd.main. Ideally, we would also test str(fixtures_dir / "cmd-001.yaml"),
that an appropriate message is logged to stderr but it's somewhat ]
difficult to figure out how to actually enable stderr in this test execute_jenkins_jobs(args)
suite.
"""
get_plugins_mock.side_effect = jenkins.JenkinsException("Connection refused") expected_output = fixtures_dir.joinpath("cmd-001.xml").read_text()
with mock.patch("sys.stdout"): captured = capsys.readouterr()
try: assert captured.out == expected_output
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
self.execute_jenkins_jobs_with_args(args)
except jenkins.JenkinsException:
self.fail("jenkins.JenkinsException propagated to main")
except Exception:
pass # only care about jenkins.JenkinsException for now
@mock.patch("jenkins.Jenkins.get_plugins")
def test_skip_plugin_retrieval_if_no_config_provided(self, get_plugins_mock):
"""
Verify that retrieval of information from Jenkins instance about its
plugins will be skipped when run if no config file provided.
"""
with mock.patch("sys.stdout", new_callable=io.BytesIO):
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
entry.JenkinsJobs(args)
self.assertFalse(get_plugins_mock.called)
@mock.patch("jenkins.Jenkins.get_plugins_info") def test_output_dir(tmp_path, fixtures_dir, default_config_file, execute_jenkins_jobs):
def test_skip_plugin_retrieval_if_disabled(self, get_plugins_mock): """
""" Run test mode with output to directory and verify that output files are
Verify that retrieval of information from Jenkins instance about its generated.
plugins will be skipped when run if a config file provided and disables """
querying through a config option. args = ["test", str(fixtures_dir / "cmd-001.yaml"), "-o", str(tmp_path)]
""" execute_jenkins_jobs(args)
with mock.patch("sys.stdout", new_callable=io.BytesIO): assert tmp_path.joinpath("foo-job").exists()
args = [
"--conf",
os.path.join(self.fixtures_path, "disable-query-plugins.conf"), def test_output_dir_config_xml(tmp_path, fixtures_dir, execute_jenkins_jobs):
"test", """
os.path.join(self.fixtures_path, "cmd-001.yaml"), Run test mode with output to directory in "config.xml" mode and verify
] that output files are generated.
entry.JenkinsJobs(args) """
self.assertFalse(get_plugins_mock.called) args = [
"test",
str(fixtures_dir / "cmd-001.yaml"),
"-o",
str(tmp_path),
"--config-xml",
]
execute_jenkins_jobs(args)
assert tmp_path.joinpath("foo-job", "config.xml").exists()
def test_stream_input_output_no_encoding_exceed_recursion(
mocker, fixtures_dir, execute_jenkins_jobs
):
"""
Test that we don't have issues processing large number of jobs and
outputting the result if the encoding is not set.
"""
console_out = io.BytesIO()
console_out.encoding = None
mocker.patch("sys.stdout", console_out)
input = fixtures_dir.joinpath("large-number-of-jobs-001.yaml").read_bytes()
mocker.patch("sys.stdin", io.BytesIO(input))
args = ["test"]
execute_jenkins_jobs(args)
def test_stream_input_output_utf8_encoding(
capsys, mocker, fixtures_dir, default_config_file, execute_jenkins_jobs
):
"""
Run test mode simulating using pipes for input and output using
utf-8 encoding
"""
input = fixtures_dir.joinpath("cmd-001.yaml").read_bytes()
mocker.patch("sys.stdin", io.BytesIO(input))
args = ["--conf", default_config_file, "test"]
execute_jenkins_jobs(args)
expected_output = fixtures_dir.joinpath("cmd-001.xml").read_text()
captured = capsys.readouterr()
assert captured.out == expected_output
def test_stream_input_output_ascii_encoding(
mocker, fixtures_dir, default_config_file, execute_jenkins_jobs
):
"""
Run test mode simulating using pipes for input and output using
ascii encoding with unicode input
"""
console_out = io.BytesIO()
console_out.encoding = "ascii"
mocker.patch("sys.stdout", console_out)
input = fixtures_dir.joinpath("cmd-001.yaml").read_bytes()
mocker.patch("sys.stdin", io.BytesIO(input))
args = ["--conf", default_config_file, "test"]
execute_jenkins_jobs(args)
expected_output = fixtures_dir.joinpath("cmd-001.xml").read_text()
output = console_out.getvalue().decode("ascii")
assert output == expected_output
def test_stream_output_ascii_encoding_invalid_char(
mocker, fixtures_dir, default_config_file
):
"""
Run test mode simulating using pipes for input and output using
ascii encoding for output with include containing a character
that cannot be converted.
"""
console_out = io.BytesIO()
console_out.encoding = "ascii"
mocker.patch("sys.stdout", console_out)
input = fixtures_dir.joinpath("unicode001.yaml").read_bytes()
mocker.patch("sys.stdin", io.BytesIO(input))
args = ["--conf", default_config_file, "test"]
jenkins_jobs = entry.JenkinsJobs(args)
with pytest.raises(UnicodeError) as excinfo:
jenkins_jobs.execute()
assert "'ascii' codec can't encode character" in str(excinfo.value)
def test_plugins_info_stub_option(mocker, fixtures_dir, execute_jenkins_jobs):
"""
Test handling of plugins_info stub option.
"""
mocker.patch("jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML")
registry_mock = mocker.patch("jenkins_jobs.cli.subcommand.update.ModuleRegistry")
plugins_info_stub_yaml_file = fixtures_dir / "plugins-info.yaml"
args = [
"--conf",
str(fixtures_dir / "cmd-001.conf"),
"test",
"-p",
str(plugins_info_stub_yaml_file),
str(fixtures_dir / "cmd-001.yaml"),
]
execute_jenkins_jobs(args)
plugins_info_list = yaml.safe_load(plugins_info_stub_yaml_file.read_text())
registry_mock.assert_called_with(mock.ANY, plugins_info_list)
def test_bogus_plugins_info_stub_option(
capsys, mocker, fixtures_dir, default_config_file
):
"""
Verify that a JenkinsJobException is raised if the plugins_info stub
file does not yield a list as its top-level object.
"""
mocker.patch("jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML")
mocker.patch("jenkins_jobs.cli.subcommand.update.ModuleRegistry")
plugins_info_stub_yaml_file = fixtures_dir / "bogus-plugins-info.yaml"
args = [
"--conf",
str(fixtures_dir / "cmd-001.conf"),
"test",
"-p",
str(plugins_info_stub_yaml_file),
str(fixtures_dir / "cmd-001.yaml"),
]
with pytest.raises(SystemExit):
entry.JenkinsJobs(args)
captured = capsys.readouterr()
assert "must contain a Yaml list" in captured.err
# Test without mocking get_plugins_info.
#
# This test class is used for testing the 'test' subcommand when we want
# to validate its behavior without mocking
# jenkins_jobs.builder.JenkinsManager.get_plugins_info
def test_console_output_jenkins_connection_failure_warning(
caplog, mocker, fixtures_dir, execute_jenkins_jobs
):
"""
Run test mode and verify that failed Jenkins connection attempt
exception does not bubble out of cmd.main.
"""
mocker.patch(
"jenkins.Jenkins.get_plugins",
side_effect=jenkins.JenkinsException("Connection refused"),
)
try:
args = [
"--conf",
str(fixtures_dir / "enable-query-plugins.conf"),
"test",
str(fixtures_dir / "cmd-001.yaml"),
]
execute_jenkins_jobs(args)
except jenkins.JenkinsException:
pytest.fail("jenkins.JenkinsException propagated to main")
except Exception:
pass # only care about jenkins.JenkinsException for now
assert "Unable to retrieve Jenkins Plugin Info" in caplog.text
def test_skip_plugin_retrieval_if_no_config_provided(
mocker, fixtures_dir, default_config_file
):
"""
Verify that retrieval of information from Jenkins instance about its
plugins will be skipped when run if no config file provided.
"""
get_plugins_mock = mocker.patch("jenkins.Jenkins.get_plugins")
args = [
"--conf",
default_config_file,
"test",
str(fixtures_dir / "cmd-001.yaml"),
]
entry.JenkinsJobs(args)
assert not get_plugins_mock.called
@mock.patch("jenkins.Jenkins.get_plugins_info")
def test_skip_plugin_retrieval_if_disabled(mocker, fixtures_dir):
"""
Verify that retrieval of information from Jenkins instance about its
plugins will be skipped when run if a config file provided and disables
querying through a config option.
"""
get_plugins_mock = mocker.patch("jenkins.Jenkins.get_plugins")
args = [
"--conf",
str(fixtures_dir / "disable-query-plugins.conf"),
"test",
str(fixtures_dir / "cmd-001.yaml"),
]
entry.JenkinsJobs(args)
assert not get_plugins_mock.called
class MatchesDirMissingFilesMismatch(object): class MatchesDirMissingFilesMismatch(object):
@ -377,98 +376,97 @@ class MatchesDir(object):
return None return None
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock) @pytest.fixture
class TestTestsMultiPath(CmdTestsBase): def multipath(fixtures_dir):
def setUp(self): path_list = [
super(TestTestsMultiPath, self).setUp() str(fixtures_dir / "multi-path/yamldirs/" / p) for p in ["dir1", "dir2"]
]
return os.pathsep.join(path_list)
path_list = [
os.path.join(self.fixtures_path, "multi-path/yamldirs/", p)
for p in ["dir1", "dir2"]
]
self.multipath = os.pathsep.join(path_list)
self.output_dir = tempfile.mkdtemp()
def check_dirs_match(self, expected_dir): @pytest.fixture
try: def output_dir(tmp_path):
self.assertThat(self.output_dir, MatchesDir(expected_dir)) dir = tmp_path / "output"
except testtools.matchers.MismatchError: dir.mkdir()
raise return str(dir)
else:
shutil.rmtree(self.output_dir)
def test_multi_path(self):
"""
Run test mode and pass multiple paths.
"""
args = [
"--conf",
self.default_config_file,
"test",
"-o",
self.output_dir,
self.multipath,
]
self.execute_jenkins_jobs_with_args(args) def test_multi_path(
self.check_dirs_match( fixtures_dir, default_config_file, execute_jenkins_jobs, output_dir, multipath
os.path.join(self.fixtures_path, "multi-path/output_simple") ):
) """
Run test mode and pass multiple paths.
"""
args = [
"--conf",
default_config_file,
"test",
"-o",
output_dir,
multipath,
]
def test_recursive_multi_path_command_line(self): execute_jenkins_jobs(args)
""" assert_that(output_dir, MatchesDir(fixtures_dir / "multi-path/output_simple"))
Run test mode and pass multiple paths with recursive path option.
"""
args = [
"--conf",
self.default_config_file,
"test",
"-o",
self.output_dir,
"-r",
self.multipath,
]
self.execute_jenkins_jobs_with_args(args)
self.check_dirs_match(
os.path.join(self.fixtures_path, "multi-path/output_recursive")
)
def test_recursive_multi_path_config_file(self): def test_recursive_multi_path_command_line(
# test recursive set in configuration file fixtures_dir, default_config_file, execute_jenkins_jobs, output_dir, multipath
args = [ ):
"--conf", """
os.path.join(self.fixtures_path, "multi-path/builder-recursive.ini"), Run test mode and pass multiple paths with recursive path option.
"test", """
"-o", args = [
self.output_dir, "--conf",
self.multipath, default_config_file,
] "test",
self.execute_jenkins_jobs_with_args(args) "-o",
self.check_dirs_match( output_dir,
os.path.join(self.fixtures_path, "multi-path/output_recursive") "-r",
) multipath,
]
def test_recursive_multi_path_with_excludes(self): execute_jenkins_jobs(args)
""" assert_that(output_dir, MatchesDir(fixtures_dir / "multi-path/output_recursive"))
Run test mode and pass multiple paths with recursive path option.
"""
exclude_path = os.path.join(self.fixtures_path, "multi-path/yamldirs/dir2/dir1")
args = [
"--conf",
self.default_config_file,
"test",
"-x",
exclude_path,
"-o",
self.output_dir,
"-r",
self.multipath,
]
self.execute_jenkins_jobs_with_args(args)
self.check_dirs_match( def test_recursive_multi_path_config_file(
os.path.join( fixtures_dir, execute_jenkins_jobs, output_dir, multipath
self.fixtures_path, "multi-path/output_recursive_with_excludes" ):
) # test recursive set in configuration file
) args = [
"--conf",
str(fixtures_dir / "multi-path/builder-recursive.ini"),
"test",
"-o",
output_dir,
multipath,
]
execute_jenkins_jobs(args)
assert_that(output_dir, MatchesDir(fixtures_dir / "multi-path/output_recursive"))
def test_recursive_multi_path_with_excludes(
fixtures_dir, default_config_file, execute_jenkins_jobs, output_dir, multipath
):
"""
Run test mode and pass multiple paths with recursive path option.
"""
exclude_path = fixtures_dir / "multi-path/yamldirs/dir2/dir1"
args = [
"--conf",
default_config_file,
"test",
"-x",
str(exclude_path),
"-o",
output_dir,
"-r",
multipath,
]
execute_jenkins_jobs(args)
assert_that(
output_dir,
MatchesDir(fixtures_dir / "multi-path/output_recursive_with_excludes"),
)

View File

@ -18,107 +18,110 @@
# of actions by the JJB library, usually through interaction with the # of actions by the JJB library, usually through interaction with the
# python-jenkins library. # python-jenkins library.
import os from unittest import mock
import six
from tests.base import mock import pytest
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock) def test_update_jobs(mocker, fixtures_dir, default_config_file, execute_jenkins_jobs):
class UpdateTests(CmdTestsBase): """
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.job_exists") Test update_job is called
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.get_all_jobs") """
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.reconfig_job") mocker.patch("jenkins_jobs.builder.jenkins.Jenkins.job_exists")
def test_update_jobs( mocker.patch("jenkins_jobs.builder.jenkins.Jenkins.get_all_jobs")
self, jenkins_reconfig_job, jenkins_get_jobs, jenkins_job_exists reconfig_job = mocker.patch("jenkins_jobs.builder.jenkins.Jenkins.reconfig_job")
):
"""
Test update_job is called
"""
path = os.path.join(self.fixtures_path, "cmd-002.yaml")
args = ["--conf", self.default_config_file, "update", path]
self.execute_jenkins_jobs_with_args(args) path = fixtures_dir / "cmd-002.yaml"
args = ["--conf", default_config_file, "update", str(path)]
jenkins_reconfig_job.assert_has_calls( execute_jenkins_jobs(args)
[
mock.call(job_name, mock.ANY)
for job_name in ["bar001", "bar002", "baz001", "bam001"]
],
any_order=True,
)
@mock.patch("jenkins_jobs.builder.JenkinsManager.is_job", return_value=True) reconfig_job.assert_has_calls(
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_jobs") [
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_job_md5") mock.call(job_name, mock.ANY)
@mock.patch("jenkins_jobs.builder.JenkinsManager.update_job") for job_name in ["bar001", "bar002", "baz001", "bam001"]
def test_update_jobs_decode_job_output( ],
self, update_job_mock, get_job_md5_mock, get_jobs_mock, is_job_mock any_order=True,
): )
"""
Test that job xml output has been decoded before attempting to update
"""
# don't care about the value returned here
update_job_mock.return_value = ([], 0)
path = os.path.join(self.fixtures_path, "cmd-002.yaml")
args = ["--conf", self.default_config_file, "update", path]
self.execute_jenkins_jobs_with_args(args) def test_update_jobs_decode_job_output(
self.assertTrue(isinstance(update_job_mock.call_args[0][1], six.text_type)) mocker, fixtures_dir, default_config_file, execute_jenkins_jobs
):
"""
Test that job xml output has been decoded before attempting to update
"""
mocker.patch("jenkins_jobs.builder.JenkinsManager.is_job", return_value=True)
mocker.patch("jenkins_jobs.builder.JenkinsManager.get_jobs")
mocker.patch("jenkins_jobs.builder.JenkinsManager.get_job_md5")
update_job_mock = mocker.patch("jenkins_jobs.builder.JenkinsManager.update_job")
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.job_exists") # don't care about the value returned here
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.get_all_jobs") update_job_mock.return_value = ([], 0)
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.reconfig_job")
@mock.patch("jenkins_jobs.builder.jenkins.Jenkins.delete_job")
def test_update_jobs_and_delete_old(
self,
jenkins_delete_job,
jenkins_reconfig_job,
jenkins_get_all_jobs,
jenkins_job_exists,
):
"""Test update behaviour with --delete-old option.
* mock out a call to jenkins.Jenkins.get_jobs() to return a known list path = fixtures_dir / "cmd-002.yaml"
of job names. args = ["--conf", default_config_file, "update", str(path)]
* mock out a call to jenkins.Jenkins.reconfig_job() and
jenkins.Jenkins.delete_job() to detect calls being made to determine
that JJB does correctly delete the jobs it should delete when passed
a specific set of inputs.
* mock out a call to jenkins.Jenkins.job_exists() to always return
True.
"""
yaml_jobs = ["bar001", "bar002", "baz001", "bam001"]
extra_jobs = ["old_job001", "old_job002", "unmanaged"]
path = os.path.join(self.fixtures_path, "cmd-002.yaml") execute_jenkins_jobs(args)
args = ["--conf", self.default_config_file, "update", "--delete-old", path] assert isinstance(update_job_mock.call_args[0][1], str)
jenkins_get_all_jobs.return_value = [
{"fullname": name} for name in yaml_jobs + extra_jobs
]
with mock.patch( def test_update_jobs_and_delete_old(
"jenkins_jobs.builder.JenkinsManager.is_managed", mocker, fixtures_dir, default_config_file, execute_jenkins_jobs
side_effect=(lambda name: name != "unmanaged"), ):
): """Test update behaviour with --delete-old option.
self.execute_jenkins_jobs_with_args(args)
jenkins_reconfig_job.assert_has_calls( * mock out a call to jenkins.Jenkins.get_jobs() to return a known list
[mock.call(job_name, mock.ANY) for job_name in yaml_jobs], any_order=True of job names.
) * mock out a call to jenkins.Jenkins.reconfig_job() and
calls = [mock.call(name) for name in extra_jobs if name != "unmanaged"] jenkins.Jenkins.delete_job() to detect calls being made to determine
jenkins_delete_job.assert_has_calls(calls) that JJB does correctly delete the jobs it should delete when passed
# to ensure only the calls we expected were made, have to check a specific set of inputs.
# there were no others, as no API call for assert_has_only_calls * mock out a call to jenkins.Jenkins.job_exists() to always return
self.assertEqual(jenkins_delete_job.call_count, len(calls)) True.
"""
mocker.patch("jenkins_jobs.builder.jenkins.Jenkins.job_exists")
jenkins_get_all_jobs = mocker.patch(
"jenkins_jobs.builder.jenkins.Jenkins.get_all_jobs"
)
jenkins_reconfig_job = mocker.patch(
"jenkins_jobs.builder.jenkins.Jenkins.reconfig_job"
)
jenkins_delete_job = mocker.patch("jenkins_jobs.builder.jenkins.Jenkins.delete_job")
def test_update_timeout_not_set(self): yaml_jobs = ["bar001", "bar002", "baz001", "bam001"]
"""Validate update timeout behavior when timeout not explicitly configured.""" extra_jobs = ["old_job001", "old_job002", "unmanaged"]
self.skipTest("TODO: Develop actual update timeout test approach.")
def test_update_timeout_set(self): path = fixtures_dir / "cmd-002.yaml"
"""Validate update timeout behavior when timeout is explicitly configured.""" args = ["--conf", default_config_file, "update", "--delete-old", str(path)]
self.skipTest("TODO: Develop actual update timeout test approach.")
jenkins_get_all_jobs.return_value = [
{"fullname": name} for name in yaml_jobs + extra_jobs
]
mocker.patch(
"jenkins_jobs.builder.JenkinsManager.is_managed",
side_effect=(lambda name: name != "unmanaged"),
)
execute_jenkins_jobs(args)
jenkins_reconfig_job.assert_has_calls(
[mock.call(job_name, mock.ANY) for job_name in yaml_jobs], any_order=True
)
calls = [mock.call(name) for name in extra_jobs if name != "unmanaged"]
jenkins_delete_job.assert_has_calls(calls)
# to ensure only the calls we expected were made, have to check
# there were no others, as no API call for assert_has_only_calls
assert jenkins_delete_job.call_count == len(calls)
@pytest.mark.skip(reason="TODO: Develop actual update timeout test approach.")
def test_update_timeout_not_set():
"""Validate update timeout behavior when timeout not explicitly configured."""
pass
@pytest.mark.skip(reason="TODO: Develop actual update timeout test approach.")
def test_update_timeout_set():
"""Validate update timeout behavior when timeout is explicitly configured."""
pass

View File

@ -1,37 +1,11 @@
import os import pytest
from jenkins_jobs.cli import entry from jenkins_jobs.cli import entry
from tests import base
from tests.base import mock
class CmdTestsBase(base.BaseTestCase): def test_with_empty_args(mocker):
"""
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures") User passes no args, should fail with SystemExit
"""
def setUp(self): with pytest.raises(SystemExit):
super(CmdTestsBase, self).setUp() entry.JenkinsJobs([])
# Testing the cmd module can sometimes result in the JobCache class
# attempting to create the cache directory multiple times as the tests
# are run in parallel. Stub out the JobCache to ensure that each
# test can safely create the cache directory without risk of
# interference.
cache_patch = mock.patch("jenkins_jobs.builder.JobCache", autospec=True)
self.cache_mock = cache_patch.start()
self.addCleanup(cache_patch.stop)
self.default_config_file = os.path.join(self.fixtures_path, "empty_builder.ini")
def execute_jenkins_jobs_with_args(self, args):
jenkins_jobs = entry.JenkinsJobs(args)
jenkins_jobs.execute()
class TestCmd(CmdTestsBase):
def test_with_empty_args(self):
"""
User passes no args, should fail with SystemExit
"""
with mock.patch("sys.stderr"):
self.assertRaises(SystemExit, entry.JenkinsJobs, [])

View File

@ -1,160 +1,177 @@
import io import io
import os from pathlib import Path
from tests.base import mock import pytest
from tests.cmd.test_cmd import CmdTestsBase
from jenkins_jobs.cli import entry from jenkins_jobs.cli import entry
from jenkins_jobs import builder from jenkins_jobs import builder
patch = mock.patch
global_conf = "/etc/jenkins_jobs/jenkins_jobs.ini"
user_conf = Path.home() / ".config" / "jenkins_jobs" / "jenkins_jobs.ini"
local_conf = Path(__file__).parent / "jenkins_jobs.ini"
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock) def test_use_global_config(mocker, default_config_file):
class TestConfigs(CmdTestsBase): """
Verify that JJB uses the global config file by default
"""
mocker.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info")
global_conf = "/etc/jenkins_jobs/jenkins_jobs.ini" args = ["test", "foo"]
user_conf = os.path.join(
os.path.expanduser("~"), ".config", "jenkins_jobs", "jenkins_jobs.ini"
)
local_conf = os.path.join(os.path.dirname(__file__), "jenkins_jobs.ini")
def test_use_global_config(self): default_io_open = io.open
"""
Verify that JJB uses the global config file by default
"""
args = ["test", "foo"] def io_open(file, *args, **kw):
conffp = io.open(self.default_config_file, "r", encoding="utf-8") if file == global_conf:
default_io_open(default_config_file, "r", encoding="utf-8")
else:
return default_io_open(file, *args, **kw)
with patch("os.path.isfile", return_value=True) as m_isfile: def isfile(path):
if path == global_conf:
return True
return False
def side_effect(path): mocker.patch("os.path.isfile", side_effect=isfile)
if path == self.global_conf: mocked_open = mocker.patch("io.open", side_effect=io_open)
return True
return False
m_isfile.side_effect = side_effect entry.JenkinsJobs(args, config_file_required=True)
with patch("io.open", return_value=conffp) as m_open: mocked_open.assert_called_with(global_conf, "r", encoding="utf-8")
entry.JenkinsJobs(args, config_file_required=True)
m_open.assert_called_with(self.global_conf, "r", encoding="utf-8")
def test_use_config_in_user_home(self):
"""
Verify that JJB uses config file in user home folder
"""
args = ["test", "foo"] def test_use_config_in_user_home(mocker, default_config_file):
"""
Verify that JJB uses config file in user home folder
"""
conffp = io.open(self.default_config_file, "r", encoding="utf-8") args = ["test", "foo"]
with patch("os.path.isfile", return_value=True) as m_isfile:
def side_effect(path): default_io_open = io.open
if path == self.user_conf:
return True
return False
m_isfile.side_effect = side_effect def io_open(file, *args, **kw):
with patch("io.open", return_value=conffp) as m_open: if file == str(user_conf):
entry.JenkinsJobs(args, config_file_required=True) default_io_open(default_config_file, "r", encoding="utf-8")
m_open.assert_called_with(self.user_conf, "r", encoding="utf-8") else:
return default_io_open(file, *args, **kw)
def test_non_existing_config_dir(self): def isfile(path):
""" if path == str(user_conf):
Run test mode and pass a non-existing configuration directory return True
""" return False
args = ["--conf", self.default_config_file, "test", "foo"]
jenkins_jobs = entry.JenkinsJobs(args)
self.assertRaises(IOError, jenkins_jobs.execute)
def test_non_existing_config_file(self): mocker.patch("os.path.isfile", side_effect=isfile)
""" mocked_open = mocker.patch("io.open", side_effect=io_open)
Run test mode and pass a non-existing configuration file
"""
args = ["--conf", self.default_config_file, "test", "non-existing.yaml"]
jenkins_jobs = entry.JenkinsJobs(args)
self.assertRaises(IOError, jenkins_jobs.execute)
def test_config_options_not_replaced_by_cli_defaults(self): entry.JenkinsJobs(args, config_file_required=True)
""" mocked_open.assert_called_with(str(user_conf), "r", encoding="utf-8")
Run test mode and check config settings from conf file retained
when none of the global CLI options are set.
"""
config_file = os.path.join(self.fixtures_path, "settings_from_config.ini")
args = ["--conf", config_file, "test", "dummy.yaml"]
jenkins_jobs = entry.JenkinsJobs(args)
jjb_config = jenkins_jobs.jjb_config
self.assertEqual(jjb_config.jenkins["user"], "jenkins_user")
self.assertEqual(jjb_config.jenkins["password"], "jenkins_password")
self.assertEqual(jjb_config.builder["ignore_cache"], True)
self.assertEqual(jjb_config.builder["flush_cache"], True)
self.assertEqual(jjb_config.builder["update"], "all")
self.assertEqual(jjb_config.yamlparser["allow_empty_variables"], True)
def test_config_options_overriden_by_cli(self):
"""
Run test mode and check config settings from conf file retained
when none of the global CLI options are set.
"""
args = [
"--user",
"myuser",
"--password",
"mypassword",
"--ignore-cache",
"--flush-cache",
"--allow-empty-variables",
"test",
"dummy.yaml",
]
jenkins_jobs = entry.JenkinsJobs(args)
jjb_config = jenkins_jobs.jjb_config
self.assertEqual(jjb_config.jenkins["user"], "myuser")
self.assertEqual(jjb_config.jenkins["password"], "mypassword")
self.assertEqual(jjb_config.builder["ignore_cache"], True)
self.assertEqual(jjb_config.builder["flush_cache"], True)
self.assertEqual(jjb_config.yamlparser["allow_empty_variables"], True)
@mock.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager") def test_non_existing_config_dir(default_config_file):
def test_update_timeout_not_set(self, jenkins_mock): """
"""Check that timeout is left unset Run test mode and pass a non-existing configuration directory
"""
args = ["--conf", default_config_file, "test", "foo"]
jenkins_jobs = entry.JenkinsJobs(args)
with pytest.raises(IOError):
jenkins_jobs.execute()
Test that the Jenkins object has the timeout set on it only when
provided via the config option.
"""
path = os.path.join(self.fixtures_path, "cmd-002.yaml") def test_non_existing_config_file(default_config_file):
args = ["--conf", self.default_config_file, "update", path] """
Run test mode and pass a non-existing configuration file
"""
args = ["--conf", default_config_file, "test", "non-existing.yaml"]
jenkins_jobs = entry.JenkinsJobs(args)
with pytest.raises(IOError):
jenkins_jobs.execute()
jenkins_mock.return_value.update_jobs.return_value = ([], 0)
jenkins_mock.return_value.update_views.return_value = ([], 0)
self.execute_jenkins_jobs_with_args(args)
# validate that the JJBConfig used to initialize builder.Jenkins def test_config_options_not_replaced_by_cli_defaults(fixtures_dir):
# contains the expected timeout value. """
Run test mode and check config settings from conf file retained
when none of the global CLI options are set.
"""
config_file = fixtures_dir / "settings_from_config.ini"
args = ["--conf", str(config_file), "test", "dummy.yaml"]
jenkins_jobs = entry.JenkinsJobs(args)
jjb_config = jenkins_jobs.jjb_config
assert jjb_config.jenkins["user"] == "jenkins_user"
assert jjb_config.jenkins["password"] == "jenkins_password"
assert jjb_config.builder["ignore_cache"]
assert jjb_config.builder["flush_cache"]
assert jjb_config.builder["update"] == "all"
assert jjb_config.yamlparser["allow_empty_variables"]
jjb_config = jenkins_mock.call_args[0][0]
self.assertEqual(jjb_config.jenkins["timeout"], builder._DEFAULT_TIMEOUT)
@mock.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager") def test_config_options_overriden_by_cli():
def test_update_timeout_set(self, jenkins_mock): """
"""Check that timeout is set correctly Run test mode and check config settings from conf file retained
when none of the global CLI options are set.
"""
args = [
"--user",
"myuser",
"--password",
"mypassword",
"--ignore-cache",
"--flush-cache",
"--allow-empty-variables",
"test",
"dummy.yaml",
]
jenkins_jobs = entry.JenkinsJobs(args)
jjb_config = jenkins_jobs.jjb_config
assert jjb_config.jenkins["user"] == "myuser"
assert jjb_config.jenkins["password"] == "mypassword"
assert jjb_config.builder["ignore_cache"]
assert jjb_config.builder["flush_cache"]
assert jjb_config.yamlparser["allow_empty_variables"]
Test that the Jenkins object has the timeout set on it only when
provided via the config option.
"""
path = os.path.join(self.fixtures_path, "cmd-002.yaml") def test_update_timeout_not_set(mocker, fixtures_dir, default_config_file):
config_file = os.path.join(self.fixtures_path, "non-default-timeout.ini") """Check that timeout is left unset
args = ["--conf", config_file, "update", path]
jenkins_mock.return_value.update_jobs.return_value = ([], 0) Test that the Jenkins object has the timeout set on it only when
jenkins_mock.return_value.update_views.return_value = ([], 0) provided via the config option.
self.execute_jenkins_jobs_with_args(args) """
jenkins_mock = mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager")
# validate that the JJBConfig used to initialize builder.Jenkins path = fixtures_dir / "cmd-002.yaml"
# contains the expected timeout value. args = ["--conf", default_config_file, "update", str(path)]
jjb_config = jenkins_mock.call_args[0][0] jenkins_mock.return_value.update_jobs.return_value = ([], 0)
self.assertEqual(jjb_config.jenkins["timeout"], 0.2) jenkins_mock.return_value.update_views.return_value = ([], 0)
jenkins_jobs = entry.JenkinsJobs(args)
jenkins_jobs.execute()
# validate that the JJBConfig used to initialize builder.Jenkins
# contains the expected timeout value.
jjb_config = jenkins_mock.call_args[0][0]
assert jjb_config.jenkins["timeout"] == builder._DEFAULT_TIMEOUT
def test_update_timeout_set(mocker, fixtures_dir):
    """Check that timeout is set correctly

    Test that the Jenkins object has the timeout set on it only when
    provided via the config option.
    """
    manager_mock = mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager")
    manager_mock.return_value.update_jobs.return_value = ([], 0)
    manager_mock.return_value.update_views.return_value = ([], 0)

    yaml_path = fixtures_dir / "cmd-002.yaml"
    conf_path = fixtures_dir / "non-default-timeout.ini"

    jenkins_jobs = entry.JenkinsJobs(
        ["--conf", str(conf_path), "update", str(yaml_path)]
    )
    jenkins_jobs.execute()

    # The JJBConfig handed to builder.Jenkins must carry the timeout
    # value configured in the ini file.
    received_config = manager_mock.call_args[0][0]
    assert received_config.jenkins["timeout"] == 0.2

View File

@ -1,7 +1,4 @@
import os from pathlib import Path
from tests.base import mock
import testtools
from jenkins_jobs import utils from jenkins_jobs import utils
@ -26,114 +23,104 @@ def fake_os_walk(paths):
return os_walk return os_walk
# Testing the utils module can sometimes result in the JobCache class def test_recursive_path_option_exclude_pattern(mocker):
# attempting to create the cache directory multiple times as the tests """
# are run in parallel. Stub out the JobCache to ensure that each Test paths returned by the recursive processing when using pattern
# test can safely create the object without effect. excludes.
@mock.patch("jenkins_jobs.builder.JobCache", mock.MagicMock)
class CmdRecursePath(testtools.TestCase):
@mock.patch("jenkins_jobs.utils.os.walk")
def test_recursive_path_option_exclude_pattern(self, oswalk_mock):
"""
Test paths returned by the recursive processing when using pattern
excludes.
testing paths testing paths
/jjb_configs/dir1/test1/ /jjb_configs/dir1/test1/
/jjb_configs/dir1/file /jjb_configs/dir1/file
/jjb_configs/dir2/test2/ /jjb_configs/dir2/test2/
/jjb_configs/dir3/bar/ /jjb_configs/dir3/bar/
/jjb_configs/test3/bar/ /jjb_configs/test3/bar/
/jjb_configs/test3/baz/ /jjb_configs/test3/baz/
""" """
os_walk_paths = [ os_walk_paths = [
("/jjb_configs", (["dir1", "dir2", "dir3", "test3"], ())), ("/jjb_configs", (["dir1", "dir2", "dir3", "test3"], ())),
("/jjb_configs/dir1", (["test1"], ("file"))), ("/jjb_configs/dir1", (["test1"], ("file"))),
("/jjb_configs/dir2", (["test2"], ())), ("/jjb_configs/dir2", (["test2"], ())),
("/jjb_configs/dir3", (["bar"], ())), ("/jjb_configs/dir3", (["bar"], ())),
("/jjb_configs/dir3/bar", ([], ())), ("/jjb_configs/dir3/bar", ([], ())),
("/jjb_configs/test3/bar", None), ("/jjb_configs/test3/bar", None),
("/jjb_configs/test3/baz", None), ("/jjb_configs/test3/baz", None),
] ]
paths = [k for k, v in os_walk_paths if v is not None] paths = [k for k, v in os_walk_paths if v is not None]
oswalk_mock.side_effect = fake_os_walk(os_walk_paths) mocker.patch("jenkins_jobs.utils.os.walk", side_effect=fake_os_walk(os_walk_paths))
self.assertEqual(paths, utils.recurse_path("/jjb_configs", ["test*"])) assert paths == utils.recurse_path("/jjb_configs", ["test*"])
@mock.patch("jenkins_jobs.utils.os.walk")
def test_recursive_path_option_exclude_absolute(self, oswalk_mock):
"""
Test paths returned by the recursive processing when using absolute
excludes.
testing paths def test_recursive_path_option_exclude_absolute(mocker):
/jjb_configs/dir1/test1/ """
/jjb_configs/dir1/file Test paths returned by the recursive processing when using absolute
/jjb_configs/dir2/test2/ excludes.
/jjb_configs/dir3/bar/
/jjb_configs/test3/bar/
/jjb_configs/test3/baz/
"""
os_walk_paths = [ testing paths
("/jjb_configs", (["dir1", "dir2", "dir3", "test3"], ())), /jjb_configs/dir1/test1/
("/jjb_configs/dir1", None), /jjb_configs/dir1/file
("/jjb_configs/dir2", (["test2"], ())), /jjb_configs/dir2/test2/
("/jjb_configs/dir3", (["bar"], ())), /jjb_configs/dir3/bar/
("/jjb_configs/test3", (["bar", "baz"], ())), /jjb_configs/test3/bar/
("/jjb_configs/dir2/test2", ([], ())), /jjb_configs/test3/baz/
("/jjb_configs/dir3/bar", ([], ())), """
("/jjb_configs/test3/bar", ([], ())),
("/jjb_configs/test3/baz", ([], ())),
]
paths = [k for k, v in os_walk_paths if v is not None] os_walk_paths = [
("/jjb_configs", (["dir1", "dir2", "dir3", "test3"], ())),
("/jjb_configs/dir1", None),
("/jjb_configs/dir2", (["test2"], ())),
("/jjb_configs/dir3", (["bar"], ())),
("/jjb_configs/test3", (["bar", "baz"], ())),
("/jjb_configs/dir2/test2", ([], ())),
("/jjb_configs/dir3/bar", ([], ())),
("/jjb_configs/test3/bar", ([], ())),
("/jjb_configs/test3/baz", ([], ())),
]
oswalk_mock.side_effect = fake_os_walk(os_walk_paths) paths = [k for k, v in os_walk_paths if v is not None]
self.assertEqual( mocker.patch("jenkins_jobs.utils.os.walk", side_effect=fake_os_walk(os_walk_paths))
paths, utils.recurse_path("/jjb_configs", ["/jjb_configs/dir1"])
)
@mock.patch("jenkins_jobs.utils.os.walk") assert paths == utils.recurse_path("/jjb_configs", ["/jjb_configs/dir1"])
def test_recursive_path_option_exclude_relative(self, oswalk_mock):
"""
Test paths returned by the recursive processing when using relative
excludes.
testing paths
./jjb_configs/dir1/test/
./jjb_configs/dir1/file
./jjb_configs/dir2/test/
./jjb_configs/dir3/bar/
./jjb_configs/test3/bar/
./jjb_configs/test3/baz/
"""
os_walk_paths = [ def test_recursive_path_option_exclude_relative(mocker):
("jjb_configs", (["dir1", "dir2", "dir3", "test3"], ())), """
("jjb_configs/dir1", (["test"], ("file"))), Test paths returned by the recursive processing when using relative
("jjb_configs/dir2", (["test2"], ())), excludes.
("jjb_configs/dir3", (["bar"], ())),
("jjb_configs/test3", (["bar", "baz"], ())),
("jjb_configs/dir1/test", ([], ())),
("jjb_configs/dir2/test2", ([], ())),
("jjb_configs/dir3/bar", ([], ())),
("jjb_configs/test3/bar", None),
("jjb_configs/test3/baz", ([], ())),
]
rel_os_walk_paths = [ testing paths
(os.path.abspath(os.path.join(os.path.curdir, k)), v) ./jjb_configs/dir1/test/
for k, v in os_walk_paths ./jjb_configs/dir1/file
] ./jjb_configs/dir2/test/
./jjb_configs/dir3/bar/
./jjb_configs/test3/bar/
./jjb_configs/test3/baz/
"""
paths = [k for k, v in rel_os_walk_paths if v is not None] os_walk_paths = [
("jjb_configs", (["dir1", "dir2", "dir3", "test3"], ())),
("jjb_configs/dir1", (["test"], ("file"))),
("jjb_configs/dir2", (["test2"], ())),
("jjb_configs/dir3", (["bar"], ())),
("jjb_configs/test3", (["bar", "baz"], ())),
("jjb_configs/dir1/test", ([], ())),
("jjb_configs/dir2/test2", ([], ())),
("jjb_configs/dir3/bar", ([], ())),
("jjb_configs/test3/bar", None),
("jjb_configs/test3/baz", ([], ())),
]
oswalk_mock.side_effect = fake_os_walk(rel_os_walk_paths) rel_os_walk_paths = [
(str(Path.cwd().joinpath(k).absolute()), v) for k, v in os_walk_paths
]
self.assertEqual( paths = [k for k, v in rel_os_walk_paths if v is not None]
paths, utils.recurse_path("jjb_configs", ["jjb_configs/test3/bar"])
) mocker.patch(
"jenkins_jobs.utils.os.walk", side_effect=fake_os_walk(rel_os_walk_paths)
)
assert paths == utils.recurse_path("jjb_configs", ["jjb_configs/test3/bar"])

169
tests/conftest.py Normal file
View File

@ -0,0 +1,169 @@
import configparser
import pkg_resources
import xml.etree.ElementTree as XML
from pathlib import Path
import pytest
from jenkins_jobs.alphanum import AlphanumSort
from jenkins_jobs.config import JJBConfig
from jenkins_jobs.modules import project_externaljob
from jenkins_jobs.modules import project_flow
from jenkins_jobs.modules import project_githuborg
from jenkins_jobs.modules import project_matrix
from jenkins_jobs.modules import project_maven
from jenkins_jobs.modules import project_multibranch
from jenkins_jobs.modules import project_multijob
from jenkins_jobs.parser import YamlParser
from jenkins_jobs.registry import ModuleRegistry
from jenkins_jobs.xml_config import XmlJob, XmlJobGenerator
import jenkins_jobs.local_yaml as yaml
# Avoid writing to ~/.cache/jenkins_jobs.
@pytest.fixture(autouse=True)
def job_cache_mocked(mocker):
    """Stub out JobCache for every test so no real cache directory is touched."""
    mocker.patch("jenkins_jobs.builder.JobCache", autospec=True)
@pytest.fixture
def config_path(scenario):
    """Path to the scenario's JJB config file (passed even when absent on disk)."""
    return scenario.config_path
@pytest.fixture
def jjb_config(config_path):
    """Provide a validated JJBConfig built from the scenario's config file."""
    cfg = JJBConfig(config_path)
    cfg.validate()
    return cfg
@pytest.fixture
def mock_iter_entry_points():
    """Build an iter_entry_points replacement backed by this repo's setup.cfg.

    Reads the ``entry_points`` section and parses every non-blank line into a
    pkg_resources.EntryPoint, grouped by entry-point group name.
    """
    config = configparser.ConfigParser()
    config.read(Path(__file__).parent / "../setup.cfg")

    groups = {
        key: [
            pkg_resources.EntryPoint.parse(line, dist=pkg_resources.Distribution())
            for line in config["entry_points"][key].split("\n")
            if line.strip()
        ]
        for key in config["entry_points"]
    }

    def iter_entry_points(group, name=None):
        # Mirror pkg_resources.iter_entry_points: optionally filter by name.
        for entry in groups[group]:
            if name is None or name == entry.name:
                yield entry

    return iter_entry_points
@pytest.fixture
def input(scenario):
    """Parsed YAML data of the scenario's input file.

    Note: the fixture name shadows the ``input`` builtin inside tests.
    """
    return yaml.load(scenario.in_path.read_text())
@pytest.fixture
def plugins_info(scenario):
    """Load the scenario's optional plugins-info YAML; None when the file is absent."""
    path = scenario.plugins_info_path
    if path.exists():
        return yaml.load(path.read_text())
    return None
@pytest.fixture
def registry(mocker, mock_iter_entry_points, jjb_config, plugins_info):
    """ModuleRegistry whose entry points are patched to those from setup.cfg."""
    mocker.patch("pkg_resources.iter_entry_points", side_effect=mock_iter_entry_points)
    return ModuleRegistry(jjb_config, plugins_info)
@pytest.fixture
def project(input, registry):
    """Instantiate the project module matching the input's ``project-type``.

    Returns None for freestyle projects and for inputs without a
    ``project-type`` key.
    """
    project_classes = {
        "maven": project_maven.Maven,
        "matrix": project_matrix.Matrix,
        "flow": project_flow.Flow,
        "githuborg": project_githuborg.GithubOrganization,
        "multijob": project_multijob.MultiJob,
        "multibranch": project_multibranch.WorkflowMultiBranch,
        "multibranch-defaults": project_multibranch.WorkflowMultiBranchDefaults,
        "externaljob": project_externaljob.ExternalJob,
    }
    try:
        type_name = input["project-type"]
    except KeyError:
        return None
    if type_name == "freestyle":
        # Freestyle has no dedicated project class.
        return None
    return project_classes[type_name](registry)
@pytest.fixture
def expected_output(scenario):
    """Concatenated text of all expected-output files, in sorted path order."""
    chunks = [path.read_text() for path in sorted(scenario.out_paths)]
    return "".join(chunks)
def check_folder(scenario, jjb_config, input):
    """Assert output files live in the folder path embedded in the job name."""
    if "name" not in input:
        return
    full_name = YamlParser(jjb_config)._getfullname(input)
    dir_parts = full_name.split("/")[:-1]
    expected_dirs = [scenario.in_path.parent.joinpath(*dir_parts)]
    actual_dirs = [path.parent for path in scenario.out_paths]
    assert expected_dirs == actual_dirs
@pytest.fixture
def check_generator(scenario, input, expected_output, jjb_config, registry, project):
    """Return a checker that runs a module generator and compares its XML.

    The checker generates XML for the scenario input via the given
    Generator class and asserts it matches the expected fixture output.
    """
    registry.set_parser_data({})
    # Root element comes from the project class when one applies.
    root = project.root_xml(input) if project else XML.Element("project")

    def check(Generator):
        Generator(registry).gen_xml(root, input)
        check_folder(scenario, jjb_config, input)
        actual_xml = XmlJob(root, "fixturejob").output().decode()
        assert expected_output == actual_xml

    return check
@pytest.fixture
def check_parser(jjb_config, registry):
    """Return a checker that parses and expands a YAML file, failing on errors."""
    parser = YamlParser(jjb_config)

    def check(in_path):
        parser.parse(str(in_path))
        parser.expandYaml(registry)

    return check
@pytest.fixture
def check_job(scenario, expected_output, jjb_config, registry):
    """Return a checker that runs the full job pipeline and compares XML.

    The checker parses the scenario input, expands it through the registry,
    generates sorted job XML, and asserts it matches the expected output
    (with blank lines and <BLANKLINE> markers normalized away).
    """
    parser = YamlParser(jjb_config)

    def check():
        parser.parse(str(scenario.in_path))
        registry.set_parser_data(parser.data)
        job_data_list, view_data_list = parser.expandYaml(registry)

        xml_jobs = XmlJobGenerator(registry).generateXML(job_data_list)
        xml_jobs.sort(key=AlphanumSort)

        actual = "\n".join(job.output().decode() for job in xml_jobs)
        actual = actual.strip().replace("\n\n", "\n")
        expected = (
            expected_output.strip().replace("<BLANKLINE>", "").replace("\n\n", "\n")
        )
        assert expected == actual

    return check

View File

@ -13,24 +13,30 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
from testtools import ExpectedException import pytest
from jenkins_jobs.errors import JenkinsJobsException from jenkins_jobs.errors import JenkinsJobsException
from tests import base from tests.enum_scenarios import scenario_list
from tests.base import mock
class TestCaseModuleDuplicates(base.SingleJobTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
@mock.patch("jenkins_jobs.builder.logger", autospec=True)
def test_yaml_snippet(self, mock_logger):
if os.path.basename(self.in_filename).startswith("exception_"): @pytest.fixture(
with ExpectedException(JenkinsJobsException, "^Duplicate .*"): params=scenario_list(fixtures_dir),
super(TestCaseModuleDuplicates, self).test_yaml_snippet() ids=attrgetter("name"),
else: )
super(TestCaseModuleDuplicates, self).test_yaml_snippet() def scenario(request):
return request.param
def test_yaml_snippet(scenario, check_job):
if scenario.in_path.name.startswith("exception_"):
with pytest.raises(JenkinsJobsException) as excinfo:
check_job()
assert str(excinfo.value).startswith("Duplicate ")
else:
check_job()

42
tests/enum_scenarios.py Normal file
View File

@ -0,0 +1,42 @@
#!/usr/bin/env python
#
# Joint copyright:
# - Copyright 2012,2013 Wikimedia Foundation
# - Copyright 2012,2013 Antoine "hashar" Musso
# - Copyright 2013 Arnaud Fabre
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import namedtuple
# Fix: namedtuple typename was misspelled "Scnenario", so repr() of instances
# showed the typo. The typename now matches the bound name.
Scenario = namedtuple(
    "Scenario", "name in_path out_paths config_path plugins_info_path"
)


def scenario_list(fixtures_dir, in_ext=".yaml", out_ext=".xml"):
    """Yield a Scenario for every input fixture found under *fixtures_dir*.

    Recursively finds ``*<in_ext>`` files (skipping ``*plugins_info.yaml``),
    pairs each with any matching ``*<out_ext>`` files found anywhere under
    *fixtures_dir*, and derives the config and plugins-info paths by suffix
    substitution.
    """
    for path in fixtures_dir.rglob(f"*{in_ext}"):
        if path.name.endswith("plugins_info.yaml"):
            continue
        out_path = path.with_suffix(out_ext)
        out_path_list = list(fixtures_dir.rglob(out_path.name))
        yield Scenario(
            name=path.stem,
            in_path=path,
            out_paths=out_path_list,
            # When config file is missing it will still be passed and not None,
            # so JJBConfig will prefer it over system and user configs.
            config_path=path.with_suffix(".conf"),
            plugins_info_path=path.with_suffix(".plugins_info.yaml"),
        )

View File

@ -1,7 +1,6 @@
from testtools import ExpectedException import pytest
from jenkins_jobs import errors from jenkins_jobs import errors
from tests import base
def dispatch(exc, *args): def dispatch(exc, *args):
@ -21,65 +20,67 @@ def gen_xml(exc, *args):
raise exc(*args) raise exc(*args)
class TestInvalidAttributeError(base.BaseTestCase): def test_no_valid_values():
def test_no_valid_values(self): # When given no valid values, InvalidAttributeError simply displays a
# When given no valid values, InvalidAttributeError simply displays a # message indicating the invalid value, the component type, the
# message indicating the invalid value, the component type, the # component name, and the attribute name.
# component name, and the attribute name. message = "'{0}' is an invalid value for attribute {1}.{2}".format(
message = "'{0}' is an invalid value for attribute {1}.{2}".format( "fnord", "type.name", "fubar"
"fnord", "type.name", "fubar" )
) with pytest.raises(errors.InvalidAttributeError) as excinfo:
with ExpectedException(errors.InvalidAttributeError, message): dispatch(errors.InvalidAttributeError, "fubar", "fnord")
dispatch(errors.InvalidAttributeError, "fubar", "fnord") assert str(excinfo.value) == message
def test_with_valid_values(self):
# When given valid values, InvalidAttributeError displays a message
# indicating the invalid value, the component type, the component name,
# and the attribute name; additionally, it lists the valid values for
# the current component type & name.
valid_values = ["herp", "derp"]
message = "'{0}' is an invalid value for attribute {1}.{2}".format(
"fnord", "type.name", "fubar"
)
message += "\nValid values include: {0}".format(
", ".join("'{0}'".format(value) for value in valid_values)
)
with ExpectedException(errors.InvalidAttributeError, message):
dispatch(errors.InvalidAttributeError, "fubar", "fnord", valid_values)
class TestMissingAttributeError(base.BaseTestCase): def test_with_valid_values():
def test_with_single_missing_attribute(self): # When given valid values, InvalidAttributeError displays a message
# When passed a single missing attribute, display a message indicating # indicating the invalid value, the component type, the component name,
# * the missing attribute # and the attribute name; additionally, it lists the valid values for
# * which component type and component name is missing it. # the current component type & name.
missing_attribute = "herp" valid_values = ["herp", "derp"]
message = "Missing {0} from an instance of '{1}'".format( message = "'{0}' is an invalid value for attribute {1}.{2}".format(
missing_attribute, "type.name" "fnord", "type.name", "fubar"
) )
message += "\nValid values include: {0}".format(
", ".join("'{0}'".format(value) for value in valid_values)
)
with ExpectedException(errors.MissingAttributeError, message): with pytest.raises(errors.InvalidAttributeError) as excinfo:
dispatch(errors.MissingAttributeError, missing_attribute) dispatch(errors.InvalidAttributeError, "fubar", "fnord", valid_values)
assert str(excinfo.value) == message
with ExpectedException(
errors.MissingAttributeError, message.replace("type.name", "module")
):
gen_xml(errors.MissingAttributeError, missing_attribute)
def test_with_multiple_missing_attributes(self): def test_with_single_missing_attribute():
# When passed multiple missing attributes, display a message indicating # When passed a single missing attribute, display a message indicating
# * the missing attributes # * the missing attribute
# * which component type and component name is missing it. # * which component type and component name is missing it.
missing_attribute = ["herp", "derp"] missing_attribute = "herp"
message = "One of {0} must be present in '{1}'".format( message = "Missing {0} from an instance of '{1}'".format(
", ".join("'{0}'".format(value) for value in missing_attribute), "type.name" missing_attribute, "type.name"
) )
with ExpectedException(errors.MissingAttributeError, message): with pytest.raises(errors.MissingAttributeError) as excinfo:
dispatch(errors.MissingAttributeError, missing_attribute) dispatch(errors.MissingAttributeError, missing_attribute)
assert str(excinfo.value) == message
with ExpectedException( with pytest.raises(errors.MissingAttributeError) as excinfo:
errors.MissingAttributeError, message.replace("type.name", "module") gen_xml(errors.MissingAttributeError, missing_attribute)
): assert str(excinfo.value) == message.replace("type.name", "module")
gen_xml(errors.MissingAttributeError, missing_attribute)
def test_with_multiple_missing_attributes():
# When passed multiple missing attributes, display a message indicating
# * the missing attributes
# * which component type and component name is missing it.
missing_attribute = ["herp", "derp"]
message = "One of {0} must be present in '{1}'".format(
", ".join("'{0}'".format(value) for value in missing_attribute), "type.name"
)
with pytest.raises(errors.MissingAttributeError) as excinfo:
dispatch(errors.MissingAttributeError, missing_attribute)
assert str(excinfo.value) == message
with pytest.raises(errors.MissingAttributeError) as excinfo:
gen_xml(errors.MissingAttributeError, missing_attribute)
assert str(excinfo.value) == message.replace("type.name", "module")

View File

@ -15,13 +15,25 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import general from jenkins_jobs.modules import general
from tests import base
class TestCaseModuleGeneral(base.BaseScenariosTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = general.General @pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(general.General)

View File

@ -13,13 +13,25 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
from tests import base from operator import attrgetter
import os from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import project_githuborg from jenkins_jobs.modules import project_githuborg
class TestCaseGithubOrganization(base.BaseScenariosTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
default_config_file = "/dev/null" @pytest.fixture(
klass = project_githuborg.GithubOrganization params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(project_githuborg.GithubOrganization)

View File

@ -12,13 +12,25 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import hipchat_notif from jenkins_jobs.modules import hipchat_notif
from tests import base
class TestCaseModulePublishers(base.BaseScenariosTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = hipchat_notif.HipChat @pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(hipchat_notif.HipChat)

View File

@ -14,72 +14,72 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
from unittest import mock
import pytest
from jenkins_jobs.config import JJBConfig from jenkins_jobs.config import JJBConfig
import jenkins_jobs.builder import jenkins_jobs.builder
from tests import base
from tests.base import mock
_plugins_info = {} _plugins_info = {}
_plugins_info["plugin1"] = {"longName": "", "shortName": "", "version": ""} _plugins_info["plugin1"] = {"longName": "", "shortName": "", "version": ""}
@mock.patch("jenkins_jobs.builder.JobCache", mock.MagicMock) @pytest.fixture
class TestCaseTestJenkinsManager(base.BaseTestCase): def jjb_config():
def setUp(self): config = JJBConfig()
super(TestCaseTestJenkinsManager, self).setUp() config.validate()
self.jjb_config = JJBConfig() return config
self.jjb_config.validate()
def test_plugins_list(self):
self.jjb_config.builder["plugins_info"] = _plugins_info
self.builder = jenkins_jobs.builder.JenkinsManager(self.jjb_config) def test_plugins_list(jjb_config):
self.assertEqual(self.builder.plugins_list, _plugins_info) jjb_config.builder["plugins_info"] = _plugins_info
@mock.patch.object( builder = jenkins_jobs.builder.JenkinsManager(jjb_config)
assert builder.plugins_list == _plugins_info
def test_plugins_list_from_jenkins(mocker, jjb_config):
mocker.patch.object(
jenkins_jobs.builder.jenkins.Jenkins, "get_plugins", return_value=_plugins_info jenkins_jobs.builder.jenkins.Jenkins, "get_plugins", return_value=_plugins_info
) )
def test_plugins_list_from_jenkins(self, jenkins_mock): # Trigger fetching the plugins from jenkins when accessing the property
# Trigger fetching the plugins from jenkins when accessing the property jjb_config.builder["plugins_info"] = {}
self.jjb_config.builder["plugins_info"] = {} builder = jenkins_jobs.builder.JenkinsManager(jjb_config)
self.builder = jenkins_jobs.builder.JenkinsManager(self.jjb_config) # See https://github.com/formiaczek/multi_key_dict/issues/17
# See https://github.com/formiaczek/multi_key_dict/issues/17 # self.assertEqual(self.builder.plugins_list, k)
# self.assertEqual(self.builder.plugins_list, k) for key_tuple in builder.plugins_list.keys():
for key_tuple in self.builder.plugins_list.keys(): for key in key_tuple:
for key in key_tuple: assert builder.plugins_list[key] == _plugins_info[key]
self.assertEqual(self.builder.plugins_list[key], _plugins_info[key])
def test_delete_managed(self):
self.jjb_config.builder["plugins_info"] = {}
self.builder = jenkins_jobs.builder.JenkinsManager(self.jjb_config)
with mock.patch.multiple( def test_delete_managed(mocker, jjb_config):
"jenkins_jobs.builder.JenkinsManager", jjb_config.builder["plugins_info"] = {}
get_jobs=mock.DEFAULT, builder = jenkins_jobs.builder.JenkinsManager(jjb_config)
is_job=mock.DEFAULT,
is_managed=mock.DEFAULT,
delete_job=mock.DEFAULT,
) as patches:
patches["get_jobs"].return_value = [
{"fullname": "job1"},
{"fullname": "job2"},
]
patches["is_managed"].side_effect = [True, True]
patches["is_job"].side_effect = [True, True]
self.builder.delete_old_managed() patches = mocker.patch.multiple(
self.assertEqual(patches["delete_job"].call_count, 2) "jenkins_jobs.builder.JenkinsManager",
get_jobs=mock.DEFAULT,
is_job=mock.DEFAULT,
is_managed=mock.DEFAULT,
delete_job=mock.DEFAULT,
)
patches["get_jobs"].return_value = [
{"fullname": "job1"},
{"fullname": "job2"},
]
patches["is_managed"].side_effect = [True, True]
patches["is_job"].side_effect = [True, True]
def _get_plugins_info_error_test(self, error_string): builder.delete_old_managed()
builder = jenkins_jobs.builder.JenkinsManager(self.jjb_config) assert patches["delete_job"].call_count == 2
exception = jenkins_jobs.builder.jenkins.JenkinsException(error_string)
with mock.patch.object(builder.jenkins, "get_plugins", side_effect=exception):
plugins_info = builder.get_plugins_info()
self.assertEqual([_plugins_info["plugin1"]], plugins_info)
def test_get_plugins_info_handles_connectionrefused_errors(self):
self._get_plugins_info_error_test("Connection refused")
def test_get_plugins_info_handles_forbidden_errors(self): @pytest.mark.parametrize("error_string", ["Connection refused", "Forbidden"])
self._get_plugins_info_error_test("Forbidden") def test_get_plugins_info_error(mocker, jjb_config, error_string):
builder = jenkins_jobs.builder.JenkinsManager(jjb_config)
exception = jenkins_jobs.builder.jenkins.JenkinsException(error_string)
mocker.patch.object(builder.jenkins, "get_plugins", side_effect=exception)
plugins_info = builder.get_plugins_info()
assert [_plugins_info["plugin1"]] == plugins_info

View File

@ -15,11 +15,24 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
from tests import base import pytest
from tests.enum_scenarios import scenario_list
class TestCaseModuleJsonParser(base.SingleJobTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path, in_ext="json", out_ext="xml")
@pytest.fixture(
params=scenario_list(fixtures_dir, in_ext=".json"),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_job):
check_job()

View File

@ -14,128 +14,169 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from io import StringIO
import yaml from pathlib import Path
from yaml import safe_dump
from testtools import ExpectedException import json
import pytest
from yaml.composer import ComposerError from yaml.composer import ComposerError
import jenkins_jobs.local_yaml as yaml
from jenkins_jobs.config import JJBConfig from jenkins_jobs.config import JJBConfig
from jenkins_jobs.parser import YamlParser from jenkins_jobs.parser import YamlParser
from jenkins_jobs.registry import ModuleRegistry from jenkins_jobs.registry import ModuleRegistry
from tests import base from tests.enum_scenarios import scenario_list
def _exclude_scenarios(input_filename): fixtures_dir = Path(__file__).parent / "fixtures"
return os.path.basename(input_filename).startswith("custom_")
class TestCaseLocalYamlInclude(base.JsonTestCase): @pytest.fixture
def read_input(scenario):
def read():
return yaml.load(
scenario.in_path.read_text(),
search_path=[str(fixtures_dir)],
)
return read
@pytest.mark.parametrize(
"scenario",
[
pytest.param(s, id=s.name)
for s in scenario_list(fixtures_dir, out_ext=".json")
if not s.name.startswith(("custom_", "exception_"))
],
)
def test_include(read_input, expected_output):
""" """
Verify application specific tags independently of any changes to Verify application specific tags independently of any changes to
modules XML parsing behaviour modules XML parsing behaviour
""" """
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures") input = read_input()
scenarios = base.get_scenarios( pretty_json = json.dumps(input, indent=4, separators=(",", ": "))
fixtures_path, "yaml", "json", filter_func=_exclude_scenarios assert expected_output.rstrip() == pretty_json
)
def test_yaml_snippet(self):
if os.path.basename(self.in_filename).startswith("exception_"):
with ExpectedException(ComposerError, "^found duplicate anchor .*"):
super(TestCaseLocalYamlInclude, self).test_yaml_snippet()
else:
super(TestCaseLocalYamlInclude, self).test_yaml_snippet()
class TestCaseLocalYamlAnchorAlias(base.YamlTestCase): @pytest.mark.parametrize(
"scenario",
[
pytest.param(s, id=s.name)
for s in scenario_list(fixtures_dir, out_ext=".json")
if s.name.startswith("exception_")
],
)
def test_include_error(read_input, expected_output):
with pytest.raises(ComposerError) as excinfo:
_ = read_input()
assert str(excinfo.value).startswith("found duplicate anchor ")
@pytest.mark.parametrize(
"scenario",
[
pytest.param(s, id=s.name)
for s in scenario_list(fixtures_dir, in_ext=".iyaml", out_ext=".oyaml")
],
)
def test_anchor_alias(read_input, expected_output):
""" """
Verify yaml input is expanded to the expected yaml output when using yaml Verify yaml input is expanded to the expected yaml output when using yaml
anchors and aliases. anchors and aliases.
""" """
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures") input = read_input()
scenarios = base.get_scenarios(fixtures_path, "iyaml", "oyaml") data = StringIO(json.dumps(input))
pretty_yaml = safe_dump(json.load(data), default_flow_style=False)
assert expected_output == pretty_yaml
class TestCaseLocalYamlIncludeAnchors(base.BaseTestCase): def test_include_anchors():
"""
Verify that anchors/aliases only span use of '!include' tag
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures") To ensure that any yaml loaded by the include tag is in the same
space as the top level file, but individual top level yaml definitions
are treated by the yaml loader as independent.
"""
def test_multiple_same_anchor_in_multiple_toplevel_yaml(self): config = JJBConfig()
""" config.jenkins["url"] = "http://example.com"
Verify that anchors/aliases only span use of '!include' tag config.jenkins["user"] = "jenkins"
config.jenkins["password"] = "password"
config.builder["plugins_info"] = []
config.validate()
To ensure that any yaml loaded by the include tag is in the same files = [
space as the top level file, but individual top level yaml definitions "custom_same_anchor-001-part1.yaml",
are treated by the yaml loader as independent. "custom_same_anchor-001-part2.yaml",
""" ]
files = [ parser = YamlParser(config)
"custom_same_anchor-001-part1.yaml", # Should not raise ComposerError.
"custom_same_anchor-001-part2.yaml", parser.load_files([str(fixtures_dir / name) for name in files])
]
jjb_config = JJBConfig()
jjb_config.jenkins["url"] = "http://example.com"
jjb_config.jenkins["user"] = "jenkins"
jjb_config.jenkins["password"] = "password"
jjb_config.builder["plugins_info"] = []
jjb_config.validate()
j = YamlParser(jjb_config)
j.load_files([os.path.join(self.fixtures_path, f) for f in files])
class TestCaseLocalYamlRetainAnchors(base.BaseTestCase): def test_retain_anchor_default():
"""
Verify that anchors are NOT retained across files by default.
"""
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures") config = JJBConfig()
config.validate()
def test_retain_anchors_default(self): files = [
""" "custom_retain_anchors_include001.yaml",
Verify that anchors are NOT retained across files by default. "custom_retain_anchors.yaml",
""" ]
files = ["custom_retain_anchors_include001.yaml", "custom_retain_anchors.yaml"] parser = YamlParser(config)
with pytest.raises(ComposerError) as excinfo:
parser.load_files([str(fixtures_dir / name) for name in files])
assert "found undefined alias" in str(excinfo.value)
jjb_config = JJBConfig()
# use the default value for retain_anchors
jjb_config.validate()
j = YamlParser(jjb_config)
with ExpectedException(yaml.composer.ComposerError, "found undefined alias.*"):
j.load_files([os.path.join(self.fixtures_path, f) for f in files])
def test_retain_anchors_enabled(self): def test_retain_anchors_enabled():
""" """
Verify that anchors are retained across files if retain_anchors is Verify that anchors are retained across files if retain_anchors is
enabled in the config. enabled in the config.
""" """
files = ["custom_retain_anchors_include001.yaml", "custom_retain_anchors.yaml"] config = JJBConfig()
config.yamlparser["retain_anchors"] = True
config.validate()
jjb_config = JJBConfig() files = [
jjb_config.yamlparser["retain_anchors"] = True "custom_retain_anchors_include001.yaml",
jjb_config.validate() "custom_retain_anchors.yaml",
j = YamlParser(jjb_config) ]
j.load_files([os.path.join(self.fixtures_path, f) for f in files])
def test_retain_anchors_enabled_j2_yaml(self): parser = YamlParser(config)
""" # Should not raise ComposerError.
Verify that anchors are retained across files and are properly retained when using !j2-yaml. parser.load_files([str(fixtures_dir / name) for name in files])
"""
files = [
"custom_retain_anchors_j2_yaml_include001.yaml",
"custom_retain_anchors_j2_yaml.yaml",
]
jjb_config = JJBConfig() def test_retain_anchors_enabled_j2_yaml():
jjb_config.yamlparser["retain_anchors"] = True """
jjb_config.validate() Verify that anchors are retained across files and are properly retained when using !j2-yaml.
j = YamlParser(jjb_config) """
j.load_files([os.path.join(self.fixtures_path, f) for f in files])
registry = ModuleRegistry(jjb_config, None) config = JJBConfig()
jobs, _ = j.expandYaml(registry) config.yamlparser["retain_anchors"] = True
self.assertEqual(jobs[0]["builders"][0]["shell"], "docker run ubuntu:latest") config.validate()
files = [
"custom_retain_anchors_j2_yaml_include001.yaml",
"custom_retain_anchors_j2_yaml.yaml",
]
parser = YamlParser(config)
parser.load_files([str(fixtures_dir / name) for name in files])
registry = ModuleRegistry(config, None)
jobs, _ = parser.expandYaml(registry)
assert "docker run ubuntu:latest" == jobs[0]["builders"][0]["shell"]

View File

@ -15,11 +15,24 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
from tests import base import pytest
from tests.enum_scenarios import scenario_list
class TestCaseModuleSCMMacro(base.SingleJobTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
@pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_job):
check_job()

View File

@ -1,140 +1,146 @@
import pkg_resources import pkg_resources
from collections import namedtuple
from operator import attrgetter
from testtools.content import text_content import pytest
import testscenarios
from jenkins_jobs.config import JJBConfig from jenkins_jobs.config import JJBConfig
from jenkins_jobs.registry import ModuleRegistry from jenkins_jobs.registry import ModuleRegistry
from tests import base
class ModuleRegistryPluginInfoTestsWithScenarios( Scenario = namedtuple("Scnenario", "name v1 op v2")
testscenarios.TestWithScenarios, base.BaseTestCase
):
scenarios = [ scenarios = [
("s1", dict(v1="1.0.0", op="__gt__", v2="0.8.0")), Scenario("s1", v1="1.0.0", op="__gt__", v2="0.8.0"),
("s2", dict(v1="1.0.1alpha", op="__gt__", v2="1.0.0")), Scenario("s2", v1="1.0.1alpha", op="__gt__", v2="1.0.0"),
("s3", dict(v1="1.0", op="__eq__", v2="1.0.0")), Scenario("s3", v1="1.0", op="__eq__", v2="1.0.0"),
("s4", dict(v1="1.0", op="__eq__", v2="1.0")), Scenario("s4", v1="1.0", op="__eq__", v2="1.0"),
("s5", dict(v1="1.0", op="__lt__", v2="1.8.0")), Scenario("s5", v1="1.0", op="__lt__", v2="1.8.0"),
("s6", dict(v1="1.0.1alpha", op="__lt__", v2="1.0.1")), Scenario("s6", v1="1.0.1alpha", op="__lt__", v2="1.0.1"),
("s7", dict(v1="1.0alpha", op="__lt__", v2="1.0.0")), Scenario("s7", v1="1.0alpha", op="__lt__", v2="1.0.0"),
("s8", dict(v1="1.0-alpha", op="__lt__", v2="1.0.0")), Scenario("s8", v1="1.0-alpha", op="__lt__", v2="1.0.0"),
("s9", dict(v1="1.1-alpha", op="__gt__", v2="1.0")), Scenario("s9", v1="1.1-alpha", op="__gt__", v2="1.0"),
("s10", dict(v1="1.0-SNAPSHOT", op="__lt__", v2="1.0")), Scenario("s10", v1="1.0-SNAPSHOT", op="__lt__", v2="1.0"),
("s11", dict(v1="1.0.preview", op="__lt__", v2="1.0")), Scenario("s11", v1="1.0.preview", op="__lt__", v2="1.0"),
("s12", dict(v1="1.1-SNAPSHOT", op="__gt__", v2="1.0")), Scenario("s12", v1="1.1-SNAPSHOT", op="__gt__", v2="1.0"),
("s13", dict(v1="1.0a-SNAPSHOT", op="__lt__", v2="1.0a")), Scenario("s13", v1="1.0a-SNAPSHOT", op="__lt__", v2="1.0a"),
( Scenario(
"s14", "s14", v1="1.4.6-SNAPSHOT (private-0986edd9-example)", op="__lt__", v2="1.4.6"
dict( ),
v1="1.4.6-SNAPSHOT (private-0986edd9-example)", op="__lt__", v2="1.4.6" Scenario(
), "s15", v1="1.4.6-SNAPSHOT (private-0986edd9-example)", op="__gt__", v2="1.4.5"
), ),
( Scenario("s16", v1="1.0.1-1.v1", op="__gt__", v2="1.0.1"),
"s15", Scenario("s17", v1="1.0.1-1.v1", op="__lt__", v2="1.0.2"),
dict( Scenario("s18", v1="1.0.2-1.v1", op="__gt__", v2="1.0.1"),
v1="1.4.6-SNAPSHOT (private-0986edd9-example)", op="__gt__", v2="1.4.5" Scenario("s19", v1="1.0.2-1.v1", op="__gt__", v2="1.0.1-2"),
), ]
),
("s16", dict(v1="1.0.1-1.v1", op="__gt__", v2="1.0.1")),
("s17", dict(v1="1.0.1-1.v1", op="__lt__", v2="1.0.2")), @pytest.fixture(
("s18", dict(v1="1.0.2-1.v1", op="__gt__", v2="1.0.1")), params=scenarios,
("s19", dict(v1="1.0.2-1.v1", op="__gt__", v2="1.0.1-2")), ids=attrgetter("name"),
)
def scenario(request):
return request.param
@pytest.fixture
def config():
config = JJBConfig()
config.validate()
return config
@pytest.fixture
def registry(config, scenario):
plugin_info = [
{
"shortName": "HerpDerpPlugin",
"longName": "Blah Blah Blah Plugin",
},
{
"shortName": "JankyPlugin1",
"longName": "Not A Real Plugin",
"version": scenario.v1,
},
] ]
return ModuleRegistry(config, plugin_info)
def setUp(self):
super(ModuleRegistryPluginInfoTestsWithScenarios, self).setUp()
jjb_config = JJBConfig() def test_get_plugin_info_dict(registry):
jjb_config.validate() """
The goal of this test is to validate that the plugin_info returned by
ModuleRegistry.get_plugin_info is a dictionary whose key 'shortName' is
the same value as the string argument passed to
ModuleRegistry.get_plugin_info.
"""
plugin_name = "JankyPlugin1"
plugin_info = registry.get_plugin_info(plugin_name)
plugin_info = [ assert isinstance(plugin_info, dict)
{"shortName": "HerpDerpPlugin", "longName": "Blah Blah Blah Plugin"} assert plugin_info["shortName"] == plugin_name
]
plugin_info.append(
{
"shortName": "JankyPlugin1",
"longName": "Not A Real Plugin",
"version": self.v1,
}
)
self.addDetail("plugin_info", text_content(str(plugin_info)))
self.registry = ModuleRegistry(jjb_config, plugin_info)
def tearDown(self): def test_get_plugin_info_dict_using_longName(registry):
super(ModuleRegistryPluginInfoTestsWithScenarios, self).tearDown() """
The goal of this test is to validate that the plugin_info returned by
ModuleRegistry.get_plugin_info is a dictionary whose key 'longName' is
the same value as the string argument passed to
ModuleRegistry.get_plugin_info.
"""
plugin_name = "Blah Blah Blah Plugin"
plugin_info = registry.get_plugin_info(plugin_name)
def test_get_plugin_info_dict(self): assert isinstance(plugin_info, dict)
""" assert plugin_info["longName"] == plugin_name
The goal of this test is to validate that the plugin_info returned by
ModuleRegistry.get_plugin_info is a dictionary whose key 'shortName' is
the same value as the string argument passed to
ModuleRegistry.get_plugin_info.
"""
plugin_name = "JankyPlugin1"
plugin_info = self.registry.get_plugin_info(plugin_name)
self.assertIsInstance(plugin_info, dict)
self.assertEqual(plugin_info["shortName"], plugin_name)
def test_get_plugin_info_dict_using_longName(self): def test_get_plugin_info_dict_no_plugin(registry):
""" """
The goal of this test is to validate that the plugin_info returned by The goal of this test case is to validate the behavior of
ModuleRegistry.get_plugin_info is a dictionary whose key 'longName' is ModuleRegistry.get_plugin_info when the given plugin cannot be found in
the same value as the string argument passed to ModuleRegistry's internal representation of the plugins_info.
ModuleRegistry.get_plugin_info. """
""" plugin_name = "PluginDoesNotExist"
plugin_name = "Blah Blah Blah Plugin" plugin_info = registry.get_plugin_info(plugin_name)
plugin_info = self.registry.get_plugin_info(plugin_name)
self.assertIsInstance(plugin_info, dict) assert isinstance(plugin_info, dict)
self.assertEqual(plugin_info["longName"], plugin_name) assert plugin_info == {}
def test_get_plugin_info_dict_no_plugin(self):
"""
The goal of this test case is to validate the behavior of
ModuleRegistry.get_plugin_info when the given plugin cannot be found in
ModuleRegistry's internal representation of the plugins_info.
"""
plugin_name = "PluginDoesNotExist"
plugin_info = self.registry.get_plugin_info(plugin_name)
self.assertIsInstance(plugin_info, dict) def test_get_plugin_info_dict_no_version(registry):
self.assertEqual(plugin_info, {}) """
The goal of this test case is to validate the behavior of
ModuleRegistry.get_plugin_info when the given plugin shortName returns
plugin_info dict that has no version string. In a sane world where
plugin frameworks like Jenkins' are sane this should never happen, but
I am including this test and the corresponding default behavior
because, well, it's Jenkins.
"""
plugin_name = "HerpDerpPlugin"
plugin_info = registry.get_plugin_info(plugin_name)
def test_get_plugin_info_dict_no_version(self): assert isinstance(plugin_info, dict)
""" assert plugin_info["shortName"] == plugin_name
The goal of this test case is to validate the behavior of assert plugin_info["version"] == "0"
ModuleRegistry.get_plugin_info when the given plugin shortName returns
plugin_info dict that has no version string. In a sane world where
plugin frameworks like Jenkins' are sane this should never happen, but
I am including this test and the corresponding default behavior
because, well, it's Jenkins.
"""
plugin_name = "HerpDerpPlugin"
plugin_info = self.registry.get_plugin_info(plugin_name)
self.assertIsInstance(plugin_info, dict)
self.assertEqual(plugin_info["shortName"], plugin_name)
self.assertEqual(plugin_info["version"], "0")
def test_plugin_version_comparison(self): def test_plugin_version_comparison(registry, scenario):
""" """
The goal of this test case is to validate that valid tuple versions are The goal of this test case is to validate that valid tuple versions are
ordinally correct. That is, for each given scenario, v1.op(v2)==True ordinally correct. That is, for each given scenario, v1.op(v2)==True
where 'op' is the equality operator defined for the scenario. where 'op' is the equality operator defined for the scenario.
""" """
plugin_name = "JankyPlugin1" plugin_name = "JankyPlugin1"
plugin_info = self.registry.get_plugin_info(plugin_name) plugin_info = registry.get_plugin_info(plugin_name)
v1 = plugin_info.get("version") v1 = plugin_info.get("version")
op = getattr(pkg_resources.parse_version(v1), self.op) op = getattr(pkg_resources.parse_version(v1), scenario.op)
test = op(pkg_resources.parse_version(self.v2)) test = op(pkg_resources.parse_version(scenario.v2))
self.assertTrue( assert test, (
test, f"Unexpectedly found {v1} {scenario.v2} {scenario.op} == False"
msg="Unexpectedly found {0} {2} {1} == False " " when comparing versions!"
"when comparing versions!".format(v1, self.v2, self.op), )
)

View File

@ -13,10 +13,11 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
from testtools.matchers import Equals
import xml.etree.ElementTree as XML import xml.etree.ElementTree as XML
import yaml import yaml
import pytest
from jenkins_jobs.errors import InvalidAttributeError from jenkins_jobs.errors import InvalidAttributeError
from jenkins_jobs.errors import MissingAttributeError from jenkins_jobs.errors import MissingAttributeError
from jenkins_jobs.errors import JenkinsJobsException from jenkins_jobs.errors import JenkinsJobsException
@ -24,111 +25,108 @@ from jenkins_jobs.modules.helpers import (
convert_mapping_to_xml, convert_mapping_to_xml,
check_mutual_exclusive_data_args, check_mutual_exclusive_data_args,
) )
from tests import base
class TestCaseTestHelpers(base.BaseTestCase): def test_convert_mapping_to_xml():
def test_convert_mapping_to_xml(self): """
""" Tests the test_convert_mapping_to_xml_fail_required function
Tests the test_convert_mapping_to_xml_fail_required function """
"""
# Test default values # Test default values
default_root = XML.Element("testdefault") default_root = XML.Element("testdefault")
default_data = yaml.safe_load("string: hello") default_data = yaml.safe_load("string: hello")
default_mappings = [("default-string", "defaultString", "default")] default_mappings = [("default-string", "defaultString", "default")]
convert_mapping_to_xml(
default_root, default_data, default_mappings, fail_required=True
)
result = default_root.find("defaultString").text
result == "default"
# Test user input
user_input_root = XML.Element("testUserInput")
user_input_data = yaml.safe_load("user-input-string: hello")
user_input_mappings = [("user-input-string", "userInputString", "user-input")]
convert_mapping_to_xml(
user_input_root, user_input_data, user_input_mappings, fail_required=True
)
result = user_input_root.find("userInputString").text
result == "hello"
# Test missing required input
required_root = XML.Element("testrequired")
required_data = yaml.safe_load("string: hello")
required_mappings = [("required-string", "requiredString", None)]
with pytest.raises(MissingAttributeError):
convert_mapping_to_xml( convert_mapping_to_xml(
default_root, default_data, default_mappings, fail_required=True
)
result = default_root.find("defaultString").text
self.assertThat(result, Equals("default"))
# Test user input
user_input_root = XML.Element("testUserInput")
user_input_data = yaml.safe_load("user-input-string: hello")
user_input_mappings = [("user-input-string", "userInputString", "user-input")]
convert_mapping_to_xml(
user_input_root, user_input_data, user_input_mappings, fail_required=True
)
result = user_input_root.find("userInputString").text
self.assertThat(result, Equals("hello"))
# Test missing required input
required_root = XML.Element("testrequired")
required_data = yaml.safe_load("string: hello")
required_mappings = [("required-string", "requiredString", None)]
self.assertRaises(
MissingAttributeError,
convert_mapping_to_xml,
required_root, required_root,
required_data, required_data,
required_mappings, required_mappings,
fail_required=True, fail_required=True,
) )
# Test invalid user input for list # Test invalid user input for list
user_input_root = XML.Element("testUserInput") user_input_root = XML.Element("testUserInput")
user_input_data = yaml.safe_load("user-input-string: bye") user_input_data = yaml.safe_load("user-input-string: bye")
valid_inputs = ["hello"] valid_inputs = ["hello"]
user_input_mappings = [ user_input_mappings = [
("user-input-string", "userInputString", "user-input", valid_inputs) ("user-input-string", "userInputString", "user-input", valid_inputs)
] ]
self.assertRaises( with pytest.raises(InvalidAttributeError):
InvalidAttributeError, convert_mapping_to_xml(
convert_mapping_to_xml,
user_input_root, user_input_root,
user_input_data, user_input_data,
user_input_mappings, user_input_mappings,
) )
# Test invalid user input for dict # Test invalid user input for dict
user_input_root = XML.Element("testUserInput") user_input_root = XML.Element("testUserInput")
user_input_data = yaml.safe_load("user-input-string: later") user_input_data = yaml.safe_load("user-input-string: later")
valid_inputs = {"hello": "world"} valid_inputs = {"hello": "world"}
user_input_mappings = [ user_input_mappings = [
("user-input-string", "userInputString", "user-input", valid_inputs) ("user-input-string", "userInputString", "user-input", valid_inputs)
] ]
self.assertRaises( with pytest.raises(InvalidAttributeError):
InvalidAttributeError, convert_mapping_to_xml(
convert_mapping_to_xml,
user_input_root, user_input_root,
user_input_data, user_input_data,
user_input_mappings, user_input_mappings,
) )
# Test invalid key for dict # Test invalid key for dict
user_input_root = XML.Element("testUserInput") user_input_root = XML.Element("testUserInput")
user_input_data = yaml.safe_load("user-input-string: world") user_input_data = yaml.safe_load("user-input-string: world")
valid_inputs = {"hello": "world"} valid_inputs = {"hello": "world"}
user_input_mappings = [ user_input_mappings = [
("user-input-string", "userInputString", "user-input", valid_inputs) ("user-input-string", "userInputString", "user-input", valid_inputs)
] ]
self.assertRaises( with pytest.raises(InvalidAttributeError):
InvalidAttributeError, convert_mapping_to_xml(
convert_mapping_to_xml,
user_input_root, user_input_root,
user_input_data, user_input_data,
user_input_mappings, user_input_mappings,
) )
def test_check_mutual_exclusive_data_args_no_mutual_exclusive(self):
@check_mutual_exclusive_data_args(0, "foo", "bar")
@check_mutual_exclusive_data_args(0, "foo", "baz")
def func(data):
pass
func({"baz": "qaz", "bar": "qaz"}) def test_check_mutual_exclusive_data_args_no_mutual_exclusive():
@check_mutual_exclusive_data_args(0, "foo", "bar")
@check_mutual_exclusive_data_args(0, "foo", "baz")
def func(data):
pass
def test_check_mutual_exclusive_data_args_mutual_exclusive(self): func({"baz": "qaz", "bar": "qaz"})
@check_mutual_exclusive_data_args(0, "foo", "bar")
@check_mutual_exclusive_data_args(0, "foo", "baz")
def func(data):
pass
self.assertRaises(JenkinsJobsException, func, {"foo": "qaz", "bar": "qaz"})
def test_check_mutual_exclusive_data_args_mutual_exclusive():
@check_mutual_exclusive_data_args(0, "foo", "bar")
@check_mutual_exclusive_data_args(0, "foo", "baz")
def func(data):
pass
with pytest.raises(JenkinsJobsException):
func({"foo": "qaz", "bar": "qaz"})

View File

@ -13,15 +13,25 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
from tests import base from operator import attrgetter
from tests.base import mock from pathlib import Path
import os
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import project_multibranch from jenkins_jobs.modules import project_multibranch
@mock.patch("uuid.uuid4", mock.Mock(return_value="1-1-1-1-1")) fixtures_dir = Path(__file__).parent / "fixtures"
class TestCaseMultibranchPipeline(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path) @pytest.fixture(
default_config_file = "/dev/null" params=scenario_list(fixtures_dir),
klass = project_multibranch.WorkflowMultiBranch ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(project_multibranch.WorkflowMultiBranch)

View File

@ -15,13 +15,25 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import notifications from jenkins_jobs.modules import notifications
from tests import base
class TestCaseModuleNotifications(base.BaseScenariosTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = notifications.Notifications @pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(notifications.Notifications)

View File

@ -15,53 +15,52 @@
import time import time
from multiprocessing import cpu_count from multiprocessing import cpu_count
from testtools import matchers
from testtools import TestCase
from jenkins_jobs.parallel import concurrent from jenkins_jobs.parallel import concurrent
from tests.base import mock
class TestCaseParallel(TestCase): def test_parallel_correct_order():
def test_parallel_correct_order(self): expected = list(range(10, 20))
expected = list(range(10, 20))
@concurrent @concurrent
def parallel_test(num_base, num_extra): def parallel_test(num_base, num_extra):
return num_base + num_extra return num_base + num_extra
parallel_args = [{"num_extra": num} for num in range(10)] parallel_args = [{"num_extra": num} for num in range(10)]
result = parallel_test(10, concurrent=parallel_args) result = parallel_test(10, concurrent=parallel_args)
self.assertThat(result, matchers.Equals(expected)) assert result == expected
def test_parallel_time_less_than_serial(self):
@concurrent
def wait(secs):
time.sleep(secs)
before = time.time() def test_parallel_time_less_than_serial():
# ten threads to make it as fast as possible @concurrent
wait(concurrent=[{"secs": 1} for _ in range(10)], n_workers=10) def wait(secs):
after = time.time() time.sleep(secs)
self.assertThat(after - before, matchers.LessThan(5))
def test_parallel_single_thread(self): before = time.time()
expected = list(range(10, 20)) # ten threads to make it as fast as possible
wait(concurrent=[{"secs": 1} for _ in range(10)], n_workers=10)
after = time.time()
assert after - before < 5
@concurrent
def parallel_test(num_base, num_extra):
return num_base + num_extra
parallel_args = [{"num_extra": num} for num in range(10)] def test_parallel_single_thread():
result = parallel_test(10, concurrent=parallel_args, n_workers=1) expected = list(range(10, 20))
self.assertThat(result, matchers.Equals(expected))
@mock.patch("jenkins_jobs.parallel.cpu_count", wraps=cpu_count) @concurrent
def test_use_auto_detect_cores(self, mockCpu_count): def parallel_test(num_base, num_extra):
@concurrent return num_base + num_extra
def parallel_test():
return True
result = parallel_test(concurrent=[{} for _ in range(10)], n_workers=0) parallel_args = [{"num_extra": num} for num in range(10)]
self.assertThat(result, matchers.Equals([True for _ in range(10)])) result = parallel_test(10, concurrent=parallel_args, n_workers=1)
mockCpu_count.assert_called_once_with() result == expected
def test_use_auto_detect_cores(mocker):
mock = mocker.patch("jenkins_jobs.parallel.cpu_count", wraps=cpu_count)
@concurrent
def parallel_test():
return True
result = parallel_test(concurrent=[{} for _ in range(10)], n_workers=0)
assert result == [True for _ in range(10)]
mock.assert_called_once_with()

View File

@ -15,13 +15,25 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import parameters from jenkins_jobs.modules import parameters
from tests import base
class TestCaseModuleParameters(base.BaseScenariosTestCase): fixtures_dir = Path(__file__).parent
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = parameters.Parameters @pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(parameters.Parameters)

View File

@ -15,13 +15,25 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import properties from jenkins_jobs.modules import properties
from tests import base
class TestCaseModuleProperties(base.BaseScenariosTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = properties.Properties @pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(properties.Properties)

View File

@ -15,13 +15,25 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import publishers from jenkins_jobs.modules import publishers
from tests import base
class TestCaseModulePublishers(base.BaseScenariosTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = publishers.Publishers @pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(publishers.Publishers)

View File

@ -14,13 +14,25 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import reporters from jenkins_jobs.modules import reporters
from tests import base
class TestCaseModuleReporters(base.BaseScenariosTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = reporters.Reporters @pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(reporters.Reporters)

View File

@ -15,13 +15,25 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import scm from jenkins_jobs.modules import scm
from tests import base
class TestCaseModuleSCM(base.BaseScenariosTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = scm.SCM @pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(scm.SCM)

View File

@ -15,13 +15,25 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import triggers from jenkins_jobs.modules import triggers
from tests import base
class TestCaseModuleTriggers(base.BaseScenariosTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = triggers.Triggers @pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(triggers.Triggers)

View File

@ -12,47 +12,62 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License.import os # limitations under the License.import os
import os from operator import attrgetter
from pathlib import Path
import pytest
from jenkins_jobs.modules import view_all from jenkins_jobs.modules import view_all
from jenkins_jobs.modules import view_delivery_pipeline from jenkins_jobs.modules import view_delivery_pipeline
from jenkins_jobs.modules import view_list from jenkins_jobs.modules import view_list
from jenkins_jobs.modules import view_nested from jenkins_jobs.modules import view_nested
from jenkins_jobs.modules import view_pipeline from jenkins_jobs.modules import view_pipeline
from jenkins_jobs.modules import view_sectioned from jenkins_jobs.modules import view_sectioned
from tests import base from tests.enum_scenarios import scenario_list
class TestCaseModuleViewAll(base.BaseScenariosTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = view_all.All
class TestCaseModuleViewDeliveryPipeline(base.BaseScenariosTestCase): @pytest.fixture(
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures") params=scenario_list(fixtures_dir),
scenarios = base.get_scenarios(fixtures_path) ids=attrgetter("name"),
klass = view_delivery_pipeline.DeliveryPipeline )
def scenario(request):
return request.param
class TestCaseModuleViewList(base.BaseScenariosTestCase): # But actually this is a view.
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures") @pytest.fixture
scenarios = base.get_scenarios(fixtures_path) def project(input, registry):
klass = view_list.List type_to_class = {
"all": view_all.All,
"delivery_pipeline": view_delivery_pipeline.DeliveryPipeline,
"list": view_list.List,
"nested": view_nested.Nested,
"pipeline": view_pipeline.Pipeline,
"sectioned": view_sectioned.Sectioned,
}
try:
class_name = input["view-type"]
except KeyError:
raise RuntimeError("'view-type' element is expected in input yaml")
cls = type_to_class[class_name]
return cls(registry)
class TestCaseModuleViewNested(base.BaseScenariosTestCase): view_class_list = [
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures") view_all.All,
scenarios = base.get_scenarios(fixtures_path) view_delivery_pipeline.DeliveryPipeline,
klass = view_nested.Nested view_list.List,
view_nested.Nested,
view_pipeline.Pipeline,
view_sectioned.Sectioned,
]
class TestCaseModuleViewPipeline(base.BaseScenariosTestCase): @pytest.mark.parametrize(
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures") "view_class", [pytest.param(cls, id=cls.__name__) for cls in view_class_list]
scenarios = base.get_scenarios(fixtures_path) )
klass = view_pipeline.Pipeline def test_view(view_class, check_generator):
check_generator(view_class)
class TestCaseModuleViewSectioned(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = view_sectioned.Sectioned

View File

@ -15,13 +15,25 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from operator import attrgetter
from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import wrappers from jenkins_jobs.modules import wrappers
from tests import base
class TestCaseModuleWrappers(base.BaseScenariosTestCase): fixtures_dir = Path(__file__).parent / "fixtures"
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = wrappers.Wrappers @pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(wrappers.Wrappers)

View File

@ -12,65 +12,67 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import os from pathlib import Path
from jenkins_jobs import errors import pytest
from jenkins_jobs import parser
from jenkins_jobs import registry
from jenkins_jobs import xml_config
from tests import base from jenkins_jobs.config import JJBConfig
from jenkins_jobs.errors import JenkinsJobsException
from jenkins_jobs.parser import YamlParser
from jenkins_jobs.registry import ModuleRegistry
from jenkins_jobs.xml_config import XmlJobGenerator, XmlViewGenerator
class TestXmlJobGeneratorExceptions(base.BaseTestCase): fixtures_dir = Path(__file__).parent / "exceptions"
fixtures_path = os.path.join(os.path.dirname(__file__), "exceptions")
def test_invalid_project(self):
self.conf_filename = None
config = self._get_config()
yp = parser.YamlParser(config) @pytest.fixture
yp.parse(os.path.join(self.fixtures_path, "invalid_project.yaml")) def config():
config = JJBConfig()
config.validate()
return config
reg = registry.ModuleRegistry(config)
job_data, _ = yp.expandYaml(reg)
# Generate the XML tree @pytest.fixture
xml_generator = xml_config.XmlJobGenerator(reg) def parser(config):
e = self.assertRaises( return YamlParser(config)
errors.JenkinsJobsException, xml_generator.generateXML, job_data
)
self.assertIn("Unrecognized project-type:", str(e))
def test_invalid_view(self):
self.conf_filename = None
config = self._get_config()
yp = parser.YamlParser(config) @pytest.fixture
yp.parse(os.path.join(self.fixtures_path, "invalid_view.yaml")) def registry(config):
return ModuleRegistry(config)
reg = registry.ModuleRegistry(config)
_, view_data = yp.expandYaml(reg)
# Generate the XML tree def test_invalid_project(parser, registry):
xml_generator = xml_config.XmlViewGenerator(reg) parser.parse(str(fixtures_dir / "invalid_project.yaml"))
e = self.assertRaises( jobs, views = parser.expandYaml(registry)
errors.JenkinsJobsException, xml_generator.generateXML, view_data
)
self.assertIn("Unrecognized view-type:", str(e))
def test_incorrect_template_params(self): generator = XmlJobGenerator(registry)
self.conf_filename = None
config = self._get_config()
yp = parser.YamlParser(config) with pytest.raises(JenkinsJobsException) as excinfo:
yp.parse(os.path.join(self.fixtures_path, "failure_formatting_component.yaml")) generator.generateXML(jobs)
assert "Unrecognized project-type:" in str(excinfo.value)
reg = registry.ModuleRegistry(config)
reg.set_parser_data(yp.data)
job_data_list, view_data_list = yp.expandYaml(reg)
xml_generator = xml_config.XmlJobGenerator(reg) def test_invalid_view(parser, registry):
self.assertRaises(Exception, xml_generator.generateXML, job_data_list) parser.parse(str(fixtures_dir / "invalid_view.yaml"))
self.assertIn("Failure formatting component", self.logger.output) jobs, views = parser.expandYaml(registry)
self.assertIn("Problem formatting with args", self.logger.output)
generator = XmlViewGenerator(registry)
with pytest.raises(JenkinsJobsException) as excinfo:
generator.generateXML(views)
assert "Unrecognized view-type:" in str(excinfo.value)
def test_template_params(caplog, parser, registry):
parser.parse(str(fixtures_dir / "failure_formatting_component.yaml"))
registry.set_parser_data(parser.data)
jobs, views = parser.expandYaml(registry)
generator = XmlJobGenerator(registry)
with pytest.raises(Exception):
generator.generateXML(jobs)
assert "Failure formatting component" in caplog.text
assert "Problem formatting with args" in caplog.text

View File

@ -0,0 +1,41 @@
# Joint copyright:
# - Copyright 2012,2013 Wikimedia Foundation
# - Copyright 2012,2013 Antoine "hashar" Musso
# - Copyright 2013 Arnaud Fabre
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from operator import attrgetter
from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
fixtures_dir = Path(__file__).parent / "fixtures"
@pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_job):
# Some tests using config with 'include_path' expect JJB root to be current directory.
os.chdir(Path(__file__).parent / "../..")
check_job()

View File

@ -0,0 +1,53 @@
# Joint copyright:
# - Copyright 2012,2013 Wikimedia Foundation
# - Copyright 2012,2013 Antoine "hashar" Musso
# - Copyright 2013 Arnaud Fabre
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from pathlib import Path
import pytest
exceptions_dir = Path(__file__).parent / "exceptions"
# Override to avoid scenarios usage.
@pytest.fixture
def config_path():
return os.devnull
# Override to avoid scenarios usage.
@pytest.fixture
def plugins_info():
return None
def test_incorrect_template_dimensions(caplog, check_parser):
in_path = exceptions_dir / "incorrect_template_dimensions.yaml"
with pytest.raises(Exception) as excinfo:
check_parser(in_path)
assert "'NoneType' object is not iterable" in str(excinfo.value)
assert "- branch: current\n current: null" in caplog.text
@pytest.mark.parametrize("name", ["template", "params"])
def test_failure_formatting(caplog, check_parser, name):
in_path = exceptions_dir / f"failure_formatting_{name}.yaml"
with pytest.raises(Exception):
check_parser(in_path)
assert f"Failure formatting {name}" in caplog.text
assert "Problem formatting with args" in caplog.text

View File

@ -1,67 +0,0 @@
# Joint copyright:
# - Copyright 2012,2013 Wikimedia Foundation
# - Copyright 2012,2013 Antoine "hashar" Musso
# - Copyright 2013 Arnaud Fabre
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from jenkins_jobs import parser
from jenkins_jobs import registry
from tests import base
class TestCaseModuleYamlInclude(base.SingleJobTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
class TestYamlParserExceptions(base.BaseTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), "exceptions")
def test_incorrect_template_dimensions(self):
self.conf_filename = None
config = self._get_config()
yp = parser.YamlParser(config)
yp.parse(os.path.join(self.fixtures_path, "incorrect_template_dimensions.yaml"))
reg = registry.ModuleRegistry(config)
e = self.assertRaises(Exception, yp.expandYaml, reg)
self.assertIn("'NoneType' object is not iterable", str(e))
self.assertIn("- branch: current\n current: null", self.logger.output)
class TestYamlParserFailureFormattingExceptions(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), "exceptions")
scenarios = [("s1", {"name": "template"}), ("s2", {"name": "params"})]
def test_yaml_snippet(self):
self.conf_filename = None
config = self._get_config()
yp = parser.YamlParser(config)
yp.parse(
os.path.join(
self.fixtures_path, "failure_formatting_{}.yaml".format(self.name)
)
)
reg = registry.ModuleRegistry(config)
self.assertRaises(Exception, yp.expandYaml, reg)
self.assertIn("Failure formatting {}".format(self.name), self.logger.output)
self.assertIn("Problem formatting with args", self.logger.output)

View File

@ -22,7 +22,7 @@ commands =
- find . -type d -name "__pycache__" -delete - find . -type d -name "__pycache__" -delete
# test that we can call jjb using both variants with same results # test that we can call jjb using both variants with same results
bash {toxinidir}/tools/test-commands.sh bash {toxinidir}/tools/test-commands.sh
stestr run --slowest {posargs} pytest {posargs}
whitelist_externals = whitelist_externals =
bash bash
find find
@ -34,16 +34,14 @@ commands =
bash -c "if [ -d {toxinidir}/../python-jenkins ]; then \ bash -c "if [ -d {toxinidir}/../python-jenkins ]; then \
pip install -q -U -e 'git+file://{toxinidir}/../python-jenkins#egg=python-jenkins' ; else \ pip install -q -U -e 'git+file://{toxinidir}/../python-jenkins#egg=python-jenkins' ; else \
pip install -q -U -e 'git+https://git.openstack.org/openstack/python-jenkins@master#egg=python-jenkins' ; fi " pip install -q -U -e 'git+https://git.openstack.org/openstack/python-jenkins@master#egg=python-jenkins' ; fi "
stestr run --slowest {posargs} pytest {posargs}
[testenv:cover] [testenv:cover]
setenv = setenv =
{[testenv]setenv} {[testenv]setenv}
PYTHON=coverage run --source jenkins_jobs --parallel-mode
commands = commands =
{[tox]install_test_deps} {[tox]install_test_deps}
stestr run {posargs} coverage run --source jenkins_jobs -m pytest {posargs}
coverage combine
coverage html -d cover coverage html -d cover
coverage xml -o cover/coverage.xml coverage xml -o cover/coverage.xml