Move tests to pytest

Pytest turns each scenario into an individually selectable test.
Being able to run each scenario separately is very useful during development.
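For example, a single scenario can now be run on its own using pytest's -k selection, e.g. pytest tests -k <scenario-name> (the selection expression is illustrative).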

Change-Id: I4b1c990a1fd839ce327cd7faa27159a9b9632fed
Vsevolod Fedorov 2022-06-15 11:12:23 +03:00
parent 248a2bddb7
commit f4d64f9f66
49 changed files with 2042 additions and 1953 deletions

View File

@@ -90,6 +90,10 @@ execute the command::
tox -e py38
Unit tests can be run in parallel using the pytest-parallel plugin::
tox -e py38 -- --workers=auto
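Since arguments after ``--`` are passed through tox to pytest, individual tests can also be selected this way, e.g. ``tox -e py38 -- -k <test-name>`` (the selection expression is illustrative).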
* Note: View ``tox.ini`` to run tests on other versions of Python,
generating the documentation and additionally for any special notes
on running the test to validate documentation external URLs from behind

View File

@@ -3,13 +3,11 @@
# process, which may cause wedges in the gate later.
coverage>=4.0 # Apache-2.0
fixtures>=3.0.0 # Apache-2.0/BSD
python-subunit>=0.0.18 # Apache-2.0/BSD
sphinx>=4.4.0
testscenarios>=0.4 # Apache-2.0/BSD
testtools>=1.4.0 # MIT
stestr>=2.0.0,!=3.0.0 # Apache-2.0/BSD
tox>=2.9.1 # MIT
mock>=2.0; python_version < '3.0' # BSD
sphinxcontrib-programoutput
pluggy<1.0.0 # the last version that supports Python 2
pytest==7.1.2
pytest-mock==3.7.0
pytest-parallel==0.1.1

View File

@@ -1,409 +0,0 @@
#!/usr/bin/env python
#
# Joint copyright:
# - Copyright 2012,2013 Wikimedia Foundation
# - Copyright 2012,2013 Antoine "hashar" Musso
# - Copyright 2013 Arnaud Fabre
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import doctest
import configparser
import io
import json
import logging
import os
import pkg_resources
import re
import xml.etree.ElementTree as XML
import fixtures
import six
from six.moves import StringIO
import testtools
from testtools.content import text_content
import testscenarios
from yaml import safe_dump
from jenkins_jobs.config import JJBConfig
from jenkins_jobs.errors import InvalidAttributeError
import jenkins_jobs.local_yaml as yaml
from jenkins_jobs.alphanum import AlphanumSort
from jenkins_jobs.modules import project_externaljob
from jenkins_jobs.modules import project_flow
from jenkins_jobs.modules import project_githuborg
from jenkins_jobs.modules import project_matrix
from jenkins_jobs.modules import project_maven
from jenkins_jobs.modules import project_multibranch
from jenkins_jobs.modules import project_multijob
from jenkins_jobs.modules import view_all
from jenkins_jobs.modules import view_delivery_pipeline
from jenkins_jobs.modules import view_list
from jenkins_jobs.modules import view_nested
from jenkins_jobs.modules import view_pipeline
from jenkins_jobs.modules import view_sectioned
from jenkins_jobs.parser import YamlParser
from jenkins_jobs.registry import ModuleRegistry
from jenkins_jobs.xml_config import XmlJob
from jenkins_jobs.xml_config import XmlJobGenerator
# This dance deals with the fact that we want unittest.mock if
# we're on Python 3.4 and later, and non-stdlib mock otherwise.
try:
from unittest import mock # noqa
except ImportError:
import mock # noqa
def get_scenarios(
fixtures_path,
in_ext="yaml",
out_ext="xml",
plugins_info_ext="plugins_info.yaml",
filter_func=None,
):
"""Returns a list of scenarios, each scenario being described
by two parameters (yaml and xml filenames by default).
- content of the fixture output file (aka expected)
"""
scenarios = []
files = {}
for dirpath, _, fs in os.walk(fixtures_path):
for fn in fs:
if fn in files:
files[fn].append(os.path.join(dirpath, fn))
else:
files[fn] = [os.path.join(dirpath, fn)]
input_files = [
files[f][0] for f in files if re.match(r".*\.{0}$".format(in_ext), f)
]
for input_filename in input_files:
if input_filename.endswith(plugins_info_ext):
continue
if callable(filter_func) and filter_func(input_filename):
continue
output_candidate = re.sub(
r"\.{0}$".format(in_ext), ".{0}".format(out_ext), input_filename
)
# assume empty file if no output candidate found
if os.path.basename(output_candidate) in files:
out_filenames = files[os.path.basename(output_candidate)]
else:
out_filenames = None
plugins_info_candidate = re.sub(
r"\.{0}$".format(in_ext), ".{0}".format(plugins_info_ext), input_filename
)
if os.path.basename(plugins_info_candidate) not in files:
plugins_info_candidate = None
conf_candidate = re.sub(r"\.yaml$|\.json$", ".conf", input_filename)
conf_filename = files.get(os.path.basename(conf_candidate), None)
if conf_filename:
conf_filename = conf_filename[0]
else:
# for testing purposes we want to avoid using user config files
conf_filename = os.devnull
scenarios.append(
(
input_filename,
{
"in_filename": input_filename,
"out_filenames": out_filenames,
"conf_filename": conf_filename,
"plugins_info_filename": plugins_info_candidate,
},
)
)
return scenarios
class BaseTestCase(testtools.TestCase):
# TestCase settings:
maxDiff = None # always dump text difference
longMessage = True # keep normal error message when providing our
def setUp(self):
super(BaseTestCase, self).setUp()
self.logger = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
def _read_utf8_content(self):
# if None assume empty file
if not self.out_filenames:
return ""
# Read XML content, assuming it is unicode encoded
xml_content = ""
for f in sorted(self.out_filenames):
with io.open(f, "r", encoding="utf-8") as xml_file:
xml_content += "%s" % xml_file.read()
return xml_content
def _read_yaml_content(self, filename):
with io.open(filename, "r", encoding="utf-8") as yaml_file:
yaml_content = yaml.load(yaml_file)
return yaml_content
def _get_config(self):
jjb_config = JJBConfig(self.conf_filename)
jjb_config.validate()
return jjb_config
class BaseScenariosTestCase(testscenarios.TestWithScenarios, BaseTestCase):
scenarios = []
fixtures_path = None
@mock.patch("pkg_resources.iter_entry_points")
def test_yaml_snippet(self, mock):
if not self.in_filename:
return
jjb_config = self._get_config()
expected_xml = self._read_utf8_content()
yaml_content = self._read_yaml_content(self.in_filename)
plugins_info = None
if self.plugins_info_filename:
plugins_info = self._read_yaml_content(self.plugins_info_filename)
self.addDetail(
"plugins-info-filename", text_content(self.plugins_info_filename)
)
self.addDetail("plugins-info", text_content(str(plugins_info)))
parser = YamlParser(jjb_config)
e = pkg_resources.EntryPoint.parse
d = pkg_resources.Distribution()
config = configparser.ConfigParser()
config.read(os.path.dirname(__file__) + "/../setup.cfg")
groups = {}
for key in config["entry_points"]:
groups[key] = list()
for line in config["entry_points"][key].split("\n"):
if "" == line.strip():
continue
groups[key].append(e(line, dist=d))
def mock_iter_entry_points(group, name=None):
return (
entry for entry in groups[group] if name is None or name == entry.name
)
mock.side_effect = mock_iter_entry_points
registry = ModuleRegistry(jjb_config, plugins_info)
registry.set_parser_data(parser.data)
pub = self.klass(registry)
project = None
if "project-type" in yaml_content:
if yaml_content["project-type"] == "maven":
project = project_maven.Maven(registry)
elif yaml_content["project-type"] == "matrix":
project = project_matrix.Matrix(registry)
elif yaml_content["project-type"] == "flow":
project = project_flow.Flow(registry)
elif yaml_content["project-type"] == "githuborg":
project = project_githuborg.GithubOrganization(registry)
elif yaml_content["project-type"] == "multijob":
project = project_multijob.MultiJob(registry)
elif yaml_content["project-type"] == "multibranch":
project = project_multibranch.WorkflowMultiBranch(registry)
elif yaml_content["project-type"] == "multibranch-defaults":
project = project_multibranch.WorkflowMultiBranchDefaults(
registry
) # noqa
elif yaml_content["project-type"] == "externaljob":
project = project_externaljob.ExternalJob(registry)
if "view-type" in yaml_content:
if yaml_content["view-type"] == "all":
project = view_all.All(registry)
elif yaml_content["view-type"] == "delivery_pipeline":
project = view_delivery_pipeline.DeliveryPipeline(registry)
elif yaml_content["view-type"] == "list":
project = view_list.List(registry)
elif yaml_content["view-type"] == "nested":
project = view_nested.Nested(registry)
elif yaml_content["view-type"] == "pipeline":
project = view_pipeline.Pipeline(registry)
elif yaml_content["view-type"] == "sectioned":
project = view_sectioned.Sectioned(registry)
else:
raise InvalidAttributeError("view-type", yaml_content["view-type"])
if project:
xml_project = project.root_xml(yaml_content)
else:
xml_project = XML.Element("project")
# Generate the XML tree directly with modules/general
pub.gen_xml(xml_project, yaml_content)
# check output file is under correct path
if "name" in yaml_content:
prefix = os.path.dirname(self.in_filename)
# split using '/' since fullname uses URL path separator
expected_folders = [
os.path.normpath(
os.path.join(
prefix,
"/".join(parser._getfullname(yaml_content).split("/")[:-1]),
)
)
]
actual_folders = [os.path.dirname(f) for f in self.out_filenames]
self.assertEquals(
expected_folders,
actual_folders,
"Output file under wrong path, was '%s', should be '%s'"
% (
self.out_filenames[0],
os.path.join(
expected_folders[0], os.path.basename(self.out_filenames[0])
),
),
)
# Prettify generated XML
pretty_xml = XmlJob(xml_project, "fixturejob").output().decode("utf-8")
self.assertThat(
pretty_xml,
testtools.matchers.DocTestMatches(
expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF
),
)
class SingleJobTestCase(BaseScenariosTestCase):
def test_yaml_snippet(self):
config = self._get_config()
expected_xml = (
self._read_utf8_content()
.strip()
.replace("<BLANKLINE>", "")
.replace("\n\n", "\n")
)
parser = YamlParser(config)
parser.parse(self.in_filename)
plugins_info = None
if self.plugins_info_filename:
plugins_info = self._read_yaml_content(self.plugins_info_filename)
self.addDetail(
"plugins-info-filename", text_content(self.plugins_info_filename)
)
self.addDetail("plugins-info", text_content(str(plugins_info)))
registry = ModuleRegistry(config, plugins_info)
registry.set_parser_data(parser.data)
job_data_list, view_data_list = parser.expandYaml(registry)
# Generate the XML tree
xml_generator = XmlJobGenerator(registry)
xml_jobs = xml_generator.generateXML(job_data_list)
xml_jobs.sort(key=AlphanumSort)
# check reference files are under correct path for folders
prefix = os.path.dirname(self.in_filename)
# split using '/' since fullname uses URL path separator
expected_folders = list(
set(
[
os.path.normpath(
os.path.join(prefix, "/".join(job_data["name"].split("/")[:-1]))
)
for job_data in job_data_list
]
)
)
actual_folders = [os.path.dirname(f) for f in self.out_filenames]
six.assertCountEqual(
self,
expected_folders,
actual_folders,
"Output file under wrong path, was '%s', should be '%s'"
% (
self.out_filenames[0],
os.path.join(
expected_folders[0], os.path.basename(self.out_filenames[0])
),
),
)
# Prettify generated XML
pretty_xml = (
"\n".join(job.output().decode("utf-8") for job in xml_jobs)
.strip()
.replace("\n\n", "\n")
)
self.assertThat(
pretty_xml,
testtools.matchers.DocTestMatches(
expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF
),
)
class JsonTestCase(BaseScenariosTestCase):
def test_yaml_snippet(self):
expected_json = self._read_utf8_content()
yaml_content = self._read_yaml_content(self.in_filename)
pretty_json = json.dumps(yaml_content, indent=4, separators=(",", ": "))
self.assertThat(
pretty_json,
testtools.matchers.DocTestMatches(
expected_json, doctest.ELLIPSIS | doctest.REPORT_NDIFF
),
)
class YamlTestCase(BaseScenariosTestCase):
def test_yaml_snippet(self):
expected_yaml = self._read_utf8_content()
yaml_content = self._read_yaml_content(self.in_filename)
# using json forces expansion of yaml anchors and aliases in the
# outputted yaml, otherwise it would simply appear exactly as
# entered which doesn't show that the net effect of the yaml
data = StringIO(json.dumps(yaml_content))
pretty_yaml = safe_dump(json.load(data), default_flow_style=False)
self.assertThat(
pretty_yaml,
testtools.matchers.DocTestMatches(
expected_yaml, doctest.ELLIPSIS | doctest.REPORT_NDIFF
),
)

View File

@@ -1,3 +1,5 @@
#!/usr/bin/env python
#
# Joint copyright:
# - Copyright 2012,2013 Wikimedia Foundation
# - Copyright 2012,2013 Antoine "hashar" Musso
@@ -15,13 +17,25 @@
# License for the specific language governing permissions and limitations
# under the License.
import os
from operator import attrgetter
from pathlib import Path
import pytest
from tests.enum_scenarios import scenario_list
from jenkins_jobs.modules import builders
from tests import base
class TestCaseModuleBuilders(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
scenarios = base.get_scenarios(fixtures_path)
klass = builders.Builders
fixtures_dir = Path(__file__).parent / "fixtures"
@pytest.fixture(
params=scenario_list(fixtures_dir),
ids=attrgetter("name"),
)
def scenario(request):
return request.param
def test_yaml_snippet(check_generator):
check_generator(builders.Builders)
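
For reference, a minimal sketch of what the ``check_generator`` fixture used above could look like; the real fixture lives in the shared pytest conftest, which is not part of this excerpt, and the ``scenario`` attribute names below are assumptions:

# Sketch only: attribute names on `scenario` (config_path, in_path, out_paths)
# are illustrative assumptions; the generation and comparison steps mirror the
# removed tests/base.py logic shown earlier in this diff.
import xml.etree.ElementTree as XML

import pytest

from jenkins_jobs.config import JJBConfig
from jenkins_jobs.registry import ModuleRegistry
from jenkins_jobs.xml_config import XmlJob
import jenkins_jobs.local_yaml as yaml


@pytest.fixture
def check_generator(scenario):
    """Return a checker that renders the scenario input with the given
    module class and compares the result to the expected XML fixture."""

    def check(cls):
        jjb_config = JJBConfig(str(scenario.config_path))  # assumed attribute
        jjb_config.validate()
        with open(scenario.in_path, encoding="utf-8") as f:  # assumed attribute
            yaml_content = yaml.load(f)
        registry = ModuleRegistry(jjb_config, None)  # no plugins-info stub
        xml_project = XML.Element("project")
        cls(registry).gen_xml(xml_project, yaml_content)
        pretty_xml = XmlJob(xml_project, "fixturejob").output().decode("utf-8")
        expected = scenario.out_paths[0].read_text()  # assumed attribute
        assert pretty_xml.strip() == expected.strip()

    return check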

View File

@@ -13,33 +13,40 @@
# License for the specific language governing permissions and limitations
# under the License.
import os
import os.path
import pytest
import jenkins_jobs
from tests import base
from tests.base import mock
class TestCaseJobCache(base.BaseTestCase):
@mock.patch("jenkins_jobs.builder.JobCache.get_cache_dir", lambda x: "/bad/file")
def test_save_on_exit(self):
"""
Test that the cache is saved on normal object deletion
"""
# Override fixture - do not use this mock.
@pytest.fixture(autouse=True)
def job_cache_mocked(mocker):
pass
with mock.patch("jenkins_jobs.builder.JobCache.save") as save_mock:
with mock.patch("os.path.isfile", return_value=False):
with mock.patch("jenkins_jobs.builder.JobCache._lock"):
jenkins_jobs.builder.JobCache("dummy")
save_mock.assert_called_with()
@mock.patch("jenkins_jobs.builder.JobCache.get_cache_dir", lambda x: "/bad/file")
def test_cache_file(self):
"""
Test providing a cachefile.
"""
test_file = os.path.abspath(__file__)
with mock.patch("os.path.join", return_value=test_file):
with mock.patch("yaml.safe_load"):
with mock.patch("jenkins_jobs.builder.JobCache._lock"):
jenkins_jobs.builder.JobCache("dummy").data = None
def test_save_on_exit(mocker):
"""
Test that the cache is saved on normal object deletion
"""
mocker.patch("jenkins_jobs.builder.JobCache.get_cache_dir", lambda x: "/bad/file")
save_mock = mocker.patch("jenkins_jobs.builder.JobCache.save")
mocker.patch("os.path.isfile", return_value=False)
mocker.patch("jenkins_jobs.builder.JobCache._lock")
jenkins_jobs.builder.JobCache("dummy")
save_mock.assert_called_with()
def test_cache_file(mocker):
"""
Test providing a cachefile.
"""
mocker.patch("jenkins_jobs.builder.JobCache.get_cache_dir", lambda x: "/bad/file")
test_file = os.path.abspath(__file__)
mocker.patch("os.path.join", return_value=test_file)
mocker.patch("yaml.safe_load")
mocker.patch("jenkins_jobs.builder.JobCache._lock")
jenkins_jobs.builder.JobCache("dummy").data = None
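
The ``job_cache_mocked`` override above disables an autouse fixture presumably provided by the shared conftest; a minimal sketch of what that fixture might look like (the body is an assumption, only the name and patch targets come from this module):

# Sketch only: the actual shared fixture is not part of this excerpt.
import pytest


@pytest.fixture(autouse=True)
def job_cache_mocked(mocker):
    # Keep regular tests from touching the real on-disk job cache.
    mocker.patch("jenkins_jobs.builder.JobCache.save")
    mocker.patch("jenkins_jobs.builder.JobCache._lock")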

View File

tests/cmd/conftest.py (new file, 24 lines)
View File

@@ -0,0 +1,24 @@
from pathlib import Path
import pytest
from jenkins_jobs.cli import entry
@pytest.fixture
def fixtures_dir():
return Path(__file__).parent / "fixtures"
@pytest.fixture
def default_config_file(fixtures_dir):
return str(fixtures_dir / "empty_builder.ini")
@pytest.fixture
def execute_jenkins_jobs():
def execute(args):
jenkins_jobs = entry.JenkinsJobs(args)
jenkins_jobs.execute()
return execute
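
Because pytest picks up ``conftest.py`` automatically, these fixtures are available to every test module under tests/cmd/ without explicit imports.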

View File

@@ -0,0 +1,3 @@
[jenkins]
url=http://test-jenkins.with.non.default.url:8080/
query_plugins_info=True

View File

@@ -18,55 +18,57 @@
# of actions by the JJB library, usually through interaction with the
# python-jenkins library.
import os
from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
from unittest import mock
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock)
class DeleteTests(CmdTestsBase):
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_jobs")
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_views")
def test_delete_single_job(self, delete_job_mock, delete_view_mock):
"""
Test handling the deletion of a single Jenkins job.
"""
def test_delete_single_job(mocker, default_config_file, execute_jenkins_jobs):
"""
Test handling the deletion of a single Jenkins job.
"""
args = ["--conf", self.default_config_file, "delete", "test_job"]
self.execute_jenkins_jobs_with_args(args)
mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_jobs")
mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_views")
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_jobs")
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_views")
def test_delete_multiple_jobs(self, delete_job_mock, delete_view_mock):
"""
Test handling the deletion of multiple Jenkins jobs.
"""
args = ["--conf", default_config_file, "delete", "test_job"]
execute_jenkins_jobs(args)
args = ["--conf", self.default_config_file, "delete", "test_job1", "test_job2"]
self.execute_jenkins_jobs_with_args(args)
@mock.patch("jenkins_jobs.builder.JenkinsManager.delete_job")
def test_delete_using_glob_params(self, delete_job_mock):
"""
Test handling the deletion of multiple Jenkins jobs using the glob
parameters feature.
"""
def test_delete_multiple_jobs(mocker, default_config_file, execute_jenkins_jobs):
"""
Test handling the deletion of multiple Jenkins jobs.
"""
args = [
"--conf",
self.default_config_file,
"delete",
"--path",
os.path.join(self.fixtures_path, "cmd-002.yaml"),
"*bar*",
]
self.execute_jenkins_jobs_with_args(args)
calls = [mock.call("bar001"), mock.call("bar002")]
delete_job_mock.assert_has_calls(calls, any_order=True)
self.assertEqual(
delete_job_mock.call_count,
len(calls),
"Jenkins.delete_job() was called '%s' times when "
"expected '%s'" % (delete_job_mock.call_count, len(calls)),
)
mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_jobs")
mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_views")
args = ["--conf", default_config_file, "delete", "test_job1", "test_job2"]
execute_jenkins_jobs(args)
def test_delete_using_glob_params(
mocker, fixtures_dir, default_config_file, execute_jenkins_jobs
):
"""
Test handling the deletion of multiple Jenkins jobs using the glob
parameters feature.
"""
delete_job_mock = mocker.patch("jenkins_jobs.builder.JenkinsManager.delete_job")
args = [
"--conf",
default_config_file,
"delete",
"--path",
str(fixtures_dir / "cmd-002.yaml"),
"*bar*",
]
execute_jenkins_jobs(args)
calls = [mock.call("bar001"), mock.call("bar002")]
delete_job_mock.assert_has_calls(calls, any_order=True)
assert delete_job_mock.call_count == len(
calls
), "Jenkins.delete_job() was called '%s' times when " "expected '%s'" % (
delete_job_mock.call_count,
len(calls),
)

View File

@@ -17,31 +17,30 @@
# of actions by the JJB library, usually through interaction with the
# python-jenkins library.
from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
import pytest
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock)
class DeleteAllTests(CmdTestsBase):
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_all_jobs")
def test_delete_all_accept(self, delete_job_mock):
"""
Test handling the deletion of a single Jenkins job.
"""
def test_delete_all_accept(mocker, default_config_file, execute_jenkins_jobs):
"""
Test accepting the confirmation prompt when deleting all Jenkins jobs.
"""
args = ["--conf", self.default_config_file, "delete-all"]
with mock.patch(
"jenkins_jobs.builder.JenkinsManager.get_views", return_value=[None]
):
with mock.patch("jenkins_jobs.utils.input", return_value="y"):
self.execute_jenkins_jobs_with_args(args)
mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_all_jobs")
mocker.patch("jenkins_jobs.builder.JenkinsManager.get_views", return_value=[None])
mocker.patch("jenkins_jobs.utils.input", return_value="y")
@mock.patch("jenkins_jobs.cli.subcommand.update." "JenkinsManager.delete_all_jobs")
def test_delete_all_abort(self, delete_job_mock):
"""
Test handling the deletion of a single Jenkins job.
"""
args = ["--conf", default_config_file, "delete-all"]
execute_jenkins_jobs(args)
args = ["--conf", self.default_config_file, "delete-all"]
with mock.patch("jenkins_jobs.utils.input", return_value="n"):
self.assertRaises(SystemExit, self.execute_jenkins_jobs_with_args, args)
def test_delete_all_abort(mocker, default_config_file, execute_jenkins_jobs):
"""
Test aborting at the confirmation prompt when deleting all Jenkins jobs.
"""
mocker.patch("jenkins_jobs.cli.subcommand.update.JenkinsManager.delete_all_jobs")
mocker.patch("jenkins_jobs.utils.input", return_value="n")
args = ["--conf", default_config_file, "delete-all"]
with pytest.raises(SystemExit):
execute_jenkins_jobs(args)

View File

@@ -12,87 +12,83 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import io
import os
from testscenarios.testcase import TestWithScenarios
from collections import namedtuple
from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
import pytest
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock)
class ListFromJenkinsTests(TestWithScenarios, CmdTestsBase):
JobsScenario = namedtuple("JobsScenario", "name jobs globs found")
scenarios = [
("single", dict(jobs=["job1"], globs=[], found=["job1"])),
("multiple", dict(jobs=["job1", "job2"], globs=[], found=["job1", "job2"])),
(
"multiple_with_folder",
dict(
jobs=["folder1", "folder1/job1", "folder1/job2"],
globs=[],
found=["folder1", "folder1/job1", "folder1/job2"],
),
),
(
"multiple_with_glob",
dict(
jobs=["job1", "job2", "job3"],
globs=["job[1-2]"],
found=["job1", "job2"],
),
),
(
"multiple_with_multi_glob",
dict(
jobs=["job1", "job2", "job3", "job4"],
globs=["job1", "job[24]"],
found=["job1", "job2", "job4"],
),
),
]
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_jobs")
def test_list(self, get_jobs_mock):
def _get_jobs():
return [{"fullname": fullname} for fullname in self.jobs]
get_jobs_mock.side_effect = _get_jobs
console_out = io.BytesIO()
args = ["--conf", self.default_config_file, "list"] + self.globs
with mock.patch("sys.stdout", console_out):
self.execute_jenkins_jobs_with_args(args)
self.assertEqual(
console_out.getvalue().decode("utf-8").rstrip(), ("\n".join(self.found))
)
jobs_scenarios = [
JobsScenario("single", jobs=["job1"], globs=[], found=["job1"]),
JobsScenario("multiple", jobs=["job1", "job2"], globs=[], found=["job1", "job2"]),
JobsScenario(
"multiple_with_folder",
jobs=["folder1", "folder1/job1", "folder1/job2"],
globs=[],
found=["folder1", "folder1/job1", "folder1/job2"],
),
JobsScenario(
"multiple_with_glob",
jobs=["job1", "job2", "job3"],
globs=["job[1-2]"],
found=["job1", "job2"],
),
JobsScenario(
"multiple_with_multi_glob",
jobs=["job1", "job2", "job3", "job4"],
globs=["job1", "job[24]"],
found=["job1", "job2", "job4"],
),
]
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock)
class ListFromYamlTests(TestWithScenarios, CmdTestsBase):
@pytest.mark.parametrize(
"scenario",
[pytest.param(s, id=s.name) for s in jobs_scenarios],
)
def test_from_jenkins_tests(
capsys, mocker, default_config_file, execute_jenkins_jobs, scenario
):
def get_jobs():
return [{"fullname": fullname} for fullname in scenario.jobs]
scenarios = [
("all", dict(globs=[], found=["bam001", "bar001", "bar002", "baz001"])),
(
"some",
dict(
globs=["*am*", "*002", "bar001"], found=["bam001", "bar001", "bar002"]
),
),
]
mocker.patch("jenkins_jobs.builder.JenkinsManager.get_jobs", side_effect=get_jobs)
def test_list(self):
path = os.path.join(self.fixtures_path, "cmd-002.yaml")
args = ["--conf", default_config_file, "list"] + scenario.globs
execute_jenkins_jobs(args)
console_out = io.BytesIO()
with mock.patch("sys.stdout", console_out):
self.execute_jenkins_jobs_with_args(
["--conf", self.default_config_file, "list", "-p", path] + self.globs
)
expected_out = "\n".join(scenario.found)
captured = capsys.readouterr()
assert captured.out.rstrip() == expected_out
self.assertEqual(
console_out.getvalue().decode("utf-8").rstrip(), ("\n".join(self.found))
)
YamlScenario = namedtuple("YamlScenario", "name globs found")
yaml_scenarios = [
YamlScenario("all", globs=[], found=["bam001", "bar001", "bar002", "baz001"]),
YamlScenario(
"some",
globs=["*am*", "*002", "bar001"],
found=["bam001", "bar001", "bar002"],
),
]
@pytest.mark.parametrize(
"scenario",
[pytest.param(s, id=s.name) for s in yaml_scenarios],
)
def test_from_yaml_tests(
capsys, fixtures_dir, default_config_file, execute_jenkins_jobs, scenario
):
path = fixtures_dir / "cmd-002.yaml"
execute_jenkins_jobs(
["--conf", default_config_file, "list", "-p", str(path)] + scenario.globs
)
expected_out = "\n".join(scenario.found)
captured = capsys.readouterr()
assert captured.out.rstrip() == expected_out

View File

@@ -18,295 +18,294 @@
# of actions by the JJB library, usually through interaction with the
# python-jenkins library.
import difflib
import filecmp
import io
import difflib
import os
import shutil
import tempfile
import yaml
from unittest import mock
import jenkins
from six.moves import StringIO
import testtools
import pytest
from testtools.assertions import assert_that
from jenkins_jobs.cli import entry
from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch("jenkins_jobs.builder.JenkinsManager.get_plugins_info", mock.MagicMock)
class TestTests(CmdTestsBase):
def test_non_existing_job(self):
"""
Run test mode and pass a non-existing job name
(probably better to fail here)
"""
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
"invalid",
]
self.execute_jenkins_jobs_with_args(args)
def test_valid_job(self):
"""
Run test mode and pass a valid job name
"""
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
"foo-job",
]
console_out = io.BytesIO()
with mock.patch("sys.stdout", console_out):
self.execute_jenkins_jobs_with_args(args)
def test_console_output(self):
"""
Run test mode and verify that resulting XML gets sent to the console.
"""
console_out = io.BytesIO()
with mock.patch("sys.stdout", console_out):
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
self.execute_jenkins_jobs_with_args(args)
xml_content = io.open(
os.path.join(self.fixtures_path, "cmd-001.xml"), "r", encoding="utf-8"
).read()
self.assertEqual(console_out.getvalue().decode("utf-8"), xml_content)
def test_output_dir(self):
"""
Run test mode with output to directory and verify that output files are
generated.
"""
tmpdir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmpdir)
args = ["test", os.path.join(self.fixtures_path, "cmd-001.yaml"), "-o", tmpdir]
self.execute_jenkins_jobs_with_args(args)
self.expectThat(
os.path.join(tmpdir, "foo-job"), testtools.matchers.FileExists()
)
def test_output_dir_config_xml(self):
"""
Run test mode with output to directory in "config.xml" mode and verify
that output files are generated.
"""
tmpdir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmpdir)
args = [
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
"-o",
tmpdir,
"--config-xml",
]
self.execute_jenkins_jobs_with_args(args)
self.expectThat(
os.path.join(tmpdir, "foo-job", "config.xml"),
testtools.matchers.FileExists(),
)
def test_stream_input_output_no_encoding_exceed_recursion(self):
"""
Test that we don't have issues processing large number of jobs and
outputting the result if the encoding is not set.
"""
console_out = io.BytesIO()
input_file = os.path.join(self.fixtures_path, "large-number-of-jobs-001.yaml")
with io.open(input_file, "r") as f:
with mock.patch("sys.stdout", console_out):
console_out.encoding = None
with mock.patch("sys.stdin", f):
args = ["test"]
self.execute_jenkins_jobs_with_args(args)
def test_stream_input_output_utf8_encoding(self):
"""
Run test mode simulating using pipes for input and output using
utf-8 encoding
"""
console_out = io.BytesIO()
input_file = os.path.join(self.fixtures_path, "cmd-001.yaml")
with io.open(input_file, "r") as f:
with mock.patch("sys.stdout", console_out):
with mock.patch("sys.stdin", f):
args = ["--conf", self.default_config_file, "test"]
self.execute_jenkins_jobs_with_args(args)
xml_content = io.open(
os.path.join(self.fixtures_path, "cmd-001.xml"), "r", encoding="utf-8"
).read()
value = console_out.getvalue().decode("utf-8")
self.assertEqual(value, xml_content)
def test_stream_input_output_ascii_encoding(self):
"""
Run test mode simulating using pipes for input and output using
ascii encoding with unicode input
"""
console_out = io.BytesIO()
console_out.encoding = "ascii"
input_file = os.path.join(self.fixtures_path, "cmd-001.yaml")
with io.open(input_file, "r") as f:
with mock.patch("sys.stdout", console_out):
with mock.patch("sys.stdin", f):
args = ["--conf", self.default_config_file, "test"]
self.execute_jenkins_jobs_with_args(args)
xml_content = io.open(
os.path.join(self.fixtures_path, "cmd-001.xml"), "r", encoding="utf-8"
).read()
value = console_out.getvalue().decode("ascii")
self.assertEqual(value, xml_content)
def test_stream_output_ascii_encoding_invalid_char(self):
"""
Run test mode simulating using pipes for input and output using
ascii encoding for output with include containing a character
that cannot be converted.
"""
console_out = io.BytesIO()
console_out.encoding = "ascii"
input_file = os.path.join(self.fixtures_path, "unicode001.yaml")
with io.open(input_file, "r", encoding="utf-8") as f:
with mock.patch("sys.stdout", console_out):
with mock.patch("sys.stdin", f):
args = ["--conf", self.default_config_file, "test"]
jenkins_jobs = entry.JenkinsJobs(args)
e = self.assertRaises(UnicodeError, jenkins_jobs.execute)
self.assertIn("'ascii' codec can't encode character", str(e))
@mock.patch("jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML")
@mock.patch("jenkins_jobs.cli.subcommand.update.ModuleRegistry")
def test_plugins_info_stub_option(self, registry_mock, generateXML_mock):
"""
Test handling of plugins_info stub option.
"""
plugins_info_stub_yaml_file = os.path.join(
self.fixtures_path, "plugins-info.yaml"
)
args = [
"--conf",
os.path.join(self.fixtures_path, "cmd-001.conf"),
"test",
"-p",
plugins_info_stub_yaml_file,
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
self.execute_jenkins_jobs_with_args(args)
with io.open(plugins_info_stub_yaml_file, "r", encoding="utf-8") as yaml_file:
plugins_info_list = yaml.safe_load(yaml_file)
registry_mock.assert_called_with(mock.ANY, plugins_info_list)
@mock.patch("jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML")
@mock.patch("jenkins_jobs.cli.subcommand.update.ModuleRegistry")
def test_bogus_plugins_info_stub_option(self, registry_mock, generateXML_mock):
"""
Verify that a JenkinsJobException is raised if the plugins_info stub
file does not yield a list as its top-level object.
"""
plugins_info_stub_yaml_file = os.path.join(
self.fixtures_path, "bogus-plugins-info.yaml"
)
args = [
"--conf",
os.path.join(self.fixtures_path, "cmd-001.conf"),
"test",
"-p",
plugins_info_stub_yaml_file,
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
stderr = StringIO()
with mock.patch("sys.stderr", stderr):
self.assertRaises(SystemExit, entry.JenkinsJobs, args)
self.assertIn("must contain a Yaml list", stderr.getvalue())
def test_non_existing_job(fixtures_dir, default_config_file, execute_jenkins_jobs):
"""
Run test mode and pass a non-existing job name
(probably better to fail here)
"""
args = [
"--conf",
default_config_file,
"test",
str(fixtures_dir / "cmd-001.yaml"),
"invalid",
]
execute_jenkins_jobs(args)
class TestJenkinsGetPluginInfoError(CmdTestsBase):
"""Test without mocking get_plugins_info.
def test_valid_job(fixtures_dir, default_config_file, execute_jenkins_jobs):
"""
Run test mode and pass a valid job name
"""
args = [
"--conf",
default_config_file,
"test",
str(fixtures_dir / "cmd-001.yaml"),
"foo-job",
]
execute_jenkins_jobs(args)
This test class is used for testing the 'test' subcommand when we want
to validate its behavior without mocking
jenkins_jobs.builder.JenkinsManager.get_plugins_info
def test_console_output(
capsys, fixtures_dir, default_config_file, execute_jenkins_jobs
):
"""
Run test mode and verify that resulting XML gets sent to the console.
"""
@mock.patch("jenkins.Jenkins.get_plugins")
def test_console_output_jenkins_connection_failure_warning(self, get_plugins_mock):
"""
Run test mode and verify that failed Jenkins connection attempt
exception does not bubble out of cmd.main. Ideally, we would also test
that an appropriate message is logged to stderr but it's somewhat
difficult to figure out how to actually enable stderr in this test
suite.
"""
args = [
"--conf",
default_config_file,
"test",
str(fixtures_dir / "cmd-001.yaml"),
]
execute_jenkins_jobs(args)
get_plugins_mock.side_effect = jenkins.JenkinsException("Connection refused")
with mock.patch("sys.stdout"):
try:
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
self.execute_jenkins_jobs_with_args(args)
except jenkins.JenkinsException:
self.fail("jenkins.JenkinsException propagated to main")
except Exception:
pass # only care about jenkins.JenkinsException for now
expected_output = fixtures_dir.joinpath("cmd-001.xml").read_text()
captured = capsys.readouterr()
assert captured.out == expected_output
@mock.patch("jenkins.Jenkins.get_plugins")
def test_skip_plugin_retrieval_if_no_config_provided(self, get_plugins_mock):
"""
Verify that retrieval of information from Jenkins instance about its
plugins will be skipped when run if no config file provided.
"""
with mock.patch("sys.stdout", new_callable=io.BytesIO):
args = [
"--conf",
self.default_config_file,
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
entry.JenkinsJobs(args)
self.assertFalse(get_plugins_mock.called)
@mock.patch("jenkins.Jenkins.get_plugins_info")
def test_skip_plugin_retrieval_if_disabled(self, get_plugins_mock):
"""
Verify that retrieval of information from Jenkins instance about its
plugins will be skipped when run if a config file provided and disables
querying through a config option.
"""
with mock.patch("sys.stdout", new_callable=io.BytesIO):
args = [
"--conf",
os.path.join(self.fixtures_path, "disable-query-plugins.conf"),
"test",
os.path.join(self.fixtures_path, "cmd-001.yaml"),
]
entry.JenkinsJobs(args)
self.assertFalse(get_plugins_mock.called)
def test_output_dir(tmp_path, fixtures_dir, default_config_file, execute_jenkins_jobs):
"""
Run test mode with output to directory and verify that output files are
generated.
"""
args = ["test", str(fixtures_dir / "cmd-001.yaml"), "-o", str(tmp_path)]
execute_jenkins_jobs(args)
assert tmp_path.joinpath("foo-job").exists()
def test_output_dir_config_xml(tmp_path, fixtures_dir, execute_jenkins_jobs):
"""
Run test mode with output to directory in "config.xml" mode and verify
that output files are generated.
"""
args = [
"test",
str(fixtures_dir / "cmd-001.yaml"),
"-o",
str(tmp_path),
"--config-xml",
]
execute_jenkins_jobs(args)
assert tmp_path.joinpath("foo-job", "config.xml").exists()
def test_stream_input_output_no_encoding_exceed_recursion(
mocker, fixtures_dir, execute_jenkins_jobs
):
"""
Test that we don't have issues processing large number of jobs and
outputting the result if the encoding is not set.
"""
console_out = io.BytesIO()
console_out.encoding = None
mocker.patch("sys.stdout", console_out)
input = fixtures_dir.joinpath("large-number-of-jobs-001.yaml").read_bytes()
mocker.patch("sys.stdin", io.BytesIO(input))
args = ["test"]
execute_jenkins_jobs(args)
def test_stream_input_output_utf8_encoding(
capsys, mocker, fixtures_dir, default_config_file, execute_jenkins_jobs
):
"""
Run test mode simulating using pipes for input and output using
utf-8 encoding
"""
input = fixtures_dir.joinpath("cmd-001.yaml").read_bytes()
mocker.patch("sys.stdin", io.BytesIO(input))
args = ["--conf", default_config_file, "test"]
execute_jenkins_jobs(args)
expected_output = fixtures_dir.joinpath("cmd-001.xml").read_text()
captured = capsys.readouterr()
assert captured.out == expected_output
def test_stream_input_output_ascii_encoding(
mocker, fixtures_dir, default_config_file, execute_jenkins_jobs
):
"""
Run test mode simulating using pipes for input and output using
ascii encoding with unicode input
"""
console_out = io.BytesIO()
console_out.encoding = "ascii"
mocker.patch("sys.stdout", console_out)
input = fixtures_dir.joinpath("cmd-001.yaml").read_bytes()
mocker.patch("sys.stdin", io.BytesIO(input))
args = ["--conf", default_config_file, "test"]
execute_jenkins_jobs(args)
expected_output = fixtures_dir.joinpath("cmd-001.xml").read_text()
output = console_out.getvalue().decode("ascii")
assert output == expected_output
def test_stream_output_ascii_encoding_invalid_char(
mocker, fixtures_dir, default_config_file
):
"""
Run test mode simulating using pipes for input and output using
ascii encoding for output with include containing a character
that cannot be converted.
"""
console_out = io.BytesIO()
console_out.encoding = "ascii"
mocker.patch("sys.stdout", console_out)
input = fixtures_dir.joinpath("unicode001.yaml").read_bytes()
mocker.patch("sys.stdin", io.BytesIO(input))
args = ["--conf", default_config_file, "test"]
jenkins_jobs = entry.JenkinsJobs(args)
with pytest.raises(UnicodeError) as excinfo:
jenkins_jobs.execute()
assert "'ascii' codec can't encode character" in str(excinfo.value)
def test_plugins_info_stub_option(mocker, fixtures_dir, execute_jenkins_jobs):
"""
Test handling of plugins_info stub option.
"""
mocker.patch("jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML")
registry_mock = mocker.patch("jenkins_jobs.cli.subcommand.update.ModuleRegistry")
plugins_info_stub_yaml_file = fixtures_dir / "plugins-info.yaml"
args = [
"--conf",
str(fixtures_dir / "cmd-001.conf"),
"test",
"-p",
str(plugins_info_stub_yaml_file),
str(fixtures_dir / "cmd-001.yaml"),
]
execute_jenkins_jobs(args)
plugins_info_list = yaml.safe_load(plugins_info_stub_yaml_file.read_text())
registry_mock.assert_called_with(mock.ANY, plugins_info_list)
def test_bogus_plugins_info_stub_option(
capsys, mocker, fixtures_dir, default_config_file
):
"""
Verify that a JenkinsJobException is raised if the plugins_info stub
file does not yield a list as its top-level object.
"""
mocker.patch("jenkins_jobs.cli.subcommand.update.XmlJobGenerator.generateXML")
mocker.patch("jenkins_jobs.cli.subcommand.update.ModuleRegistry")
plugins_info_stub_yaml_file = fixtures_dir / "bogus-plugins-info.yaml"
args = [
"--conf",
str(fixtures_dir / "cmd-001.conf"),
"test",
"-p",
str(plugins_info_stub_yaml_file),
str(fixtures_dir / "cmd-001.yaml"),
]
with pytest.raises(SystemExit):
entry.JenkinsJobs(args)
captured = capsys.readouterr()
assert "must contain a Yaml list" in captured.err
# Test without mocking get_plugins_info.
#
# These tests are used for testing the 'test' subcommand when we want
# to validate its behavior without mocking
# jenkins_jobs.builder.JenkinsManager.get_plugins_info