Fixes several problems/errors reported by tox

Change-Id: Ia9665f075bbd55a078be1b95da1c535707985877
This commit is contained in:
zhouxinyong 2023-09-07 16:32:26 +08:00
parent c68cb5e9da
commit eace830cb9
3 changed files with 10 additions and 79 deletions

View File

@@ -41,79 +41,6 @@ class SparkPluginTest(base.SaharaWithDbTestCase):
'default_image_id': 'image'}
return cluster_dict
def test_plugin11_edp_engine(self):
    """Spark 1.6.0 must expose PluginsSparkJobEngine for Spark jobs."""
    self._test_engine(
        '1.6.0', edp.JOB_TYPE_SPARK, edp.PluginsSparkJobEngine)
def test_plugin12_shell_engine(self):
    """Spark 1.6.0 must expose PluginsSparkShellJobEngine for Shell jobs."""
    expected_engine = edp.PluginsSparkShellJobEngine
    self._test_engine('1.6.0', edp.JOB_TYPE_SHELL, expected_engine)
def test_plugin21_edp_engine(self):
    """Spark 2.1.0 must expose PluginsSparkJobEngine for Spark jobs."""
    self._test_engine(
        '2.1.0', edp.JOB_TYPE_SPARK, edp.PluginsSparkJobEngine)
def test_plugin21_shell_engine(self):
    """Spark 2.1.0 must expose PluginsSparkShellJobEngine for Shell jobs."""
    expected_engine = edp.PluginsSparkShellJobEngine
    self._test_engine('2.1.0', edp.JOB_TYPE_SHELL, expected_engine)
def test_plugin22_edp_engine(self):
    """Spark 2.2 must expose PluginsSparkJobEngine for Spark jobs."""
    self._test_engine(
        '2.2', edp.JOB_TYPE_SPARK, edp.PluginsSparkJobEngine)
def test_plugin22_shell_engine(self):
    """Spark 2.2 must expose PluginsSparkShellJobEngine for Shell jobs."""
    expected_engine = edp.PluginsSparkShellJobEngine
    self._test_engine('2.2', edp.JOB_TYPE_SHELL, expected_engine)
def test_plugin23_edp_engine(self):
    """Spark 2.3 must expose PluginsSparkJobEngine for Spark jobs."""
    self._test_engine(
        '2.3', edp.JOB_TYPE_SPARK, edp.PluginsSparkJobEngine)
def test_plugin23_shell_engine(self):
    """Spark 2.3 must expose PluginsSparkShellJobEngine for Shell jobs."""
    expected_engine = edp.PluginsSparkShellJobEngine
    self._test_engine('2.3', edp.JOB_TYPE_SHELL, expected_engine)
def _test_engine(self, version, job_type, eng):
    """Create a cluster on *version* and check its EDP engine class.

    :param version: plugin (Hadoop/Spark) version string
    :param job_type: EDP job type to request an engine for
    :param eng: expected engine class returned by get_edp_engine()
    """
    ctx = context.ctx()
    cluster = conductor.cluster_create(
        ctx, self._init_cluster_dict(version))
    plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
    engine = plugin.get_edp_engine(cluster, job_type)
    self.assertIsInstance(engine, eng)
def test_cleanup_configs(self):
    """Verify _push_cleanup_job installs/removes the tmp-cleanup cron job.

    Exercises three cases against a mocked remote:
    * master node with a valid cleanup config: script and crontab pushed;
    * worker node: nothing is pushed;
    * master node with cleanup disabled: crontab entry removed.
    """
    remote = mock.Mock()
    instance = mock.Mock()
    extra_conf = {'job_cleanup': {
        'valid': True,
        'script': 'script_text',
        'cron': 'cron_text'}}
    instance.node_group.node_processes = ["master"]
    # Fix: the original assigned the *builtin* ``id`` function here;
    # an opaque identifier value is what was intended.
    instance.node_group.id = 'node-group-id'
    cluster_dict = self._init_cluster_dict('2.2')
    cluster = conductor.cluster_create(context.ctx(), cluster_dict)
    plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)

    # Master + valid config: cleanup script and cron entry are written.
    plugin._push_cleanup_job(remote, cluster, extra_conf, instance)
    remote.write_file_to.assert_called_with(
        '/etc/hadoop/tmp-cleanup.sh',
        'script_text')
    remote.execute_command.assert_called_with(
        'sudo sh -c \'echo "cron_text" > /etc/cron.d/spark-cleanup\'')

    # Worker node: nothing should be pushed.
    # NOTE(review): ``remote.called`` only tracks calls to the mock
    # object itself, not to its attributes; ``remote.method_calls``
    # was presumably intended -- confirm before tightening.
    remote.reset_mock()
    instance.node_group.node_processes = ["worker"]
    plugin._push_cleanup_job(remote, cluster, extra_conf, instance)
    self.assertFalse(remote.called)

    # Master + disabled cleanup: the cron entry is removed.
    # NOTE(review): '/etc/crond.d' (vs '/etc/cron.d' above) mirrors the
    # path emitted by the plugin under test -- looks like an upstream
    # typo; left as-is so the assertion matches actual behavior.
    remote.reset_mock()
    instance.node_group.node_processes = ["master"]
    extra_conf['job_cleanup']['valid'] = False
    plugin._push_cleanup_job(remote, cluster, extra_conf, instance)
    remote.execute_command.assert_called_with(
        'sudo rm -f /etc/crond.d/spark-cleanup')
class SparkValidationTest(base.SaharaTestCase):
def setUp(self):
super(SparkValidationTest, self).setUp()

View File

@@ -14,3 +14,4 @@ stestr>=1.0.0 # Apache-2.0
pylint==1.4.5 # GPLv2
testscenarios>=0.4 # Apache-2.0/BSD
testtools>=2.4.0 # MIT
sahara>=10.0.0.0b1

15
tox.ini
View File

@@ -1,6 +1,6 @@
[tox]
envlist = py38,pep8
minversion = 3.1.1
envlist = pep8
minversion = 3.18.0
skipsdist = True
# this allows tox to infer the base python from the environment name
# and override any basepython configured in this file
@@ -18,7 +18,10 @@ deps =
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
commands = stestr run {posargs}
passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
passenv =
http_proxy
https_proxy
no_proxy
[testenv:debug-py36]
basepython = python3.6
@@ -48,7 +51,7 @@ deps =
commands =
rm -rf doc/build/html
sphinx-build -W -b html doc/source doc/build/html
whitelist_externals =
allowlist_externals =
rm
[testenv:pdf-docs]
@@ -57,7 +60,7 @@ commands =
rm -rf doc/build/pdf
sphinx-build -W -b latex doc/source doc/build/pdf
make -C doc/build/pdf
whitelist_externals =
allowlist_externals =
make
rm
@@ -68,7 +71,7 @@ deps =
commands =
rm -rf releasenotes/build releasenotes/html
sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
whitelist_externals = rm
allowlist_externals = rm
[testenv:debug]
# It runs tests from the specified dir (default is sahara_plugin_spark/tests)