Fixes several problems/errors caused by tox
Change-Id: Ia9665f075bbd55a078be1b95da1c535707985877

parent c68cb5e9da
commit 50f4ba6dfd
@@ -13,16 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from unittest import mock

import testtools

from sahara.plugins import base as pb
from sahara.plugins import conductor
from sahara.plugins import context
from sahara.plugins import edp
from sahara.plugins import exceptions as pe
from sahara.plugins import testutils as tu
from sahara_plugin_spark.plugins.spark import plugin as pl
from sahara_plugin_spark.tests.unit import base

@@ -41,128 +35,6 @@ class SparkPluginTest(base.SaharaWithDbTestCase):
            'default_image_id': 'image'}
        return cluster_dict

    def test_plugin11_edp_engine(self):
        self._test_engine('1.6.0', edp.JOB_TYPE_SPARK,
                          edp.PluginsSparkJobEngine)

    def test_plugin12_shell_engine(self):
        self._test_engine('1.6.0', edp.JOB_TYPE_SHELL,
                          edp.PluginsSparkShellJobEngine)

    def test_plugin21_edp_engine(self):
        self._test_engine('2.1.0', edp.JOB_TYPE_SPARK,
                          edp.PluginsSparkJobEngine)

    def test_plugin21_shell_engine(self):
        self._test_engine('2.1.0', edp.JOB_TYPE_SHELL,
                          edp.PluginsSparkShellJobEngine)

    def test_plugin22_edp_engine(self):
        self._test_engine('2.2', edp.JOB_TYPE_SPARK,
                          edp.PluginsSparkJobEngine)

    def test_plugin22_shell_engine(self):
        self._test_engine('2.2', edp.JOB_TYPE_SHELL,
                          edp.PluginsSparkShellJobEngine)

    def test_plugin23_edp_engine(self):
        self._test_engine('2.3', edp.JOB_TYPE_SPARK,
                          edp.PluginsSparkJobEngine)

    def test_plugin23_shell_engine(self):
        self._test_engine('2.3', edp.JOB_TYPE_SHELL,
                          edp.PluginsSparkShellJobEngine)

    def _test_engine(self, version, job_type, eng):
        cluster_dict = self._init_cluster_dict(version)

        cluster = conductor.cluster_create(context.ctx(), cluster_dict)
        plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
        self.assertIsInstance(plugin.get_edp_engine(cluster, job_type), eng)

    def test_cleanup_configs(self):
        remote = mock.Mock()
        instance = mock.Mock()

        extra_conf = {'job_cleanup': {
            'valid': True,
            'script': 'script_text',
            'cron': 'cron_text'}}
        instance.node_group.node_processes = ["master"]
        instance.node_group.id = id
        cluster_dict = self._init_cluster_dict('2.2')

        cluster = conductor.cluster_create(context.ctx(), cluster_dict)
        plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
        plugin._push_cleanup_job(remote, cluster, extra_conf, instance)
        remote.write_file_to.assert_called_with(
            '/etc/hadoop/tmp-cleanup.sh',
            'script_text')
        remote.execute_command.assert_called_with(
            'sudo sh -c \'echo "cron_text" > /etc/cron.d/spark-cleanup\'')

        remote.reset_mock()
        instance.node_group.node_processes = ["worker"]
        plugin._push_cleanup_job(remote, cluster, extra_conf, instance)
        self.assertFalse(remote.called)

        remote.reset_mock()
        instance.node_group.node_processes = ["master"]
        extra_conf['job_cleanup']['valid'] = False
        plugin._push_cleanup_job(remote, cluster, extra_conf, instance)
        remote.execute_command.assert_called_with(
            'sudo rm -f /etc/crond.d/spark-cleanup')


class SparkValidationTest(base.SaharaTestCase):
    def setUp(self):
        super(SparkValidationTest, self).setUp()
        self.override_config("plugins", ["spark"])
        pb.setup_plugins()
        self.plugin = pl.SparkProvider()

    def test_validate(self):
        self.ng = []
        self.ng.append(tu.make_ng_dict("nn", "f1", ["namenode"], 0))
        self.ng.append(tu.make_ng_dict("ma", "f1", ["master"], 0))
        self.ng.append(tu.make_ng_dict("sl", "f1", ["slave"], 0))
        self.ng.append(tu.make_ng_dict("dn", "f1", ["datanode"], 0))

        self._validate_case(1, 1, 3, 3)
        self._validate_case(1, 1, 3, 4)
        self._validate_case(1, 1, 4, 3)

        with testtools.ExpectedException(pe.InvalidComponentCountException):
            self._validate_case(2, 1, 3, 3)

        with testtools.ExpectedException(pe.InvalidComponentCountException):
            self._validate_case(1, 2, 3, 3)

        with testtools.ExpectedException(pe.InvalidComponentCountException):
            self._validate_case(0, 1, 3, 3)

        with testtools.ExpectedException(pe.RequiredServiceMissingException):
            self._validate_case(1, 0, 3, 3)

        cl = self._create_cluster(
            1, 1, 3, 3, cluster_configs={'HDFS': {'dfs.replication': 4}})

        with testtools.ExpectedException(pe.InvalidComponentCountException):
            self.plugin.validate(cl)

    def _create_cluster(self, *args, **kwargs):
        lst = []
        for i in range(0, len(args)):
            self.ng[i]['count'] = args[i]
            lst.append(self.ng[i])

        return tu.create_cluster("cluster1", "tenant1", "spark",
                                 "2.2", lst, **kwargs)

    def _validate_case(self, *args):
        cl = self._create_cluster(*args)
        self.plugin.validate(cl)


class SparkProviderTest(base.SaharaTestCase):
    def setUp(self):

@@ -14,3 +14,4 @@ stestr>=1.0.0 # Apache-2.0
 pylint==1.4.5 # GPLv2
 testscenarios>=0.4 # Apache-2.0/BSD
 testtools>=2.4.0 # MIT
+sahara>=10.0.0.0b1
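
For context, tox only sees this new lower bound because the requirements files are installed into every environment it creates. A minimal sketch of that wiring, assuming the stanza belongs to the base [testenv] section (the deps and commands lines themselves appear as context in the tox.ini hunk further down); the comments are explanatory additions, not part of the patch:

# Both requirements files are installed into each tox environment,
# so the new sahara>=10.0.0.0b1 bound takes effect for the stestr runs.
[testenv]
deps =
  -r{toxinidir}/requirements.txt
  -r{toxinidir}/test-requirements.txt
commands = stestr run {posargs}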

tox.ini (15 changed lines)
@@ -1,6 +1,6 @@
 [tox]
-envlist = py38,pep8
-minversion = 3.1.1
+envlist = pep8
+minversion = 3.18.0
 skipsdist = True
 # this allows tox to infer the base python from the environment name
 # and override any basepython configured in this file
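
Two things change in the [tox] header: the default environment list is reduced to pep8, and minversion is raised to 3.18.0, the first tox release that accepts the allowlist_externals option the later hunks switch to. A commented sketch of the resulting section; the comments are explanatory additions, not lines from the patch:

[tox]
# only pep8 runs on a bare `tox`; other environments still run when named explicitly
envlist = pep8
# tox 3.18.0 introduced allowlist_externals, which this file now uses
minversion = 3.18.0
# do not build an sdist of the plugin before creating the environments
skipsdist = True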
@@ -18,7 +18,10 @@ deps =
   -r{toxinidir}/requirements.txt
   -r{toxinidir}/test-requirements.txt
 commands = stestr run {posargs}
-passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
+passenv =
+  http_proxy
+  https_proxy
+  no_proxy

 [testenv:debug-py36]
 basepython = python3.6
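
The passenv value changes from one space-separated line to a newline-separated list, and the upper-case duplicates are dropped. Newer tox releases (4.x) reject a passenv value that contains whitespace, so the list form is the portable spelling. A minimal sketch of the before and after forms; the comments are explanatory additions:

# after this patch: one variable per line
passenv =
  http_proxy
  https_proxy
  no_proxy
# before this patch (a single value with whitespace, rejected by tox 4):
# passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY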
@@ -48,7 +51,7 @@ deps =
 commands =
   rm -rf doc/build/html
   sphinx-build -W -b html doc/source doc/build/html
-whitelist_externals =
+allowlist_externals =
   rm

 [testenv:pdf-docs]
@@ -57,7 +60,7 @@ commands =
   rm -rf doc/build/pdf
   sphinx-build -W -b latex doc/source doc/build/pdf
   make -C doc/build/pdf
-whitelist_externals =
+allowlist_externals =
   make
   rm

@@ -68,7 +71,7 @@ deps =
 commands =
   rm -rf releasenotes/build releasenotes/html
   sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
-whitelist_externals = rm
+allowlist_externals = rm

 [testenv:debug]
 # It runs tests from the specified dir (default is sahara_plugin_spark/tests)