Raise better exception for Spark master validation

Previously RequiredServiceMissingException was raised for
all cases of invalid count for Spark master, even if the
service was present but count was otherwise incorrect. Now
the correct exception will be raised depending on whether
master is missing or master has wrong count.

Also, add missing unit tests for validation.

Change-Id: Ide7cc8414086e377c86306630920a3c753811458
This commit is contained in:
Jeremy Freudberg 2017-04-05 15:59:19 -04:00
parent dd431ebdeb
commit c4c776d708
2 changed files with 56 additions and 1 deletions

View File

@@ -102,9 +102,13 @@ class SparkProvider(p.ProvisioningPluginBase):
sm_count = sum([ng.count for ng
in utils.get_node_groups(cluster, "master")])
if sm_count != 1:
if sm_count < 1:
raise ex.RequiredServiceMissingException("Spark master")
if sm_count >= 2:
raise ex.InvalidComponentCountException("Spark master", "1",
sm_count)
sl_count = sum([ng.count for ng
in utils.get_node_groups(cluster, "slave")])

View File

@@ -20,9 +20,11 @@ from sahara import conductor as cond
from sahara import context
from sahara import exceptions as ex
from sahara.plugins import base as pb
from sahara.plugins import exceptions as pe
from sahara.plugins.spark import plugin as pl
from sahara.service.edp.spark import engine
from sahara.tests.unit import base
from sahara.tests.unit import testutils as tu
from sahara.utils import edp
@@ -128,6 +130,55 @@ class SparkPluginTest(base.SaharaWithDbTestCase):
'sudo rm -f /etc/crond.d/spark-cleanup')
class SparkValidationTest(base.SaharaTestCase):
    """Checks the Spark plugin's cluster-topology validation rules.

    Each case supplies counts for (namenode, master, slave, datanode)
    node groups and asserts whether validation passes or which
    exception it raises.
    """

    def setUp(self):
        super(SparkValidationTest, self).setUp()
        pb.setup_plugins()
        self.plugin = pl.SparkProvider()

    def test_validate(self):
        # Node-group templates; the count of each is filled in per
        # case by _create_cluster.
        self.ng = [
            tu.make_ng_dict("nn", "f1", ["namenode"], 0),
            tu.make_ng_dict("ma", "f1", ["master"], 0),
            tu.make_ng_dict("sl", "f1", ["slave"], 0),
            tu.make_ng_dict("dn", "f1", ["datanode"], 0),
        ]

        # Valid topologies: exactly one namenode and one master.
        self._validate_case(1, 1, 3, 3)
        self._validate_case(1, 1, 3, 4)
        self._validate_case(1, 1, 4, 3)

        # Too many masters / namenodes, or zero namenodes, is a wrong
        # component count.
        for counts in ((2, 1, 3, 3), (1, 2, 3, 3), (0, 1, 3, 3)):
            with testtools.ExpectedException(
                    pe.InvalidComponentCountException):
                self._validate_case(*counts)

        # Zero masters means the master service is missing entirely.
        with testtools.ExpectedException(pe.RequiredServiceMissingException):
            self._validate_case(1, 0, 3, 3)

        # dfs.replication greater than the datanode count is rejected.
        cl = self._create_cluster(
            1, 1, 3, 3, cluster_configs={'HDFS': {'dfs.replication': 4}})
        with testtools.ExpectedException(pe.InvalidComponentCountException):
            self.plugin.validate(cl)

    def _create_cluster(self, *args, **kwargs):
        # Assign the requested count to each node group, in order,
        # and build a cluster from the resulting group list.
        groups = []
        for idx, count in enumerate(args):
            self.ng[idx]['count'] = count
            groups.append(self.ng[idx])
        return tu.create_cluster("cluster1", "tenant1", "spark",
                                 "1.60", groups, **kwargs)

    def _validate_case(self, *args):
        # Convenience wrapper: build the cluster and run validation.
        self.plugin.validate(self._create_cluster(*args))
class SparkProviderTest(base.SaharaTestCase):
def setUp(self):
super(SparkProviderTest, self).setUp()