Remove support for Spark 1.0.0

Spark 1.0.0 support was deprecated in Liberty and marked for
removal in Mitaka; this change removes it.

Change-Id: I3ba6941b1e1aa6900b5f59ea52a0370577729d9e
Implements: blueprint remove-spark-100
Trevor McKay 2016-02-19 15:40:03 -05:00
parent 3f33ca2543
commit e00a2bbdf7
8 changed files with 26 additions and 73 deletions
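
In practical terms, the change shrinks the Spark plugin's advertised version list from three entries to two, and a cluster spec naming any other version is rejected up front. A minimal sketch of that contract in plain Python (illustrative only; the real list lives in SparkProvider.get_versions() below):

# Illustrative sketch, not Sahara code: the supported-version contract
# after this change.
SUPPORTED_SPARK_VERSIONS = ["1.6.0", "1.3.1"]

def check_version(hadoop_version):
    # Reject cluster specs that name a version the plugin no longer ships.
    if hadoop_version not in SUPPORTED_SPARK_VERSIONS:
        raise ValueError("Spark %s is not supported; expected one of: %s"
                         % (hadoop_version,
                            ", ".join(SUPPORTED_SPARK_VERSIONS)))

check_version("1.6.0")    # passes silently
# check_version("1.0.0")  # would now raise ValueError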


@@ -0,0 +1,3 @@
+---
+deprecations:
+  - Removed support for the Spark 1.0.0 plugin.


@@ -1,20 +0,0 @@
-{
-    "plugin_name": "spark",
-    "hadoop_version": "1.0.0",
-    "node_groups": [
-        {
-            "name": "slave",
-            "count": 3,
-            "node_group_template_id": "{spark-100-default-slave}"
-        },
-        {
-            "name": "master",
-            "count": 1,
-            "node_group_template_id": "{spark-100-default-master}"
-        }
-    ],
-    "name": "spark-100-default-cluster",
-    "neutron_management_network": "{neutron_management_network}",
-    "cluster_configs": {},
-    "is_protected": true
-}
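
The brace-wrapped values above ({spark-100-default-slave}, {neutron_management_network}, and so on) are substitution tokens rather than literal IDs; they are filled in when the default templates are loaded. A hedged sketch of that substitution step (the helper below is hypothetical, not Sahara's actual template loader):

import json

def fill_template(text, values):
    # Naive token substitution: replace each "{key}" with its value,
    # then parse the result as JSON.
    for key, val in values.items():
        text = text.replace("{%s}" % key, val)
    return json.loads(text)

raw = '{"name": "spark-cluster", "net": "{neutron_management_network}"}'
print(fill_template(raw, {"neutron_management_network": "b1a2c3d4-net-id"}))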


@@ -1,14 +0,0 @@
-{
-    "plugin_name": "spark",
-    "hadoop_version": "1.0.0",
-    "node_processes": [
-        "namenode",
-        "master"
-    ],
-    "name": "spark-100-default-master",
-    "floating_ip_pool": "{floating_ip_pool}",
-    "flavor_id": "{flavor_id}",
-    "auto_security_group": "{auto_security_group}",
-    "security_groups": "{security_groups}",
-    "is_protected": true
-}


@@ -1,14 +0,0 @@
-{
-    "plugin_name": "spark",
-    "hadoop_version": "1.0.0",
-    "node_processes": [
-        "datanode",
-        "slave"
-    ],
-    "name": "spark-100-default-slave",
-    "floating_ip_pool": "{floating_ip_pool}",
-    "flavor_id": "{flavor_id}",
-    "auto_security_group": "{auto_security_group}",
-    "security_groups": "{security_groups}",
-    "is_protected": true
-}


@@ -26,7 +26,7 @@ from sahara.service.edp.spark import engine as edp_engine
 
 class EdpEngine(edp_engine.SparkJobEngine):
 
-    edp_base_version = "1.0.0"
+    edp_base_version = "1.3.1"
 
     def __init__(self, cluster):
         super(EdpEngine, self).__init__(cluster)
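
Raising edp_base_version moves the floor that a cluster's Spark version must meet before EDP jobs are accepted, which is why the test further down now expects "Spark 1.3.1 or higher required". A minimal sketch of a version-floor check of this kind, assuming purely numeric dotted version strings (the helpers are hypothetical, not the SparkJobEngine implementation):

def version_tuple(version):
    # "1.3.1" -> (1, 3, 1); assumes numeric dotted versions only.
    return tuple(int(part) for part in version.split("."))

def meets_edp_base(cluster_version, base="1.3.1"):
    return version_tuple(cluster_version) >= version_tuple(base)

assert meets_edp_base("1.6.0")
assert meets_edp_base("1.3.1")
assert not meets_edp_base("1.0.0")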


@@ -20,7 +20,6 @@ from oslo_log import log as logging
 
 from sahara import conductor
 from sahara import context
 from sahara import exceptions
-from sahara.i18n import _
 from sahara.i18n import _LI
 from sahara.plugins import exceptions as ex
@@ -60,7 +59,7 @@ class SparkProvider(p.ProvisioningPluginBase):
                  "CDH cluster without any management consoles.")
 
     def get_versions(self):
-        return ['1.6.0', '1.3.1', '1.0.0']
+        return ['1.6.0', '1.3.1']
 
     def get_configs(self, hadoop_version):
        return c_helper.get_plugin_configs()
@@ -69,11 +68,6 @@ class SparkProvider(p.ProvisioningPluginBase):
         return self.processes
 
     def validate(self, cluster):
-        if cluster.hadoop_version == "1.0.0":
-            raise exceptions.DeprecatedException(
-                _("Support for Spark version 1.0.0 is now deprecated and will"
-                  " be removed in the 2016.1 release."))
-
         nn_count = sum([ng.count for ng
                         in utils.get_node_groups(cluster, "namenode")])
         if nn_count != 1:
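
With 1.0.0 gone from get_versions(), a cluster carrying that version can never reach validate(), so the DeprecatedException guard and the now-unused _ import are dead code. What remains is the topology check the hunk above ends on; a simplified, self-contained sketch of that pattern, using plain dicts and a plain exception in place of Sahara's objects:

def validate(cluster):
    # A Spark cluster needs exactly one node group running the namenode.
    nn_count = sum(ng["count"] for ng in cluster["node_groups"]
                   if "namenode" in ng["node_processes"])
    if nn_count != 1:
        raise RuntimeError("Spark cluster requires exactly one namenode, "
                           "found %d" % nn_count)

validate({"node_groups": [
    {"count": 1, "node_processes": ["namenode", "master"]},
    {"count": 3, "node_processes": ["datanode", "slave"]},
]})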


@@ -54,20 +54,24 @@ class SparkPluginTest(base.SaharaWithDbTestCase):
         edp_engine = plugin.get_edp_engine(cluster, edp.JOB_TYPE_SPARK)
         with testtools.ExpectedException(
                 ex.InvalidDataException,
-                value_re="Spark 1.0.0 or higher required to run "
+                value_re="Spark 1.3.1 or higher required to run "
                          "Spark jobs\nError ID: .*"):
             edp_engine.validate_job_execution(cluster, job, mock.Mock())
 
     def test_plugin10_edp_engine(self):
-        self._test_engine('1.0.0', edp.JOB_TYPE_SPARK,
+        self._test_engine('1.3.1', edp.JOB_TYPE_SPARK,
                           engine.SparkJobEngine)
 
-    def test_plugin09_shell_engine(self):
-        self._test_engine('0.9.1', edp.JOB_TYPE_SHELL,
+    def test_plugin10_shell_engine(self):
+        self._test_engine('1.3.1', edp.JOB_TYPE_SHELL,
                           engine.SparkShellJobEngine)
 
-    def test_plugin10_shell_engine(self):
-        self._test_engine('1.0.0', edp.JOB_TYPE_SHELL,
+    def test_plugin11_edp_engine(self):
+        self._test_engine('1.6.0', edp.JOB_TYPE_SPARK,
+                          engine.SparkJobEngine)
+
+    def test_plugin12_shell_engine(self):
+        self._test_engine('1.6.0', edp.JOB_TYPE_SHELL,
                           engine.SparkShellJobEngine)
 
     def _test_engine(self, version, job_type, eng):
@@ -100,7 +104,7 @@ class SparkPluginTest(base.SaharaWithDbTestCase):
                                        'cron': 'cron_text'}}
         instance.node_group.node_processes = ["master"]
         instance.node_group.id = id
-        cluster_dict = self._init_cluster_dict('1.0.0')
+        cluster_dict = self._init_cluster_dict('1.3.1')
 
         cluster = conductor.cluster_create(context.ctx(), cluster_dict)
         plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
@@ -132,26 +136,26 @@ class SparkProviderTest(base.SaharaTestCase):
         provider = pl.SparkProvider()
 
         res = provider.get_edp_job_types()
-        self.assertEqual([edp.JOB_TYPE_SHELL, edp.JOB_TYPE_SPARK],
-                         res['1.0.0'])
         self.assertEqual([edp.JOB_TYPE_SHELL, edp.JOB_TYPE_SPARK],
                          res['1.3.1'])
+        self.assertEqual([edp.JOB_TYPE_SHELL, edp.JOB_TYPE_SPARK],
+                         res['1.6.0'])
 
     def test_edp_config_hints(self):
         provider = pl.SparkProvider()
 
-        res = provider.get_edp_config_hints(edp.JOB_TYPE_SHELL, "1.0.0")
-        self.assertEqual({'configs': {}, 'args': [], 'params': {}},
-                         res['job_config'])
-
         res = provider.get_edp_config_hints(edp.JOB_TYPE_SHELL, "1.3.1")
         self.assertEqual({'configs': {}, 'args': [], 'params': {}},
                          res['job_config'])
 
-        res = provider.get_edp_config_hints(edp.JOB_TYPE_SPARK, "1.0.0")
-        self.assertEqual({'args': [], 'configs': []},
-                         res['job_config'])
-
+        res = provider.get_edp_config_hints(edp.JOB_TYPE_SHELL, "1.6.0")
+        self.assertEqual({'configs': {}, 'args': [], 'params': {}},
+                         res['job_config'])
+
         res = provider.get_edp_config_hints(edp.JOB_TYPE_SPARK, "1.3.1")
         self.assertEqual({'args': [], 'configs': []},
                          res['job_config'])
+
+        res = provider.get_edp_config_hints(edp.JOB_TYPE_SPARK, "1.6.0")
+        self.assertEqual({'args': [], 'configs': []},
+                         res['job_config'])
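
Taken together, the updated tests exercise every remaining plugin version against both EDP job types. A self-contained sketch of that version-by-job-type matrix (the engine names are stand-ins for the classes asserted above):

import itertools

VERSIONS = ["1.3.1", "1.6.0"]
ENGINES = {"SHELL": "SparkShellJobEngine", "SPARK": "SparkJobEngine"}

# One test case per (version, job type) pair, mirroring the tests above.
for version, (job_type, engine_cls) in itertools.product(
        VERSIONS, sorted(ENGINES.items())):
    print("plugin %s + %s job -> expect %s" % (version, job_type, engine_cls))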


@@ -189,7 +189,7 @@ class TestJobExecCreateValidation(u.ValidationTestCase):
         ng = tu.make_ng_dict('master', 42, [], 1,
                              instances=[tu.make_inst_dict('id', 'name')])
         get_cluster.return_value = tu.create_cluster("cluster", "tenant1",
-                                                     "spark", "1.0.0", [ng])
+                                                     "spark", "1.3.1", [ng])
 
         # Everything is okay, spark cluster supports EDP by default
         # because cluster requires a master and slaves >= 1
@@ -253,7 +253,7 @@
         ng = tu.make_ng_dict('master', 42, ['namenode'], 1,
                              instances=[tu.make_inst_dict('id', 'name')])
         cluster_get.return_value = tu.create_cluster("cluster", "tenant1",
-                                                     "spark", "1.0.0", [ng])
+                                                     "spark", "1.3.1", [ng])
 
         self._assert_create_object_validation(
             data={