Merge "Deprecate Vanilla 2.6.0"
This commit is contained in:
commit
a493326558
@@ -1,6 +1,6 @@
 clusters:
   - plugin_name: vanilla
-    plugin_version: 2.6.0
+    plugin_version: 2.7.1
     image: ${vanilla_two_six_image}
     node_group_templates:
       - name: worker-dn-nm
@@ -55,7 +55,7 @@ clusters:
         node_configs:
           *ng_configs
     cluster_template:
-      name: vanilla260
+      name: vanilla271
       node_group_templates:
         master-rm-nn-hvs: 1
         master-oo-hs-sn: 1
@@ -12,8 +12,8 @@ network:

 clusters:
   - plugin_name: vanilla
-    plugin_version: 2.6.0
-    image: sahara-juno-vanilla-2.6.0-ubuntu-14.04
+    plugin_version: 2.7.1
+    image: sahara-liberty-vanilla-2.7.1-ubuntu-14.04
     edp_jobs_flow: test_flow
   - plugin_name: hdp
     plugin_version: 2.0.6
@@ -48,7 +48,7 @@ edp_jobs_flow:
     - type: Java
       additional_libs:
         - type: database
-          source: etc/edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.6.0.jar
+          source: etc/edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.7.1.jar
       configs:
         edp.java.main_class: org.apache.hadoop.examples.QuasiMonteCarlo
       args:
@@ -29,7 +29,7 @@ A very simple cluster template JSON file might look like this:

 {
     "plugin_name": "vanilla",
-    "hadoop_version": "2.6.0",
+    "hadoop_version": "2.7.1",
     "node_groups": [
         {
             "name": "master",
@@ -76,7 +76,7 @@ of a node group template file that uses substitution for *flavor_id*:

 {
     "plugin_name": "vanilla",
-    "hadoop_version": "2.6.0",
+    "hadoop_version": "2.7.1",
     "node_processes": [
         "namenode",
         "resourcemanager",
@@ -139,13 +139,13 @@ in the third section:

 .. code:: python

-    [vanilla_2.6.0_master]
+    [vanilla_2.7.1_master]
     # This is named for the plugin, version, and template.
     # It may contain only node group template fields.
     flavor_id = 5
     image_id = b7883f8a-9a7f-42cc-89a2-d3c8b1cc7b28

-    [vanilla_2.6.0]
+    [vanilla_2.7.1]
     # This is named for the plugin and version.
     # It may contain fields for both node group and cluster templates.
     flavor_id = 4
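As a quick illustration of how override sections like these are resolved, here is a minimal sketch (not Sahara's actual config loader) in which the most specific section, ``[vanilla_2.7.1_master]``, wins over the plugin-wide ``[vanilla_2.7.1]`` for any field both define; the file name ``myconfig`` is an assumption:

.. code:: python

    import configparser

    parser = configparser.ConfigParser()
    parser.read('myconfig')  # assumed file name

    def lookup(field, plugin='vanilla', version='2.7.1', template='master'):
        # Try the template-specific section first, then the plugin-wide one.
        for section in ('%s_%s_%s' % (plugin, version, template),
                        '%s_%s' % (plugin, version)):
            if parser.has_section(section) and parser.has_option(section, field):
                return parser.get(section, field)
        return None

    print(lookup('flavor_id'))  # -> '5', from [vanilla_2.7.1_master]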
@@ -243,23 +243,23 @@ Create/update default templates bundled with Sahara for just the vanilla plugin:

     $ sahara-templates --config-file myconfig update -t $TENANT_ID -p vanilla

-Create/update default templates bundled with Sahara for just version 2.6.0
+Create/update default templates bundled with Sahara for just version 2.7.1
 of the vanilla plugin::

-    $ sahara-templates --config-file myconfig update -t $TENANT_ID -p vanilla -pv 2.6.0
+    $ sahara-templates --config-file myconfig update -t $TENANT_ID -p vanilla -pv 2.7.1

-Create/update default templates bundled with Sahara for just version 2.6.0
+Create/update default templates bundled with Sahara for just version 2.7.1
 of the vanilla plugin and version 2.0.6 of the hdp plugin::

-    $ sahara-templates --config-file myconfig update -t $TENANT_ID -p vanilla -pv vanilla.2.6.0 -p hdp -pv hdp.2.0.6
+    $ sahara-templates --config-file myconfig update -t $TENANT_ID -p vanilla -pv vanilla.2.7.1 -p hdp -pv hdp.2.0.6

 Delete default templates for the vanilla plugin::

     $ sahara-templates --config-file myconfig delete -t $TENANT_ID -p vanilla

-Delete default templates for version 2.6.0 of the vanilla plugin::
+Delete default templates for version 2.7.1 of the vanilla plugin::

-    $ sahara-templates --config-file myconfig delete -t $TENANT_ID -p vanilla -pv 2.6.0
+    $ sahara-templates --config-file myconfig delete -t $TENANT_ID -p vanilla -pv 2.7.1

 Delete a specific node group template by ID::

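Note that ``-pv`` accepts either a bare version ("2.7.1") or a plugin-qualified one ("vanilla.2.7.1"), as the template utility tests further down in this commit confirm. A minimal sketch of that matching rule (an illustration, not Sahara's implementation of ``check_plugin_version``):

.. code:: python

    def version_matches(template, version_specs):
        # A spec may be a bare version ("2.7.1") or qualified with the
        # plugin name ("vanilla.2.7.1"); None means "match everything".
        if version_specs is None:
            return True
        name = template["plugin_name"]
        version = template["hadoop_version"]
        return (version in version_specs or
                "%s.%s" % (name, version) in version_specs)

    template = {"plugin_name": "vanilla", "hadoop_version": "2.7.1"}
    assert version_matches(template, None)
    assert version_matches(template, ["2.7.1"])
    assert version_matches(template, ["vanilla.2.7.1"])
    assert not version_matches(template, ["1.2.1"])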
sahara/plugins/default_templates/vanilla/v2_7_1/cluster.json (new file, 24 lines)
@@ -0,0 +1,24 @@
+{
+    "plugin_name": "vanilla",
+    "hadoop_version": "2.7.1",
+    "node_groups": [
+        {
+            "name": "worker",
+            "count": 3,
+            "node_group_template_id": "{vanilla-271-default-worker}"
+        },
+        {
+            "name": "secondary-master",
+            "count": 1,
+            "node_group_template_id": "{vanilla-271-default-secondary-master}"
+        },
+        {
+            "name": "master",
+            "count": 1,
+            "node_group_template_id": "{vanilla-271-default-master}"
+        }
+    ],
+    "name": "vanilla-271-default-cluster",
+    "neutron_management_network": "{neutron_management_network}",
+    "cluster_configs": {}
+}
sahara/plugins/default_templates/vanilla/v2_7_1/master.json (new file, 14 lines)
@@ -0,0 +1,14 @@
+{
+    "plugin_name": "vanilla",
+    "hadoop_version": "2.7.1",
+    "node_processes": [
+        "namenode",
+        "resourcemanager",
+        "hiveserver"
+    ],
+    "name": "vanilla-271-default-master",
+    "floating_ip_pool": "{floating_ip_pool}",
+    "flavor_id": "{flavor_id}",
+    "auto_security_group": "{auto_security_group}",
+    "security_groups": "{security_groups}"
+}
@@ -0,0 +1,14 @@
+{
+    "plugin_name": "vanilla",
+    "hadoop_version": "2.7.1",
+    "node_processes": [
+        "secondarynamenode",
+        "oozie",
+        "historyserver"
+    ],
+    "name": "vanilla-271-default-secondary-master",
+    "floating_ip_pool": "{floating_ip_pool}",
+    "flavor_id": "{flavor_id}",
+    "auto_security_group": "{auto_security_group}",
+    "security_groups": "{security_groups}"
+}
sahara/plugins/default_templates/vanilla/v2_7_1/worker.json (new file, 13 lines)
@@ -0,0 +1,13 @@
+{
+    "plugin_name": "vanilla",
+    "hadoop_version": "2.7.1",
+    "node_processes": [
+        "nodemanager",
+        "datanode"
+    ],
+    "name": "vanilla-271-default-worker",
+    "floating_ip_pool": "{floating_ip_pool}",
+    "flavor_id": "{flavor_id}",
+    "auto_security_group": "{auto_security_group}",
+    "security_groups": "{security_groups}"
+}
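These default templates use ``{name}`` placeholders that are filled in when the templates are loaded. A minimal sketch of that substitution step, under the assumption that every placeholder is a whole string value of the form ``{key}`` (an illustration only, not Sahara's loader):

.. code:: python

    import json
    import re

    def substitute(template_text, values):
        # Replace string values that consist solely of a "{key}" placeholder
        # with the corresponding concrete value, e.g. {flavor_id} -> "4".
        doc = json.loads(template_text)

        def walk(node):
            if isinstance(node, dict):
                return {k: walk(v) for k, v in node.items()}
            if isinstance(node, list):
                return [walk(v) for v in node]
            if isinstance(node, str):
                m = re.fullmatch(r'\{([\w-]+)\}', node)
                if m and m.group(1) in values:
                    return values[m.group(1)]
            return node

        return walk(doc)

    # Hypothetical usage with the worker template above:
    # substitute(open('worker.json').read(),
    #            {'flavor_id': '4', 'floating_ip_pool': 'public'})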
@@ -18,6 +18,8 @@ from oslo_log import log as logging

 from sahara import conductor
 from sahara import context
+from sahara import exceptions as ex
+from sahara.i18n import _
 from sahara.plugins import utils
 from sahara.plugins.vanilla import abstractversionhandler as avm
 from sahara.plugins.vanilla.hadoop2 import config as c
@@ -57,7 +59,10 @@ class VersionHandler(avm.AbstractVersionHandler):
         }

     def validate(self, cluster):
-        vl.validate_cluster_creating(self.pctx, cluster)
+        raise ex.DeprecatedException(
+            _("The vanilla 2.6.0 plugin is now deprecated and will be removed"
+              " in M release. The vanilla 2.7.1 plugin remains and "
+              " continues to be supported."))

     def update_infra(self, cluster):
         pass
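With this change, ``validate()`` on the 2.6.0 handler raises unconditionally, so any attempt to create a 2.6.0 cluster fails fast at validation time. A minimal sketch of what a caller sees; ``DeprecatedException`` here stands in for ``sahara.exceptions.DeprecatedException``, and ``handler``/``cluster`` are assumed to come from the plugin machinery:

.. code:: python

    class DeprecatedException(Exception):
        # Stand-in for sahara.exceptions.DeprecatedException.
        pass

    def try_create(handler, cluster):
        try:
            handler.validate(cluster)  # always raises for vanilla 2.6.0 now
        except DeprecatedException as exc:
            # Surfaces to the user as a validation failure pointing at 2.7.1.
            print("cluster creation rejected: %s" % exc)
            return False
        return True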
@@ -20,14 +20,14 @@ or, if the file is a YAML Mako template:

 .. sourcecode:: console

-    $ tox -e scenario -- -V templatevars.ini etc/scenario/sahara-ci/scenario/vanilla-2.6.0.yaml.mako
+    $ tox -e scenario -- -V templatevars.ini etc/scenario/sahara-ci/scenario/vanilla-2.7.1.yaml.mako
 ..

 where templatevars.ini contains the values of the variables referenced
-by ``vanilla-2.6.0.yaml.mako``.
+by ``vanilla-2.7.1.yaml.mako``.

 For example, you want to run tests for the Vanilla plugin with the Hadoop
-version 2.6.0 In this case you should create ``templatevars.ini`` with
+version 2.7.1. In this case you should create ``templatevars.ini`` with
 the appropriate values (see the section `Variables and sahara-ci templates`_)
 and use the following tox env:

@@ -41,20 +41,20 @@ should use the several YAML and/or YAML Mako template files:

 .. sourcecode:: console

-    $ tox -e scenario -- -V templatevars.ini etc/scenario/sahara-ci/vanilla-1.2.1.yaml.mako etc/scenario/sahara-ci/vanilla-2.6.0.yaml.mako ...
+    $ tox -e scenario -- -V templatevars.ini etc/scenario/sahara-ci/vanilla-1.2.1.yaml.mako etc/scenario/sahara-ci/vanilla-2.7.1.yaml.mako ...
 ..

 Here are a few more examples.

 .. sourcecode:: console

-    $ tox -e scenario -- -V templatevars.ini etc/scenario/sahara-ci/credentials.yaml.mako etc/scenario/sahara-ci/vanilla-2.6.0.yaml.mako
+    $ tox -e scenario -- -V templatevars.ini etc/scenario/sahara-ci/credentials.yaml.mako etc/scenario/sahara-ci/vanilla-2.7.1.yaml.mako

 ..

-will run tests for Vanilla plugin with the Hadoop version 2.6.0 and credential
+will run tests for the Vanilla plugin with the Hadoop version 2.7.1 and credentials
 located in ``etc/scenario/sahara-ci/credential.yaml.mako``, replacing the variables
-included into ``vanilla-2.6.0.yaml.mako`` with the values defined into
+included in ``vanilla-2.7.1.yaml.mako`` with the values defined in
 ``templatevars.ini``.
 For more information about writing scenario YAML files, see the
 section `How to write scenario files`_.
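For reference, the variable substitution these commands rely on is plain Mako templating driven by an ini file. A minimal sketch of that rendering step, assuming ``templatevars.ini`` keeps its values in a single ``[DEFAULT]`` section (an assumption; the scenario runner's real handling may differ):

.. code:: python

    import configparser
    from mako.template import Template

    def render_scenario(mako_path, varfile):
        # Read variable values from the ini file and feed them to the
        # Mako template, producing a concrete scenario YAML.
        parser = configparser.ConfigParser()
        parser.read(varfile)
        variables = dict(parser.defaults())  # assumes a [DEFAULT] section
        return Template(filename=mako_path).render(**variables)

    # Hypothetical usage:
    # yaml_text = render_scenario(
    #     'etc/scenario/sahara-ci/vanilla-2.7.1.yaml.mako', 'templatevars.ini')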
@@ -81,7 +81,7 @@ class TestBase(testtools.TestCase):
             'ssl_cert': '/etc/tests/cert.crt',
             'ssl_verify': True}
         self.base_scenario.plugin_opts = {'plugin_name': 'vanilla',
-                                          'hadoop_version': '2.6.0'}
+                                          'hadoop_version': '2.7.1'}
         self.base_scenario.network = {'type': 'neutron',
                                       'private_network': 'changed_private',
                                       'public_network': 'changed_public',
@@ -142,7 +142,7 @@ class TestBase(testtools.TestCase):
         self.base_scenario.ng_name_map = {}
         self.base_scenario.key_name = 'test_key'
         self.base_scenario.template_path = ('sahara/tests/scenario/templates/'
-                                            'vanilla/2.6.0')
+                                            'vanilla/2.7.1')
         self.job = self.base_scenario.testcase["edp_jobs_flow"].get(
             'test_flow')[0]
         self.base_scenario.cluster_id = 'some_id'
@@ -260,7 +260,7 @@ class TestBase(testtools.TestCase):
         self.base_scenario._init_clients()
         self.assertEqual('internal-db://id_internal_db_data',
                          self.base_scenario._create_internal_db_data(
-                             'sahara/tests/scenario_unit/vanilla2_6_0.yaml'))
+                             'sahara/tests/scenario_unit/vanilla2_7_1.yaml'))

     @mock.patch('swiftclient.client.Connection.put_container',
                 return_value=None)
@@ -34,8 +34,8 @@ class RunnerUnitTest(testtools.TestCase):
         "clusters": [
             {
                 "plugin_name": "vanilla",
-                "plugin_version": "2.6.0",
-                "image": "sahara-vanilla-2.6.0-ubuntu-14.04"
+                "plugin_version": "2.7.1",
+                "image": "sahara-vanilla-2.7.1-ubuntu-14.04"
             }],
     }

@@ -64,12 +64,12 @@ class RunnerUnitTest(testtools.TestCase):
         expected_default_cluster = {
             "clusters": [
                 {
-                    "image": "sahara-vanilla-2.6.0-ubuntu-14.04",
+                    "image": "sahara-vanilla-2.7.1-ubuntu-14.04",
                     "edp_jobs_flow": None,
-                    "class_name": "vanilla2_6_0",
+                    "class_name": "vanilla2_7_1",
                     "plugin_name": "vanilla",
                     "scenario": ['run_jobs', 'scale', 'run_jobs'],
-                    "plugin_version": "2.6.0",
+                    "plugin_version": "2.7.1",
                     "retain_resources": False,
                 }],
         }
@@ -101,8 +101,8 @@ class RunnerUnitTest(testtools.TestCase):
         "clusters": [
             {
                 "plugin_name": "vanilla",
-                "plugin_version": "2.6.0",
-                "image": "sahara-vanilla-2.6.0-ubuntu-14.04",
+                "plugin_version": "2.7.1",
+                "image": "sahara-vanilla-2.7.1-ubuntu-14.04",
                 "edp_jobs_flow": "test_flow",
                 "retain_resources": True
             }],
@@ -167,8 +167,8 @@ class RunnerUnitTest(testtools.TestCase):
         "clusters": [
             {
                 "plugin_name": "vanilla",
-                "plugin_version": "2.6.0",
-                "image": "sahara-vanilla-2.6.0-ubuntu-14.04",
+                "plugin_version": "2.7.1",
+                "image": "sahara-vanilla-2.7.1-ubuntu-14.04",
                 "retain_resources": True,
                 'edp_jobs_flow': [
                     {
@@ -202,7 +202,7 @@ class RunnerUnitTest(testtools.TestCase):
                 }
             ],
             "scenario": ['run_jobs', 'scale', 'run_jobs'],
-            "class_name": "vanilla2_6_0"
+            "class_name": "vanilla2_7_1"
             }],
     }

@@ -219,14 +219,14 @@ class RunnerUnitTest(testtools.TestCase):
     @mock.patch('os.system', return_value=None)
     def test_runner_main(self, mock_os, mock_sys):
         sys.argv = ['sahara/tests/scenario/runner.py',
-                    'sahara/tests/scenario_unit/vanilla2_6_0.yaml']
+                    'sahara/tests/scenario_unit/vanilla2_7_1.yaml']
         runner.main()

     @mock.patch('sys.exit', return_value=None)
     @mock.patch('os.system', return_value=None)
     def test_runner_template_missing_varfile(self, mock_os, mock_sys):
         sys.argv = ['sahara/tests/scenario/runner.py',
-                    'sahara/tests/scenario_unit/vanilla2_6_0.yaml.mako']
+                    'sahara/tests/scenario_unit/vanilla2_7_1.yaml.mako']
         self.assertRaises(NameError, runner.main)

     @mock.patch('sys.exit', return_value=None)
@@ -235,7 +235,7 @@ class RunnerUnitTest(testtools.TestCase):
         sys.argv = ['sahara/tests/scenario/runner.py',
                     '-V',
                     'sahara/tests/scenario_unit/templatevars_nodefault.ini',
-                    'sahara/tests/scenario_unit/vanilla2_6_0.yaml.mako']
+                    'sahara/tests/scenario_unit/vanilla2_7_1.yaml.mako']
         self.assertRaises(NameError, runner.main)

     @mock.patch('sys.exit', return_value=None)
@@ -244,7 +244,7 @@ class RunnerUnitTest(testtools.TestCase):
         sys.argv = ['sahara/tests/scenario/runner.py',
                     '-V',
                     'sahara/tests/scenario_unit/templatevars_incomplete.ini',
-                    'sahara/tests/scenario_unit/vanilla2_6_0.yaml.mako']
+                    'sahara/tests/scenario_unit/vanilla2_7_1.yaml.mako']
         self.assertRaises(NameError, runner.main)

     @mock.patch('sys.exit', return_value=None)
@@ -253,5 +253,5 @@ class RunnerUnitTest(testtools.TestCase):
         sys.argv = ['sahara/tests/scenario/runner.py',
                     '-V',
                     'sahara/tests/scenario_unit/templatevars_complete.ini',
-                    'sahara/tests/scenario_unit/vanilla2_6_0.yaml.mako']
+                    'sahara/tests/scenario_unit/vanilla2_7_1.yaml.mako']
         runner.main()

@@ -21,7 +21,7 @@ from sahara.tests.scenario import validation

 class TestValidation(testtools.TestCase):
     def test_validation(self):
-        with open("sahara/tests/scenario_unit/vanilla2_6_0.yaml",
+        with open("sahara/tests/scenario_unit/vanilla2_7_1.yaml",
                   "r") as yaml_file:
             config = yaml.load(yaml_file)
         self.assertIsNone(validation.validate(config))
@@ -14,8 +14,8 @@ network:

 clusters:
   - plugin_name: vanilla
-    plugin_version: 2.6.0
-    image: sahara-vanilla-2.6.0-ubuntu-14.04
+    plugin_version: 2.7.1
+    image: sahara-vanilla-2.7.1-ubuntu-14.04
     node_group_templates:
       - name: master
         node_processes:
@@ -63,7 +63,7 @@ edp_jobs_flow:
     - type: Java
       additional_libs:
         - type: database
-          source: etc/edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.6.0.jar
+          source: etc/edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.7.1.jar
       configs:
         edp.java.main_class: org.apache.hadoop.examples.QuasiMonteCarlo
       args:
@@ -15,7 +15,7 @@ network:

 clusters:
   - plugin_name: vanilla
-    plugin_version: 2.6.0
+    plugin_version: 2.7.1
     image: ${vanilla_two_six_image}
     node_group_templates:
       - name: master
@@ -64,7 +64,7 @@ edp_jobs_flow:
     - type: Java
      additional_libs:
        - type: database
-          source: etc/edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.6.0.jar
+          source: etc/edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.7.1.jar
       configs:
         edp.java.main_class: org.apache.hadoop.examples.QuasiMonteCarlo
       args:
@@ -29,7 +29,7 @@ from sahara.tests.unit.db.templates import common as c

 cluster_json = {
     "plugin_name": "vanilla",
-    "hadoop_version": "2.6.0",
+    "hadoop_version": "2.7.1",
     "node_groups": [
         {
             "name": "worker",
@@ -49,7 +49,7 @@ cluster_json = {

 master_json = {
     "plugin_name": "vanilla",
-    "hadoop_version": "2.6.0",
+    "hadoop_version": "2.7.1",
     "node_processes": [
         "namenode",
         "resourcemanager",
@@ -64,7 +64,7 @@ master_json = {

 worker_json = {
     "plugin_name": "vanilla",
-    "hadoop_version": "2.6.0",
+    "hadoop_version": "2.7.1",
     "node_processes": [
         "nodemanager",
         "datanode"
@@ -262,7 +262,7 @@ class TemplateUpdateTestCase(base.ConductorManagerTestCase):

         # Plugin name/version filtering applied
         option_values = {"plugin_name": "vanilla",
-                         "plugin_version": "2.6.0"}
+                         "plugin_version": "2.7.1"}
         template_api.set_conf(Config(option_values))
         ng_templates, cl_templates = template_api.process_files(tempdir, files)
         self.assertEqual(1, len(cl_templates))
@@ -276,7 +276,7 @@ class TemplateUpdateTestCase(base.ConductorManagerTestCase):
         self.assertEqual(0, len(ng_templates))

         option_values = {"plugin_name": "hdp",
-                         "plugin_version": "2.6.0"}
+                         "plugin_version": "2.7.1"}
         template_api.set_conf(Config(option_values))
         ng_templates, cl_templates = template_api.process_files(tempdir, files)
         self.assertEqual(0, len(cl_templates))
@@ -59,29 +59,29 @@ class TemplateUtilsTestCase(base.ConductorManagerTestCase):
     def test_check_plugin_version(self):

         template = {"plugin_name": "vanilla",
-                    "hadoop_version": "2.6.0"}
+                    "hadoop_version": "2.7.1"}

         self.assertTrue(utils.check_plugin_version(template, None))
-        self.assertTrue(utils.check_plugin_version(template, ["2.6.0"]))
+        self.assertTrue(utils.check_plugin_version(template, ["2.7.1"]))
         self.assertTrue(utils.check_plugin_version(template,
-                                                   ["vanilla.2.6.0"]))
+                                                   ["vanilla.2.7.1"]))
         self.assertFalse(utils.check_plugin_version(template, ["1.2.1"]))

     def test_check_plugin_name_and_version(self):

         template = {"plugin_name": "vanilla",
-                    "hadoop_version": "2.6.0"}
+                    "hadoop_version": "2.7.1"}

         self.assertTrue(utils.check_plugin_name_and_version(
-            template, None, ["2.6.0"]))
+            template, None, ["2.7.1"]))
         self.assertTrue(utils.check_plugin_name_and_version(
             template, ["vanilla"], None))
         self.assertTrue(utils.check_plugin_name_and_version(
-            template, ["vanilla"], ["2.6.0"]))
+            template, ["vanilla"], ["2.7.1"]))
         self.assertTrue(utils.check_plugin_name_and_version(
-            template, ["vanilla"], ["vanilla.2.6.0"]))
+            template, ["vanilla"], ["vanilla.2.7.1"]))
         self.assertFalse(utils.check_plugin_name_and_version(
-            template, ["hdp"], ["2.6.0"]))
+            template, ["hdp"], ["2.7.1"]))

     def test_check_node_group_template_usage(self):

@@ -32,7 +32,7 @@ class VanillaPluginTest(base.SaharaWithDbTestCase):

     @mock.patch('sahara.service.edp.hdfs_helper.create_dir_hadoop2')
     def test_edp_calls_hadoop2_create_dir(self, create_dir):
-        for version in ['2.6.0']:
+        for version in ['2.7.1']:
             cluster_dict = {
                 'name': 'cluster' + version.replace('.', '_'),
                 'plugin_name': 'vanilla',
@@ -84,7 +84,7 @@ class ValidationTest(base.SaharaTestCase):
             lst.append(self.ng[i])

         return tu.create_cluster("cluster1", "tenant1", "vanilla",
-                                 "2.6.0", lst, **kwargs)
+                                 "2.7.1", lst, **kwargs)

     def _validate_case(self, *args):
         cl = self._create_cluster(*args)
@@ -15,7 +15,7 @@

 import mock

-from sahara.plugins.vanilla.v2_6_0 import edp_engine
+from sahara.plugins.vanilla.v2_7_1 import edp_engine
 from sahara.tests.unit import base as sahara_base
 from sahara.utils import edp

@@ -31,7 +31,7 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
         actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
             edp.JOB_TYPE_HIVE)
         get_possible_hive_config_from.assert_called_once_with(
-            'plugins/vanilla/v2_6_0/resources/hive-default.xml')
+            'plugins/vanilla/v2_7_1/resources/hive-default.xml')
         self.assertEqual(expected_config, actual_config)

     @mock.patch('sahara.plugins.vanilla.hadoop2.edp_engine.EdpOozieEngine')
@@ -55,7 +55,7 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
         actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
             edp.JOB_TYPE_MAPREDUCE)
         get_possible_mapreduce_config_from.assert_called_once_with(
-            'plugins/vanilla/v2_6_0/resources/mapred-default.xml')
+            'plugins/vanilla/v2_7_1/resources/mapred-default.xml')
         self.assertEqual(expected_config, actual_config)

     @mock.patch(
@@ -68,7 +68,7 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
         actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
             edp.JOB_TYPE_MAPREDUCE_STREAMING)
         get_possible_mapreduce_config_from.assert_called_once_with(
-            'plugins/vanilla/v2_6_0/resources/mapred-default.xml')
+            'plugins/vanilla/v2_7_1/resources/mapred-default.xml')
         self.assertEqual(expected_config, actual_config)

     @mock.patch(
@@ -81,7 +81,7 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
         actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
             edp.JOB_TYPE_PIG)
         get_possible_pig_config_from.assert_called_once_with(
-            'plugins/vanilla/v2_6_0/resources/mapred-default.xml')
+            'plugins/vanilla/v2_7_1/resources/mapred-default.xml')
         self.assertEqual(expected_config, actual_config)

     @mock.patch('sahara.plugins.vanilla.hadoop2.edp_engine.EdpOozieEngine')