[Sahara] Improved provisioning plugin support

The CDH plugin now uses the correct service names. The HDP plugin now
supports version 2.2, and the Vanilla plugin now supports version 2.6.0.
The config dictionaries were moved to a separate file for better scenario
readability.

Change-Id: I63864daae851dd9500e2127cc821a19b0f46b35a
commit 1e516d32bf
parent 2efe0cb9ed
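For context, the sketch below (not part of the commit) shows how the relocated dictionaries are intended to be consumed by a scenario; the plugin name and Hadoop version are illustrative examples taken from the new consts module shown in the diff.

# Minimal usage sketch, assuming the module layout introduced by this commit.
from rally.benchmark.scenarios.sahara import consts as sahara_consts

plugin_name = "vanilla"
hadoop_version = "2.6.0"

# Scenarios now read process lists and replication settings from the shared
# module instead of per-class attributes on SaharaScenario.
master = sahara_consts.NODE_PROCESSES[plugin_name][hadoop_version]["master"]
worker = sahara_consts.NODE_PROCESSES[plugin_name][hadoop_version]["worker"]
replication = sahara_consts.REPLICATION_CONFIGS[plugin_name][hadoop_version]

print(master)       # ["namenode", "resourcemanager", "historyserver", "oozie"]
print(replication)  # {"target": "HDFS", "config_name": "dfs.replication"}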
rally/benchmark/scenarios/sahara/consts.py (new file, 120 lines added)
@@ -0,0 +1,120 @@
# Copyright 2015: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

NODE_PROCESSES = {
    "vanilla": {
        "1.2.1": {
            "master": ["namenode", "jobtracker", "oozie"],
            "worker": ["datanode", "tasktracker"]
        },
        "2.3.0": {
            "master": ["namenode", "resourcemanager", "historyserver",
                       "oozie"],
            "worker": ["datanode", "nodemanager"]
        },
        "2.4.1": {
            "master": ["namenode", "resourcemanager", "historyserver",
                       "oozie"],
            "worker": ["datanode", "nodemanager"]
        },
        "2.6.0": {
            "master": ["namenode", "resourcemanager", "historyserver",
                       "oozie"],
            "worker": ["datanode", "nodemanager"]
        }
    },
    "hdp": {
        "1.3.2": {
            "master": ["JOBTRACKER", "NAMENODE", "SECONDARY_NAMENODE",
                       "GANGLIA_SERVER", "NAGIOS_SERVER",
                       "AMBARI_SERVER", "OOZIE_SERVER"],
            "worker": ["TASKTRACKER", "DATANODE", "HDFS_CLIENT",
                       "MAPREDUCE_CLIENT", "OOZIE_CLIENT", "PIG"]
        },
        "2.0.6": {
            "manager": ["AMBARI_SERVER", "GANGLIA_SERVER",
                        "NAGIOS_SERVER"],
            "master": ["NAMENODE", "SECONDARY_NAMENODE",
                       "ZOOKEEPER_SERVER", "ZOOKEEPER_CLIENT",
                       "HISTORYSERVER", "RESOURCEMANAGER",
                       "OOZIE_SERVER"],
            "worker": ["DATANODE", "HDFS_CLIENT", "ZOOKEEPER_CLIENT",
                       "PIG", "MAPREDUCE2_CLIENT", "YARN_CLIENT",
                       "NODEMANAGER", "OOZIE_CLIENT"]
        },
        "2.2": {
            "manager": ["AMBARI_SERVER", "GANGLIA_SERVER",
                        "NAGIOS_SERVER"],
            "master": ["NAMENODE", "SECONDARY_NAMENODE",
                       "ZOOKEEPER_SERVER", "ZOOKEEPER_CLIENT",
                       "HISTORYSERVER", "RESOURCEMANAGER",
                       "OOZIE_SERVER"],
            "worker": ["DATANODE", "HDFS_CLIENT", "ZOOKEEPER_CLIENT",
                       "PIG", "MAPREDUCE2_CLIENT", "YARN_CLIENT",
                       "NODEMANAGER", "OOZIE_CLIENT", "TEZ_CLIENT"]
        }
    },
    "cdh": {
        "5": {
            "manager": ["CLOUDERA_MANAGER"],
            "master": ["HDFS_NAMENODE", "YARN_RESOURCEMANAGER",
                       "OOZIE_SERVER", "YARN_JOBHISTORY",
                       "HDFS_SECONDARYNAMENODE", "HIVE_METASTORE",
                       "HIVE_SERVER2"],
            "worker": ["YARN_NODEMANAGER", "HDFS_DATANODE"]
        }
    }
}

REPLICATION_CONFIGS = {
    "vanilla": {
        "1.2.1": {
            "target": "HDFS",
            "config_name": "dfs.replication"
        },
        "2.3.0": {
            "target": "HDFS",
            "config_name": "dfs.replication"
        },
        "2.4.1": {
            "target": "HDFS",
            "config_name": "dfs.replication"
        },
        "2.6.0": {
            "target": "HDFS",
            "config_name": "dfs.replication"
        }
    },
    "hdp": {
        "1.3.2": {
            "target": "HDFS",
            "config_name": "dfs.replication"
        },
        "2.0.6": {
            "target": "HDFS",
            "config_name": "dfs.replication"
        },
        "2.2": {
            "target": "HDFS",
            "config_name": "dfs.replication"
        }
    },
    "cdh": {
        "5": {
            "target": "HDFS",
            "config_name": "dfs_replication"
        }
    }
}
@@ -20,6 +20,7 @@ from oslo_utils import uuidutils
 from saharaclient.api import base as sahara_base
 
 from rally.benchmark.scenarios import base
+from rally.benchmark.scenarios.sahara import consts as sahara_consts
 from rally.benchmark import utils as bench_utils
 from rally.common.i18n import _
 from rally.common import log as logging
@@ -51,87 +52,6 @@ class SaharaScenario(base.Scenario):
 
     RESOURCE_NAME_LENGTH = 20
 
-    # TODO(nkonovalov): Add other provisioning plugins
-    NODE_PROCESSES = {
-        "vanilla": {
-            "1.2.1": {
-                "master": ["namenode", "jobtracker", "oozie"],
-                "worker": ["datanode", "tasktracker"]
-            },
-            "2.3.0": {
-                "master": ["namenode", "resourcemanager", "historyserver",
-                           "oozie"],
-                "worker": ["datanode", "nodemanager"]
-            },
-            "2.4.1": {
-                "master": ["namenode", "resourcemanager", "historyserver",
-                           "oozie"],
-                "worker": ["datanode", "nodemanager"]
-            }
-        },
-        "hdp": {
-            "1.3.2": {
-                "master": ["JOBTRACKER", "NAMENODE", "SECONDARY_NAMENODE",
-                           "GANGLIA_SERVER", "NAGIOS_SERVER",
-                           "AMBARI_SERVER", "OOZIE_SERVER"],
-                "worker": ["TASKTRACKER", "DATANODE", "HDFS_CLIENT",
-                           "MAPREDUCE_CLIENT", "OOZIE_CLIENT", "PIG"]
-            },
-            "2.0.6": {
-                "manager": ["AMBARI_SERVER", "GANGLIA_SERVER",
-                            "NAGIOS_SERVER"],
-                "master": ["NAMENODE", "SECONDARY_NAMENODE",
-                           "ZOOKEEPER_SERVER", "ZOOKEEPER_CLIENT",
-                           "HISTORYSERVER", "RESOURCEMANAGER",
-                           "OOZIE_SERVER"],
-                "worker": ["DATANODE", "HDFS_CLIENT", "ZOOKEEPER_CLIENT",
-                           "PIG", "MAPREDUCE2_CLIENT", "YARN_CLIENT",
-                           "NODEMANAGER", "OOZIE_CLIENT"]
-            }
-        },
-        "cdh": {
-            "5": {
-                "manager": ["MANAGER"],
-                "master": ["NAMENODE", "SECONDARYNAMENODE", "RESOURCEMANAGER",
-                           "JOBHISTORY", "OOZIE_SERVER"],
-                "worker": ["DATANODE", "NODEMANAGER"]
-            }
-        }
-    }
-
-    REPLICATION_CONFIGS = {
-        "vanilla": {
-            "1.2.1": {
-                "target": "HDFS",
-                "config_name": "dfs.replication"
-            },
-            "2.3.0": {
-                "target": "HDFS",
-                "config_name": "dfs.replication"
-            },
-            "2.4.1": {
-                "target": "HDFS",
-                "config_name": "dfs.replication"
-            }
-        },
-        "hdp": {
-            "1.3.2": {
-                "target": "HDFS",
-                "config_name": "dfs.replication"
-            },
-            "2.0.6": {
-                "target": "HDFS",
-                "config_name": "dfs.replication"
-            }
-        },
-        "cdh": {
-            "5": {
-                "target": "HDFS",
-                "config_name": "dfs_replication"
-            }
-        }
-    }
-
     @base.atomic_action_timer("sahara.list_node_group_templates")
     def _list_node_group_templates(self):
         """Return user Node Group Templates list."""
@@ -155,8 +75,8 @@ class SaharaScenario(base.Scenario):
             plugin_name=plugin_name,
             hadoop_version=hadoop_version,
             flavor_id=flavor_id,
-            node_processes=self.NODE_PROCESSES[plugin_name][hadoop_version]
-            ["master"])
+            node_processes=sahara_consts.NODE_PROCESSES[plugin_name]
+            [hadoop_version]["master"])
 
     @base.atomic_action_timer("sahara.create_worker_node_group_template")
     def _create_worker_node_group_template(self, flavor_id, plugin_name,
@@ -176,8 +96,8 @@ class SaharaScenario(base.Scenario):
             plugin_name=plugin_name,
             hadoop_version=hadoop_version,
             flavor_id=flavor_id,
-            node_processes=self.NODE_PROCESSES[plugin_name][hadoop_version]
-            ["worker"])
+            node_processes=sahara_consts.NODE_PROCESSES[plugin_name]
+            [hadoop_version]["worker"])
 
     @base.atomic_action_timer("sahara.delete_node_group_template")
     def _delete_node_group_template(self, node_group):
@@ -288,7 +208,7 @@ class SaharaScenario(base.Scenario):
                                   plugin_name):
         replication_value = min(workers_count, 3)
         # 3 is a default Hadoop replication
-        conf = self.REPLICATION_CONFIGS[plugin_name][hadoop_version]
+        conf = sahara_consts.REPLICATION_CONFIGS[plugin_name][hadoop_version]
         LOG.debug("Using replication factor: %s" % replication_value)
         replication_config = {
             conf["target"]: {
@@ -339,26 +259,27 @@ class SaharaScenario(base.Scenario):
             {
                 "name": "master-ng",
                 "flavor_id": flavor_id,
-                "node_processes": self.NODE_PROCESSES[plugin_name]
+                "node_processes": sahara_consts.NODE_PROCESSES[plugin_name]
                 [hadoop_version]["master"],
                 "count": 1
             }, {
                 "name": "worker-ng",
                 "flavor_id": flavor_id,
-                "node_processes": self.NODE_PROCESSES[plugin_name]
+                "node_processes": sahara_consts.NODE_PROCESSES[plugin_name]
                 [hadoop_version]["worker"],
                 "count": workers_count
             }
         ]
 
-        if "manager" in self.NODE_PROCESSES[plugin_name][hadoop_version]:
+        if "manager" in (sahara_consts.NODE_PROCESSES[plugin_name]
+                         [hadoop_version]):
             # Adding manager group separately as it is supported only in
             # specific configurations.
 
             node_groups.append({
                 "name": "manager-ng",
                 "flavor_id": flavor_id,
-                "node_processes": self.NODE_PROCESSES[plugin_name]
+                "node_processes": sahara_consts.NODE_PROCESSES[plugin_name]
                 [hadoop_version]["manager"],
                 "count": 1
             })
@@ -51,7 +51,9 @@ class SaharaUtilsTestCase(test.TestCase):
     @mock.patch(SAHARA_UTILS + ".SaharaScenario._generate_random_name",
                 return_value="random_name")
     @mock.patch(SAHARA_UTILS + ".SaharaScenario.clients")
-    def test_create_node_group_templates(self, mock_clients, mock_random_name):
+    @mock.patch(SAHARA_UTILS + ".sahara_consts")
+    def test_create_node_group_templates(self, mock_constants, mock_clients,
+                                         mock_random_name):
 
         scenario = utils.SaharaScenario()
         mock_processes = {
@@ -63,7 +65,7 @@ class SaharaUtilsTestCase(test.TestCase):
             }
         }
 
-        scenario.NODE_PROCESSES = mock_processes
+        mock_constants.NODE_PROCESSES = mock_processes
 
         scenario._create_master_node_group_template(
             flavor_id="test_flavor",
@@ -117,7 +119,9 @@ class SaharaUtilsTestCase(test.TestCase):
     @mock.patch(SAHARA_UTILS + ".SaharaScenario._generate_random_name",
                 return_value="random_name")
     @mock.patch(SAHARA_UTILS + ".SaharaScenario.clients")
-    def test_launch_cluster(self, mock_clients, mock_random_name):
+    @mock.patch(SAHARA_UTILS + ".sahara_consts")
+    def test_launch_cluster(self, mock_constants,
+                            mock_clients, mock_random_name):
 
         clients_values = mock.MagicMock(return_value=[consts.Service.NEUTRON])
         mock_clients.services.return_value = mock.MagicMock(
@@ -181,8 +185,8 @@ class SaharaUtilsTestCase(test.TestCase):
             }
         ]
 
-        scenario.NODE_PROCESSES = mock_processes
-        scenario.REPLICATION_CONFIGS = mock_configs
+        mock_constants.NODE_PROCESSES = mock_processes
+        mock_constants.REPLICATION_CONFIGS = mock_configs
 
         mock_clients("sahara").clusters.create.return_value = mock.MagicMock(
             id="test_cluster_id")
@@ -220,7 +224,9 @@ class SaharaUtilsTestCase(test.TestCase):
     @mock.patch(SAHARA_UTILS + ".SaharaScenario._generate_random_name",
                 return_value="random_name")
     @mock.patch(SAHARA_UTILS + ".SaharaScenario.clients")
-    def test_launch_cluster_error(self, mock_clients, mock_random_name):
+    @mock.patch(SAHARA_UTILS + ".sahara_consts")
+    def test_launch_cluster_error(self, mock_constants, mock_clients,
+                                  mock_random_name):
 
         scenario = utils.SaharaScenario(clients=mock.MagicMock())
         mock_processes = {
@@ -241,8 +247,8 @@ class SaharaUtilsTestCase(test.TestCase):
             }
         }
 
-        scenario.NODE_PROCESSES = mock_processes
-        scenario.REPLICATION_CONFIGS = mock_configs
+        mock_constants.NODE_PROCESSES = mock_processes
+        mock_constants.REPLICATION_CONFIGS = mock_configs
 
         mock_clients("sahara").clusters.create.return_value = mock.MagicMock(
             id="test_cluster_id")