[Sahara] Improved flavor parameter for clusters

The flavors for the Master and Worker node groups are now configured separately in both the scenarios and the context. This allows setting up better-balanced clusters where the Master node requires a bigger flavor than the workers, which is almost always the case for plugins such as CDH or HDP.

Change-Id: I78f4145fecb327dae06ead02bd1aa85e2ba2b79a
parent 6773ccbee7
commit 400cf85990
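With this change, a Rally task can size the master and worker node groups independently. A minimal task snippet in the new form (a sketch only; the flavor names and versions are taken from the samples updated below and should be adapted to the target cloud):

  SaharaClusters.create_and_delete_cluster:
    -
      args:
        master_flavor:
          name: "m1.large"
        worker_flavor:
          name: "m1.medium"
        workers_count: 3
        plugin_name: "vanilla"
        hadoop_version: "2.3.0"

The old single `flavor` argument is still accepted but deprecated; see the scenario and utils changes below.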
@@ -4,7 +4,9 @@
   SaharaClusters.create_and_delete_cluster:
     -
       args:
-        flavor:
+        master_flavor:
           name: "m1.small"
+        worker_flavor:
+          name: "m1.small"
         workers_count: 1
         plugin_name: "vanilla"
@@ -51,6 +51,12 @@ class SaharaCluster(context.Context):
             "flavor_id": {
                 "type": "string",
             },
+            "master_flavor_id": {
+                "type": "string",
+            },
+            "worker_flavor_id": {
+                "type": "string",
+            },
             "floating_ip_pool": {
                 "type": "string",
             },
@@ -86,7 +92,7 @@ class SaharaCluster(context.Context):
         },
         "additionalProperties": False,
         "required": ["plugin_name", "hadoop_version", "workers_count",
-                     "flavor_id"]
+                     "master_flavor_id", "worker_flavor_id"]
     }

     @logging.log_task_wrapper(LOG.info, _("Enter context: `Sahara Cluster`"))
@@ -113,7 +119,9 @@ class SaharaCluster(context.Context):
             cluster = scenario._launch_cluster(
                 plugin_name=self.config["plugin_name"],
                 hadoop_version=self.config["hadoop_version"],
-                flavor_id=self.config["flavor_id"],
+                flavor_id=self.config.get("flavor_id", None),
+                master_flavor_id=self.config["master_flavor_id"],
+                worker_flavor_id=self.config["worker_flavor_id"],
                 workers_count=self.config["workers_count"],
                 image_id=image_id,
                 floating_ip_pool=floating_ip_pool,
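The context change mirrors the scenario change: `master_flavor_id` and `worker_flavor_id` become required keys of the `sahara_cluster` context, while `flavor_id` stays optional for backward compatibility. An illustrative context configuration (the flavor IDs are taken from the job samples updated later in this commit):

  sahara_cluster:
    master_flavor_id: "4"
    worker_flavor_id: "3"
    workers_count: 3
    plugin_name: "vanilla"
    hadoop_version: "2.6.0"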
@@ -27,16 +27,22 @@ class SaharaClusters(utils.SaharaScenario):
     """Benchmark scenarios for Sahara clusters."""

     @types.set(flavor=types.FlavorResourceType,
+               master_flavor=types.FlavorResourceType,
+               worker_flavor=types.FlavorResourceType,
                neutron_net=types.NeutronNetworkResourceType,
                floating_ip_pool=types.NeutronNetworkResourceType)
-    @validation.flavor_exists("flavor")
+    @validation.flavor_exists("master_flavor")
+    @validation.flavor_exists("worker_flavor")
     @validation.required_contexts("users", "sahara_image")
     @validation.number("workers_count", minval=1, integer_only=True)
     @validation.required_services(consts.Service.SAHARA)
     @validation.required_openstack(users=True)
     @scenario.configure(context={"cleanup": ["sahara"]})
-    def create_and_delete_cluster(self, flavor, workers_count, plugin_name,
-                                  hadoop_version, floating_ip_pool=None,
+    def create_and_delete_cluster(self, workers_count, plugin_name,
+                                  hadoop_version,
+                                  master_flavor=None, worker_flavor=None,
+                                  flavor=None,
+                                  floating_ip_pool=None,
                                   volumes_per_node=None,
                                   volumes_size=None, auto_security_group=None,
                                   security_groups=None, node_configs=None,
@@ -49,7 +55,11 @@ class SaharaClusters(utils.SaharaScenario):
         'Active' and deletes it.

         :param flavor: Nova flavor that will be for nodes in the
-                       created node groups
+                       created node groups. Deprecated.
+        :param master_flavor: Nova flavor that will be used for the master
+                              instance of the cluster
+        :param worker_flavor: Nova flavor that will be used for the workers of
+                              the cluster
         :param workers_count: number of worker instances in a cluster
         :param plugin_name: name of a provisioning plugin
         :param hadoop_version: version of Hadoop distribution supported by
@@ -85,6 +95,8 @@ class SaharaClusters(utils.SaharaScenario):

         cluster = self._launch_cluster(
             flavor_id=flavor,
+            master_flavor_id=master_flavor,
+            worker_flavor_id=worker_flavor,
             image_id=image_id,
             workers_count=workers_count,
             plugin_name=plugin_name,
@@ -101,14 +113,19 @@ class SaharaClusters(utils.SaharaScenario):

         self._delete_cluster(cluster)

-    @types.set(flavor=types.FlavorResourceType)
-    @validation.flavor_exists("flavor")
+    @types.set(flavor=types.FlavorResourceType,
+               master_flavor=types.FlavorResourceType,
+               worker_flavor=types.FlavorResourceType)
+    @validation.flavor_exists("master_flavor")
+    @validation.flavor_exists("worker_flavor")
     @validation.required_services(consts.Service.SAHARA)
     @validation.required_contexts("users", "sahara_image")
     @validation.number("workers_count", minval=1, integer_only=True)
     @scenario.configure(context={"cleanup": ["sahara"]})
-    def create_scale_delete_cluster(self, flavor, workers_count, plugin_name,
+    def create_scale_delete_cluster(self, master_flavor, worker_flavor,
+                                    workers_count, plugin_name,
                                     hadoop_version, deltas,
+                                    flavor=None,
                                     floating_ip_pool=None,
                                     volumes_per_node=None, volumes_size=None,
                                     auto_security_group=None,
@@ -125,7 +142,11 @@ class SaharaClusters(utils.SaharaScenario):
         add 2 worker nodes to the cluster and the second will remove two.

         :param flavor: Nova flavor that will be for nodes in the
-                       created node groups
+                       created node groups. Deprecated.
+        :param master_flavor: Nova flavor that will be used for the master
+                              instance of the cluster
+        :param worker_flavor: Nova flavor that will be used for the workers of
+                              the cluster
         :param workers_count: number of worker instances in a cluster
         :param plugin_name: name of a provisioning plugin
         :param hadoop_version: version of Hadoop distribution supported by
@@ -166,6 +187,8 @@ class SaharaClusters(utils.SaharaScenario):

         cluster = self._launch_cluster(
             flavor_id=flavor,
+            master_flavor_id=master_flavor,
+            worker_flavor_id=worker_flavor,
             image_id=image_id,
             workers_count=workers_count,
             plugin_name=plugin_name,
@@ -244,10 +244,15 @@ class SaharaScenario(scenario.OpenStackScenario):
         }
         return replication_config

+    @logging.log_deprecated_args("`flavor_id` argument is deprecated. Use "
+                                 "`master_flavor_id` and `worker_flavor_id` "
+                                 "parameters.", rally_version="2.0",
+                                 deprecated_args=["flavor_id"])
     @atomic.action_timer("sahara.launch_cluster")
-    def _launch_cluster(self, plugin_name, hadoop_version, flavor_id,
-                        image_id, workers_count, floating_ip_pool=None,
-                        volumes_per_node=None,
+    def _launch_cluster(self, plugin_name, hadoop_version, master_flavor_id,
+                        worker_flavor_id, image_id, workers_count,
+                        flavor_id=None,
+                        floating_ip_pool=None, volumes_per_node=None,
                         volumes_size=None, auto_security_group=None,
                         security_groups=None, node_configs=None,
                         cluster_configs=None, enable_anti_affinity=False,
@@ -261,7 +266,9 @@ class SaharaScenario(scenario.OpenStackScenario):

         :param plugin_name: provisioning plugin name
         :param hadoop_version: Hadoop version supported by the plugin
-        :param flavor_id: flavor which will be used to create instances
+        :param master_flavor_id: flavor which will be used to create master
+                                 instance
+        :param worker_flavor_id: flavor which will be used to create workers
         :param image_id: image id that will be used to boot instances
         :param workers_count: number of worker instances. All plugins will
                               also add one Master instance and some plugins
@@ -295,16 +302,22 @@ class SaharaScenario(scenario.OpenStackScenario):
         else:
             proxies_count = 0

+        if flavor_id:
+            # Note: the deprecated argument is used. Falling back to single
+            # flavor behavior.
+            master_flavor_id = flavor_id
+            worker_flavor_id = flavor_id
+
         node_groups = [
             {
                 "name": "master-ng",
-                "flavor_id": flavor_id,
+                "flavor_id": master_flavor_id,
                 "node_processes": sahara_consts.NODE_PROCESSES[plugin_name]
                 [hadoop_version]["master"],
                 "count": 1
             }, {
                 "name": "worker-ng",
-                "flavor_id": flavor_id,
+                "flavor_id": worker_flavor_id,
                 "node_processes": sahara_consts.NODE_PROCESSES[plugin_name]
                 [hadoop_version]["worker"],
                 "count": workers_count - proxies_count
@@ -314,7 +327,7 @@ class SaharaScenario(scenario.OpenStackScenario):
         if proxies_count:
             node_groups.append({
                 "name": "proxy-ng",
-                "flavor_id": flavor_id,
+                "flavor_id": worker_flavor_id,
                 "node_processes": sahara_consts.NODE_PROCESSES[plugin_name]
                 [hadoop_version]["worker"],
                 "count": proxies_count
@@ -327,7 +340,7 @@ class SaharaScenario(scenario.OpenStackScenario):

             node_groups.append({
                 "name": "manager-ng",
-                "flavor_id": flavor_id,
+                "flavor_id": worker_flavor_id,
                 "node_processes": sahara_consts.NODE_PROCESSES[plugin_name]
                 [hadoop_version]["manager"],
                 "count": 1
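For backward compatibility, `_launch_cluster` still accepts the deprecated `flavor_id`: when it is set, the master, worker, proxy, and manager node groups all fall back to that single flavor, and `logging.log_deprecated_args` is expected to report the deprecated usage. An old-style task such as the sketch below (flavor name illustrative) therefore keeps working unchanged:

  SaharaClusters.create_and_delete_cluster:
    -
      args:
        flavor:
          name: "m1.small"
        workers_count: 3
        plugin_name: "vanilla"
        hadoop_version: "2.3.0"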
@@ -2,8 +2,11 @@
     "SaharaClusters.create_and_delete_cluster": [
         {
             "args": {
-                "flavor": {
-                    "name": "m1.small"
+                "master_flavor": {
+                    "name": "m1.large"
+                },
+                "worker_flavor": {
+                    "name": "m1.medium"
                 },
                 "workers_count": 3,
                 "plugin_name": "vanilla",
@@ -2,8 +2,10 @@
   SaharaClusters.create_and_delete_cluster:
     -
       args:
-        flavor:
-          name: "m1.small"
+        master_flavor:
+          name: "m1.large"
+        worker_flavor:
+          name: "m1.medium"
         workers_count: 3
         plugin_name: "vanilla"
         hadoop_version: "2.3.0"
@@ -2,8 +2,11 @@
     "SaharaClusters.create_scale_delete_cluster": [
         {
             "args": {
-                "flavor": {
-                    "name": "m1.small"
+                "master_flavor": {
+                    "name": "m1.large"
+                },
+                "worker_flavor": {
+                    "name": "m1.medium"
                 },
                 "workers_count": 3,
                 "deltas": [1, -1, 1, -1],
@@ -2,8 +2,10 @@
   SaharaClusters.create_scale_delete_cluster:
     -
       args:
-        flavor:
-          name: "m1.small"
+        master_flavor:
+          name: "m1.large"
+        worker_flavor:
+          name: "m1.medium"
         workers_count: 3
         deltas:
           - 1
@@ -47,7 +47,8 @@
             ]
         },
         "sahara_cluster": {
-            "flavor_id": "2",
+            "master_flavor_id": "4",
+            "worker_flavor_id": "3",
             "workers_count": 3,
             "plugin_name": "vanilla",
             "hadoop_version": "2.6.0",
@@ -44,7 +44,8 @@
            name: "tests.jar"
            download_url: "http://repo1.maven.org/maven2/org/apache/hadoop/hadoop-hdfs/2.6.0/hadoop-hdfs-2.6.0-tests.jar"
      sahara_cluster:
-        flavor_id: "2"
+        master_flavor_id: "4"
+        worker_flavor_id: "3"
         workers_count: 3
         plugin_name: "vanilla"
         hadoop_version: "2.6.0"
@@ -47,7 +47,8 @@
             ]
         },
         "sahara_cluster": {
-            "flavor_id": "2",
+            "master_flavor_id": "4",
+            "worker_flavor_id": "3",
             "workers_count": 3,
             "plugin_name": "vanilla",
             "hadoop_version": "2.6.0",
@@ -47,7 +47,8 @@
            name: "tests.jar"
            download_url: "http://repo1.maven.org/maven2/org/apache/hadoop/hadoop-hdfs/2.6.0/hadoop-hdfs-2.6.0-tests.jar"
      sahara_cluster:
-        flavor_id: "2"
+        master_flavor_id: "4"
+        worker_flavor_id: "3"
         workers_count: 3
         plugin_name: "vanilla"
         hadoop_version: "2.6.0"
@@ -35,7 +35,8 @@
             ]
         },
         "sahara_cluster": {
-            "flavor_id": "2",
+            "master_flavor_id": "4",
+            "worker_flavor_id": "3",
             "workers_count": 3,
             "plugin_name": "vanilla",
             "hadoop_version": "2.6.0",
@@ -28,7 +28,8 @@
            name: "examples.jar"
            download_url: "http://repo1.maven.org/maven2/org/apache/hadoop/hadoop-mapreduce-examples/2.6.0/hadoop-mapreduce-examples-2.6.0.jar"
      sahara_cluster:
-        flavor_id: "2"
+        master_flavor_id: "4"
+        worker_flavor_id: "3"
         workers_count: 3
         plugin_name: "vanilla"
         hadoop_version: "2.6.0"
@@ -44,7 +44,8 @@
                 "output_url_prefix": "/out_"
             },
             "sahara_cluster": {
-                "flavor_id": "2",
+                "master_flavor_id": "4",
+                "worker_flavor_id": "3",
                 "workers_count": 3,
                 "plugin_name": "vanilla",
                 "hadoop_version": "2.6.0",
@@ -33,7 +33,8 @@
        output_type: "hdfs"
        output_url_prefix: "/out_"
      sahara_cluster:
-        flavor_id: "2"
+        master_flavor_id: "4"
+        worker_flavor_id: "3"
         workers_count: 3
         plugin_name: "vanilla"
         hadoop_version: "2.6.0"
@@ -55,7 +55,8 @@ class SaharaClusterTestCase(test.ScenarioTestCase):
                 "users_per_tenant": self.users_per_tenant
             },
             "sahara_cluster": {
-                "flavor_id": "test_flavor",
+                "master_flavor_id": "test_flavor_m",
+                "worker_flavor_id": "test_flavor_w",
                 "workers_count": 2,
                 "plugin_name": "test_plugin",
                 "hadoop_version": "test_version"
@@ -77,9 +78,11 @@ class SaharaClusterTestCase(test.ScenarioTestCase):

         for i in self.tenants:
             launch_cluster_calls.append(mock.call(
+                flavor_id=None,
                 plugin_name="test_plugin",
                 hadoop_version="test_version",
-                flavor_id="test_flavor",
+                master_flavor_id="test_flavor_m",
+                worker_flavor_id="test_flavor_w",
                 workers_count=2,
                 image_id=self.context["tenants"][i]["sahara"]["image"],
                 floating_ip_pool=None,
@@ -115,9 +118,11 @@ class SaharaClusterTestCase(test.ScenarioTestCase):

         for i in self.tenants:
             launch_cluster_calls.append(mock.call(
+                flavor_id=None,
                 plugin_name="test_plugin",
                 hadoop_version="test_version",
-                flavor_id="test_flavor",
+                master_flavor_id="test_flavor_m",
+                worker_flavor_id="test_flavor_w",
                 workers_count=2,
                 image_id=self.context["tenants"][i]["sahara"]["image"],
                 floating_ip_pool=None,
@@ -40,13 +40,59 @@ class SaharaClustersTestCase(test.ScenarioTestCase):
             }
         }
         clusters_scenario.create_and_delete_cluster(
-            flavor="test_flavor",
+            master_flavor="test_flavor_m",
+            worker_flavor="test_flavor_w",
             workers_count=5,
             plugin_name="test_plugin",
             hadoop_version="test_version")

         mock__launch_cluster.assert_called_once_with(
-            flavor_id="test_flavor",
+            flavor_id=None,
+            master_flavor_id="test_flavor_m",
+            worker_flavor_id="test_flavor_w",
             image_id="test_image",
             workers_count=5,
             plugin_name="test_plugin",
+            hadoop_version="test_version",
+            floating_ip_pool=None,
+            volumes_per_node=None,
+            volumes_size=None,
+            auto_security_group=None,
+            security_groups=None,
+            node_configs=None,
+            cluster_configs=None,
+            enable_anti_affinity=False,
+            enable_proxy=False)
+
+        mock__delete_cluster.assert_called_once_with(
+            mock__launch_cluster.return_value)
+
+    @mock.patch(SAHARA_CLUSTERS + "._delete_cluster")
+    @mock.patch(SAHARA_CLUSTERS + "._launch_cluster",
+                return_value=mock.MagicMock(id=42))
+    def test_create_and_delete_cluster_deprecated_flavor(
+            self, mock__launch_cluster, mock__delete_cluster):
+        clusters_scenario = clusters.SaharaClusters(self.context)
+
+        clusters_scenario.context = {
+            "tenant": {
+                "sahara": {
+                    "image": "test_image",
+                }
+            }
+        }
+        clusters_scenario.create_and_delete_cluster(
+            flavor="test_deprecated_arg",
+            master_flavor=None,
+            worker_flavor=None,
+            workers_count=5,
+            plugin_name="test_plugin",
+            hadoop_version="test_version")
+
+        mock__launch_cluster.assert_called_once_with(
+            flavor_id="test_deprecated_arg",
+            master_flavor_id=None,
+            worker_flavor_id=None,
+            image_id="test_image",
+            workers_count=5,
+            plugin_name="test_plugin",
@@ -85,14 +131,17 @@ class SaharaClustersTestCase(test.ScenarioTestCase):
         }

         clusters_scenario.create_scale_delete_cluster(
-            flavor="test_flavor",
+            master_flavor="test_flavor_m",
+            worker_flavor="test_flavor_w",
             workers_count=5,
             deltas=[1, -1],
             plugin_name="test_plugin",
             hadoop_version="test_version")

         mock__launch_cluster.assert_called_once_with(
-            flavor_id="test_flavor",
+            flavor_id=None,
+            master_flavor_id="test_flavor_m",
+            worker_flavor_id="test_flavor_w",
             image_id="test_image",
             workers_count=5,
             plugin_name="test_plugin",
@@ -168,7 +168,7 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
         node_groups = [
             {
                 "name": "master-ng",
-                "flavor_id": "test_flavor",
+                "flavor_id": "test_flavor_m",
                 "node_processes": ["p1"],
                 "floating_ip_pool": floating_ip_pool_uuid,
                 "count": 1,
@@ -177,7 +177,7 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
                 "node_configs": {"HDFS": {"local_config": "local_value"}},
             }, {
                 "name": "worker-ng",
-                "flavor_id": "test_flavor",
+                "flavor_id": "test_flavor_w",
                 "node_processes": ["p2"],
                 "floating_ip_pool": floating_ip_pool_uuid,
                 "volumes_per_node": 5,
@@ -201,7 +201,8 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
         scenario._launch_cluster(
             plugin_name="test_plugin",
             hadoop_version="test_version",
-            flavor_id="test_flavor",
+            master_flavor_id="test_flavor_m",
+            worker_flavor_id="test_flavor_w",
             image_id="test_image",
             floating_ip_pool=floating_ip_pool_uuid,
             volumes_per_node=5,
@@ -271,7 +272,7 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
         node_groups = [
             {
                 "name": "master-ng",
-                "flavor_id": "test_flavor",
+                "flavor_id": "test_flavor_m",
                 "node_processes": ["p1"],
                 "floating_ip_pool": floating_ip_pool_uuid,
                 "count": 1,
@@ -281,7 +282,7 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
                 "is_proxy_gateway": True
             }, {
                 "name": "worker-ng",
-                "flavor_id": "test_flavor",
+                "flavor_id": "test_flavor_w",
                 "node_processes": ["p2"],
                 "volumes_per_node": 5,
                 "volumes_size": 10,
@@ -291,7 +292,7 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
                 "node_configs": {"HDFS": {"local_config": "local_value"}},
             }, {
                 "name": "proxy-ng",
-                "flavor_id": "test_flavor",
+                "flavor_id": "test_flavor_w",
                 "node_processes": ["p2"],
                 "floating_ip_pool": floating_ip_pool_uuid,
                 "volumes_per_node": 5,
@@ -316,7 +317,8 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
         scenario._launch_cluster(
             plugin_name="test_plugin",
             hadoop_version="test_version",
-            flavor_id="test_flavor",
+            master_flavor_id="test_flavor_m",
+            worker_flavor_id="test_flavor_w",
             image_id="test_image",
             floating_ip_pool=floating_ip_pool_uuid,
             volumes_per_node=5,
@@ -380,7 +382,8 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
             scenario._launch_cluster,
             plugin_name="test_plugin",
             hadoop_version="test_version",
-            flavor_id="test_flavor",
+            master_flavor_id="test_flavor_m",
+            worker_flavor_id="test_flavor_w",
             image_id="test_image",
             floating_ip_pool="test_pool",
             volumes_per_node=5,