Small refactoring for vanilla 2

The recommendation code for all vanilla 2 plugins is almost the same,
so we should move all code related to recommendations into the common
hadoop2 module.

Related blueprint: support-vanilla-2-7-1

Change-Id: Ic88f5ce529f3d43b77b7a630ab1d8a3967ba0707
Vitaly Gridnev 2015-08-03 14:11:14 +03:00
parent f89dc5b242
commit f342382ab5
3 changed files with 53 additions and 36 deletions
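
For context, the net effect of the move (a sketch, not part of the diff
below): every vanilla 2 version handler can now delegate its
recommend_configs to the shared hadoop2 helper, so a new version such as
the 2.7.1 one from the related blueprint only has to supply its own plugin
configs. The class skeleton below is hypothetical and omits the other
abstract methods a real handler must implement.

    from sahara.plugins.vanilla import abstractversionhandler as avm
    from sahara.plugins.vanilla.hadoop2 import recommendations_utils as ru


    class VersionHandler(avm.AbstractVersionHandler):
        # Hypothetical vanilla 2 handler: the whole recommendation step
        # is now a one-line delegation to the common hadoop2 module.
        def recommend_configs(self, cluster):
            ru.recommend_configs(cluster, self.get_plugin_configs())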

sahara/plugins/vanilla/hadoop2/recommendations_utils.py (new file)

@@ -0,0 +1,48 @@
+# Copyright (c) 2015 Mirantis Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from sahara.plugins import recommendations_utils as ru
+
+
+def recommend_configs(cluster, plugin_configs):
+    yarn_configs = [
+        'yarn.nodemanager.resource.memory-mb',
+        'yarn.scheduler.minimum-allocation-mb',
+        'yarn.scheduler.maximum-allocation-mb',
+        'yarn.nodemanager.vmem-check-enabled',
+    ]
+    mapred_configs = [
+        'yarn.app.mapreduce.am.resource.mb',
+        'yarn.app.mapreduce.am.command-opts',
+        'mapreduce.map.memory.mb',
+        'mapreduce.reduce.memory.mb',
+        'mapreduce.map.java.opts',
+        'mapreduce.reduce.java.opts',
+        'mapreduce.task.io.sort.mb',
+    ]
+    configs_to_configure = {
+        'cluster_configs': {
+            'dfs.replication': ('HDFS', 'dfs.replication')
+        },
+        'node_configs': {
+        }
+    }
+    for mapr in mapred_configs:
+        configs_to_configure['node_configs'][mapr] = ("MapReduce", mapr)
+    for yarn in yarn_configs:
+        configs_to_configure['node_configs'][yarn] = ('YARN', yarn)
+    provider = ru.HadoopAutoConfigsProvider(
+        configs_to_configure, plugin_configs, cluster)
+    provider.apply_recommended_configs()
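
To make the mapping concrete, here is roughly what configs_to_configure
looks like once both loops have run (an illustration, not part of the
commit; only two of the node_configs entries are spelled out):

    configs_to_configure = {
        'cluster_configs': {
            'dfs.replication': ('HDFS', 'dfs.replication'),
        },
        'node_configs': {
            # each key maps to an (applicable target, config name) pair,
            # which is the schema HadoopAutoConfigsProvider consumes
            'yarn.nodemanager.resource.memory-mb':
                ('YARN', 'yarn.nodemanager.resource.memory-mb'),
            'mapreduce.map.memory.mb':
                ('MapReduce', 'mapreduce.map.memory.mb'),
            # ...and likewise for the remaining YARN and MapReduce keys
        },
    }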

sahara/plugins/vanilla/v2_6_0/versionhandler.py

@@ -18,10 +18,10 @@ from oslo_log import log as logging
 
 from sahara import conductor
 from sahara import context
-from sahara.plugins import recommendations_utils as ru
 from sahara.plugins import utils
 from sahara.plugins.vanilla import abstractversionhandler as avm
 from sahara.plugins.vanilla.hadoop2 import config as c
+from sahara.plugins.vanilla.hadoop2 import recommendations_utils as ru
 from sahara.plugins.vanilla.hadoop2 import run_scripts as run
 from sahara.plugins.vanilla.hadoop2 import scaling as sc
 from sahara.plugins.vanilla.hadoop2 import validation as vl
@@ -192,32 +192,4 @@ class VersionHandler(avm.AbstractVersionHandler):
         return c.get_open_ports(node_group)
 
     def recommend_configs(self, cluster):
-        yarn_configs = [
-            'yarn.nodemanager.resource.memory-mb',
-            'yarn.scheduler.minimum-allocation-mb',
-            'yarn.scheduler.maximum-allocation-mb',
-            'yarn.nodemanager.vmem-check-enabled',
-        ]
-        mapred_configs = [
-            'yarn.app.mapreduce.am.resource.mb',
-            'yarn.app.mapreduce.am.command-opts',
-            'mapreduce.map.memory.mb',
-            'mapreduce.reduce.memory.mb',
-            'mapreduce.map.java.opts',
-            'mapreduce.reduce.java.opts',
-            'mapreduce.task.io.sort.mb',
-        ]
-        configs_to_configure = {
-            'cluster_configs': {
-                'dfs.replication': ('HDFS', 'dfs.replication')
-            },
-            'node_configs': {
-            }
-        }
-        for mapr in mapred_configs:
-            configs_to_configure['node_configs'][mapr] = ("MapReduce", mapr)
-        for yarn in yarn_configs:
-            configs_to_configure['node_configs'][yarn] = ('YARN', yarn)
-        provider = ru.HadoopAutoConfigsProvider(
-            configs_to_configure, self.get_plugin_configs(), cluster)
-        provider.apply_recommended_configs()
+        ru.recommend_configs(cluster, self.get_plugin_configs())

(unit tests for recommend_configs)

@@ -16,7 +16,7 @@
 import mock
 import testtools
 
-from sahara.plugins.vanilla.v2_6_0 import versionhandler
+from sahara.plugins.vanilla.hadoop2 import recommendations_utils as ru
 
 CONFIGURATION_SCHEMA = {
     'cluster_configs': {
@@ -52,12 +52,9 @@ CONFIGURATION_SCHEMA = {
 class TestVersionHandler(testtools.TestCase):
     @mock.patch('sahara.plugins.recommendations_utils.'
                 'HadoopAutoConfigsProvider')
-    @mock.patch('sahara.plugins.vanilla.v2_6_0.versionhandler.VersionHandler.'
-                'get_plugin_configs')
-    def test_recommend_configs(self, fake_plugin_configs, provider):
+    def test_recommend_configs(self, provider):
         f_cluster, f_configs = mock.Mock(), mock.Mock()
-        fake_plugin_configs.return_value = f_configs
-        versionhandler.VersionHandler().recommend_configs(f_cluster)
+        ru.recommend_configs(f_cluster, f_configs)
         self.assertEqual([
             mock.call(CONFIGURATION_SCHEMA, f_configs, f_cluster)
         ], provider.call_args_list)
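
The rewritten test no longer needs to construct a VersionHandler at all.
The same expectation, written as a standalone sketch (hypothetical snippet,
not part of the commit; assumes sahara is importable):

    import mock

    from sahara.plugins.vanilla.hadoop2 import recommendations_utils as ru

    with mock.patch('sahara.plugins.recommendations_utils.'
                    'HadoopAutoConfigsProvider') as provider:
        f_cluster, f_configs = mock.Mock(), mock.Mock()
        ru.recommend_configs(f_cluster, f_configs)
        # the helper must hand the schema, plugin configs, and cluster
        # straight to the provider, then apply the recommendations
        provider.assert_called_once_with(mock.ANY, f_configs, f_cluster)
        provider.return_value.apply_recommended_configs \
            .assert_called_once_with()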