Refine the code for CDH PluginUtils class

All of the versions of PluginUtils define the same methods,
start_cloudera_manager and get_config_value, so move them to the base
class. Also remove the duplicate method create_hive_hive_directory
in v5.plugin_utils.

partially implements: blueprint cdh-plugin-refactoring

Change-Id: Ida9146fa02449c7969bbc885ff134e0b8001465a
This commit is contained in:
Jaxon Wang 2016-03-01 12:18:06 -05:00 committed by Trevor McKay
parent 9a584387bc
commit 383ce74c6d
7 changed files with 22 additions and 53 deletions

View File

@ -285,15 +285,6 @@ class AbstractPluginUtils(object):
with manager.remote() as r:
self.db_helper.create_hive_database(cluster, r)
def create_hive_hive_directory(self, cluster):
    """Create the /tmp/hive-hive HDFS directory that Hive requires.

    The directory is created (if absent) and chowned to the hive user,
    both commands running as the hdfs superuser on the namenode.
    """
    commands = (
        'sudo su - -c "hadoop fs -mkdir -p /tmp/hive-hive" hdfs',
        'sudo su - -c "hadoop fs -chown hive /tmp/hive-hive" hdfs',
    )
    namenode = self.get_namenode(cluster)
    with namenode.remote() as remote:
        for command in commands:
            remote.execute_command(command)
def install_extjs(self, cluster):
extjs_remote_location = self.c_helper.get_extjs_lib_url(cluster)
extjs_vm_location_dir = '/var/lib/oozie'
@ -385,3 +376,11 @@ class AbstractPluginUtils(object):
provider = CDHPluginAutoConfigsProvider(
AUTO_CONFIGURATION_SCHEMA, plugin_configs, cluster, scaling)
provider.apply_recommended_configs()
def start_cloudera_manager(self, cluster):
    """Start Cloudera Manager on the cluster.

    Waits up to the helper-configured manager-start timeout.
    """
    timeout = self.c_helper.AWAIT_MANAGER_STARTING_TIMEOUT
    self._start_cloudera_manager(cluster, timeout)

def get_config_value(self, service, name, cluster=None):
    """Return the plugin config value for ``service``/``name``."""
    return self._get_config_value(
        service, name, self.c_helper.get_plugin_configs(), cluster)

View File

@ -54,11 +54,3 @@ class PluginUtilsV5(pu.AbstractPluginUtils):
'sudo su - -c "hadoop fs -mkdir -p /tmp/hive-hive" hdfs')
r.execute_command(
'sudo su - -c "hadoop fs -chown hive /tmp/hive-hive" hdfs')
# NOTE(review): these duplicate the base-class implementations and are
# removed by this refactor.
def start_cloudera_manager(self, cluster):
    """Start Cloudera Manager, bounded by the manager-start timeout."""
    wait_limit = self.c_helper.AWAIT_MANAGER_STARTING_TIMEOUT
    self._start_cloudera_manager(cluster, wait_limit)

def get_config_value(self, service, name, cluster=None):
    """Look up a plugin configuration value for the given service."""
    plugin_configs = self.c_helper.get_plugin_configs()
    return self._get_config_value(service, name, plugin_configs, cluster)

View File

@ -128,11 +128,3 @@ class PluginUtilsV530(pu.AbstractPluginUtils):
manager = self.get_manager(cluster)
with manager.remote() as r:
self.db_helper.create_sentry_database(cluster, r)
# NOTE(review): duplicate of the base-class methods; deleted in this change.
def start_cloudera_manager(self, cluster):
    """Bring up Cloudera Manager, waiting for the start timeout."""
    self._start_cloudera_manager(
        cluster,
        self.c_helper.AWAIT_MANAGER_STARTING_TIMEOUT)

def get_config_value(self, service, name, cluster=None):
    """Fetch a plugin config value for ``service``/``name``."""
    all_configs = self.c_helper.get_plugin_configs()
    return self._get_config_value(service, name, all_configs, cluster)

View File

@ -148,14 +148,6 @@ class PluginUtilsV540(pu.AbstractPluginUtils):
with manager.remote() as r:
self.db_helper.create_sentry_database(cluster, r)
# NOTE(review): copies of the base-class methods; this commit drops them.
def start_cloudera_manager(self, cluster):
    """Start the Cloudera Manager process for this cluster."""
    limit = self.c_helper.AWAIT_MANAGER_STARTING_TIMEOUT
    self._start_cloudera_manager(cluster, limit)

def get_config_value(self, service, name, cluster=None):
    """Resolve a plugin config value for the given service and name."""
    return self._get_config_value(
        service, name, self.c_helper.get_plugin_configs(), cluster)
def _configure_repo_from_inst(self, instance):
super(PluginUtilsV540, self)._configure_repo_from_inst(instance)

View File

@ -143,14 +143,6 @@ class PluginUtilsV550(pu.AbstractPluginUtils):
with manager.remote() as r:
self.db_helper.create_sentry_database(cluster, r)
# NOTE(review): same as the base-class versions; removed by this refactor.
def start_cloudera_manager(self, cluster):
    """Launch Cloudera Manager and wait for it to become available."""
    self._start_cloudera_manager(
        cluster, self.c_helper.AWAIT_MANAGER_STARTING_TIMEOUT)

def get_config_value(self, service, name, cluster=None):
    """Return the configured value for ``name`` under ``service``."""
    cfgs = self.c_helper.get_plugin_configs()
    return self._get_config_value(service, name, cfgs, cluster)
def _configure_repo_from_inst(self, instance):
super(PluginUtilsV550, self)._configure_repo_from_inst(instance)

View File

@ -140,18 +140,6 @@ class TestPluginUtils(b.SaharaTestCase):
' /user/hdfs/conf/hive-site.xml" hdfs')]
r.execute_command.assert_has_calls(calls, any_order=False)
@mock.patch('sahara.config.CONF.disable_event_log')
@mock.patch('sahara.config.CONF.disable_event_log')
def test_create_hive_hive_directory(self, log_cfg):
    """create_hive_hive_directory must mkdir and chown /tmp/hive-hive
    on the namenode, in that order, as the hdfs user."""
    cluster = get_concrete_cluster()
    namenode = cluster.node_groups[1].instances[0]
    self.plug_utils.create_hive_hive_directory(cluster)
    expected = [mock.call('sudo su - -c "hadoop fs -mkdir -p'
                          ' /tmp/hive-hive" hdfs'),
                mock.call('sudo su - -c "hadoop fs -chown hive'
                          ' /tmp/hive-hive" hdfs')]
    with namenode.remote() as remote:
        remote.execute_command.assert_has_calls(expected, any_order=False)
@mock.patch('sahara.config.CONF.disable_event_log')
def test_configure_swift(self, log_cfg):

View File

@ -13,6 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from sahara.plugins.cdh.v5 import plugin_utils as pu
from sahara.tests.unit.plugins.cdh import base_plugin_utils_test
@ -23,3 +25,15 @@ class TestPluginUtilsV5(base_plugin_utils_test.TestPluginUtils):
super(TestPluginUtilsV5, self).setUp()
self.plug_utils = pu.PluginUtilsV5()
self.version = "v5"
@mock.patch('sahara.config.CONF.disable_event_log')
def test_create_hive_hive_directory(self, log_cfg):
    """The v5 utils must create and chown /tmp/hive-hive on the
    namenode, running both commands as the hdfs user in order."""
    cluster = base_plugin_utils_test.get_concrete_cluster()
    namenode = cluster.node_groups[1].instances[0]
    self.plug_utils.create_hive_hive_directory(cluster)
    expected = [mock.call('sudo su - -c "hadoop fs -mkdir -p'
                          ' /tmp/hive-hive" hdfs'),
                mock.call('sudo su - -c "hadoop fs -chown hive'
                          ' /tmp/hive-hive" hdfs')]
    with namenode.remote() as remote:
        remote.execute_command.assert_has_calls(expected, any_order=False)