Add hadoop openstack swift jar to ambari cluster

This patch adds our custom hadoop swiftfs implementation to Ambari
cluster instances after they start, in order to allow usage of
swift with the Keystone API v3.

Note: the required jar file must be saved beforehand in the
/opt folder of the base images.

Closes-bug: 1558064
Change-Id: Ie6df4a542a16b4417b505b4a621e8b4b921364d3
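For context on the prerequisite above: the patch itself never downloads the jar, it only assumes the file already exists inside the base image. A hypothetical sketch of that image-build step, assuming the image filesystem is mounted at /mnt/image-root (both paths are illustrative and not part of this change):

    import shutil

    # Copy the custom hadoop-openstack build into the image's /opt so that
    # add_hadoop_swift_jar() can later find it at /opt/hadoop-openstack.jar.
    shutil.copy("hadoop-openstack.jar",
                "/mnt/image-root/opt/hadoop-openstack.jar")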
@@ -22,6 +22,7 @@ from oslo_utils import uuidutils
 from sahara import conductor
 from sahara import context
+from sahara.i18n import _LW
 from sahara.plugins.ambari import client as ambari_client
 from sahara.plugins.ambari import common as p_common
 from sahara.plugins.ambari import configs
@@ -388,3 +389,28 @@ def _wait_all_processes_removed(cluster, instance):
             if not hdp_processes:
                 return
             context.sleep(5)
+
+
+def add_hadoop_swift_jar(instances):
+    new_jar = "/opt/hadoop-openstack.jar"
+    for inst in instances:
+        with inst.remote() as r:
+            code, out = r.execute_command("test -f %s" % new_jar,
+                                          raise_when_error=False)
+            if code == 0:
+                # get ambari hadoop version (e.g.: 2.7.1.2.3.4.0-3485)
+                code, amb_hadoop_version = r.execute_command(
+                    "sudo hadoop version | grep 'Hadoop' | awk '{print $2}'")
+                amb_hadoop_version = amb_hadoop_version.strip()
+                # get special code of ambari hadoop version (e.g.: 2.3.4.0-3485)
+                amb_code = '.'.join(amb_hadoop_version.split('.')[3:])
+                origin_jar = (
+                    "/usr/hdp/%s/hadoop-mapreduce/hadoop-openstack-%s.jar" % (
+                        amb_code, amb_hadoop_version))
+                r.execute_command("sudo cp %s %s" % (new_jar, origin_jar))
+            else:
+                LOG.warning(_LW("The {jar_file} file cannot be found "
+                                "in the {dir} directory so Keystone API v3 "
+                                "is not enabled for this cluster.")
+                            .format(jar_file="hadoop-openstack.jar",
+                                    dir="/opt"))
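As an aside, the version-string slicing above is easy to sanity-check outside a cluster. A minimal sketch, using the sample version from the code comments (no Sahara or hadoop binary required; the value is hard-coded purely for illustration):

    # Stand-in for the output of: hadoop version | grep 'Hadoop' | awk '{print $2}'
    amb_hadoop_version = "2.7.1.2.3.4.0-3485"

    # Dropping the first three dotted components (the Hadoop release, 2.7.1)
    # leaves the HDP build code used in the /usr/hdp path.
    amb_code = '.'.join(amb_hadoop_version.split('.')[3:])
    assert amb_code == "2.3.4.0-3485"

    origin_jar = ("/usr/hdp/%s/hadoop-mapreduce/hadoop-openstack-%s.jar"
                  % (amb_code, amb_hadoop_version))
    # -> /usr/hdp/2.3.4.0-3485/hadoop-mapreduce/hadoop-openstack-2.7.1.2.3.4.0-3485.jar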
@@ -89,7 +89,9 @@ class AmbariPluginProvider(p.ProvisioningPluginBase):
     def start_cluster(self, cluster):
         self._set_cluster_info(cluster)
         deploy.start_cluster(cluster)
-        swift_helper.install_ssl_certs(plugin_utils.get_instances(cluster))
+        cluster_instances = plugin_utils.get_instances(cluster)
+        swift_helper.install_ssl_certs(cluster_instances)
+        deploy.add_hadoop_swift_jar(cluster_instances)

     def _set_cluster_info(self, cluster):
         ambari_ip = plugin_utils.get_instance(
@@ -180,6 +182,7 @@ class AmbariPluginProvider(p.ProvisioningPluginBase):
         deploy.manage_config_groups(cluster, instances)
         deploy.manage_host_components(cluster, instances)
         swift_helper.install_ssl_certs(instances)
+        deploy.add_hadoop_swift_jar(instances)

     def decommission_nodes(self, cluster, instances):
         deploy.decommission_hosts(cluster, instances)
@@ -44,7 +44,7 @@ XML_CONFS = {
 }

 _default_executor_classpath = ":".join(
-    ['/usr/lib/hadoop/hadoop-swift.jar'])
+    ['/usr/lib/hadoop-mapreduce/hadoop-openstack.jar'])

 SPARK_CONFS = {
     'Spark': {
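Worth noting: _default_executor_classpath is built with ":".join over a list so additional jars can be appended later without changing the surrounding code; with the single entry above the joined string is simply that path. An illustrative check:

    # With one element, the colon-join is just that element.
    _default_executor_classpath = ":".join(
        ['/usr/lib/hadoop-mapreduce/hadoop-openstack.jar'])
    assert _default_executor_classpath == (
        "/usr/lib/hadoop-mapreduce/hadoop-openstack.jar")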
@@ -42,7 +42,7 @@ class TestSpark(base.SaharaTestCase):
         self.spark_pid = "12345"
         self.spark_home = "/opt/spark"
         self.workflow_dir = "/wfdir"
-        self.driver_cp = "/usr/lib/hadoop/hadoop-swift.jar:"
+        self.driver_cp = "/usr/lib/hadoop-mapreduce/hadoop-openstack.jar:"

     def test_get_pid_and_inst_id(self):
         '''Test parsing of job ids