Merge "Fix working Spark with cinder volumes"

This commit is contained in:
Jenkins 2014-10-05 22:21:57 +00:00 committed by Gerrit Code Review
commit 4d150db7d5
3 changed files with 34 additions and 3 deletions

View File

@@ -379,9 +379,13 @@ def extract_name_values(configs):
return dict((cfg['name'], cfg['value']) for cfg in configs)
def make_hadoop_path(base_dirs, suffix):
    """Return each directory in *base_dirs* with *suffix* appended.

    :param base_dirs: iterable of base directory strings
    :param suffix: path fragment to append to every base directory
    :returns: list of concatenated paths, in the same order as the input
    """
    paths = []
    for directory in base_dirs:
        paths.append(directory + suffix)
    return paths
def extract_hadoop_path(lst, hadoop_dir):
    """Join storage paths with *hadoop_dir* appended into a CSV string.

    The span contained both the pre-change inline comprehension and the
    post-change delegation to make_hadoop_path; the superseded duplicate
    return (dead code after the first return) is removed here and the
    shared helper is kept as the single source of the path logic.

    :param lst: list of storage path strings (may be empty or None)
    :param hadoop_dir: suffix appended to each storage path
    :returns: comma-separated path string, or None when *lst* is falsy
              (callers appear to rely on the implicit None — TODO confirm)
    """
    if lst:
        return ",".join(make_hadoop_path(lst, hadoop_dir))
def _set_config(cfg, gen_cfg, name=None):

View File

@@ -232,8 +232,10 @@ class SparkProvider(p.ProvisioningPluginBase):
'sudo chmod 600 $HOME/.ssh/id_rsa')
storage_paths = instance.node_group.storage_paths()
dn_path = c_helper.extract_hadoop_path(storage_paths, '/dfs/dn')
nn_path = c_helper.extract_hadoop_path(storage_paths, '/dfs/nn')
dn_path = ' '.join(c_helper.make_hadoop_path(storage_paths,
'/dfs/dn'))
nn_path = ' '.join(c_helper.make_hadoop_path(storage_paths,
'/dfs/nn'))
hdfs_dir_cmd = ('sudo mkdir -p %(nn_path)s %(dn_path)s &&'
'sudo chown -R hdfs:hadoop %(nn_path)s %(dn_path)s &&'

View File

@@ -0,0 +1,25 @@
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.plugins.spark import config_helper as c_helper
from sahara.tests.unit import base as test_base
class ConfigHelperUtilsTest(test_base.SaharaTestCase):
    """Unit tests for the Spark plugin config_helper path utilities."""

    def test_make_hadoop_path(self):
        # Each storage path should come back with the suffix appended,
        # preserving the order of the input list.
        result = c_helper.make_hadoop_path(['/mnt/one', '/mnt/two'], '/spam')
        self.assertEqual(['/mnt/one/spam', '/mnt/two/spam'], result)