Fix problem with using volumes for HDFS data in vanilla plugin
The 'dfs.namenode.data.dir' param was used incorrectly; 'dfs.datanode.data.dir'
should be used for storing DataNode data.

Change-Id: I3e07fcdca34919daad290acb97f9add2a73a6a4e
Closes-bug: #1473041
commit 5a1acb2651 (parent a1c95bca60)
@@ -99,7 +99,7 @@ def _get_hadoop_configs(pctx, node_group):
         },
         'HDFS': {
             'dfs.namenode.name.dir': ','.join(dirs['hadoop_name_dirs']),
-            'dfs.namenode.data.dir': ','.join(dirs['hadoop_data_dirs']),
+            'dfs.datanode.data.dir': ','.join(dirs['hadoop_data_dirs']),
             'dfs.hosts': '%s/dn-include' % HADOOP_CONF_DIR,
             'dfs.hosts.exclude': '%s/dn-exclude' % HADOOP_CONF_DIR
         }
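For context, the two HDFS parameters serve different roles: dfs.namenode.name.dir holds NameNode metadata, while dfs.datanode.data.dir holds the DataNode block data that the attached volumes are meant to store. The sketch below is a minimal, hypothetical illustration (not Sahara's actual config-generation code) of how the corrected property name would appear in a rendered hdfs-site.xml; the render_hdfs_site helper and the directory paths are assumptions made for this example only.

# Minimal illustrative sketch; not Sahara's implementation.
from xml.sax.saxutils import escape

def render_hdfs_site(props):
    """Render a dict of HDFS properties as hdfs-site.xml content."""
    lines = ['<?xml version="1.0"?>', '<configuration>']
    for name, value in sorted(props.items()):
        lines.append('  <property>')
        lines.append('    <name>%s</name>' % escape(name))
        lines.append('    <value>%s</value>' % escape(value))
        lines.append('  </property>')
    lines.append('</configuration>')
    return '\n'.join(lines)

hdfs_props = {
    # NameNode metadata location (hypothetical path)
    'dfs.namenode.name.dir': '/mnt/hdfs/namenode',
    # DataNode block storage location (hypothetical path) -- the key
    # corrected by this commit, previously mis-set as 'dfs.namenode.data.dir'
    'dfs.datanode.data.dir': '/mnt/hdfs/datanode',
}

print(render_hdfs_site(hdfs_props))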