Display credentials info in cluster general info
- Credentials for services that require authentication, and the admin
  user credentials, are displayed in the cluster general info.
- The admin user password is stored in the key manager.

Change-Id: Idab9d1d17048ca40dfe308708ecd70aedf9f7914
Closes-Bug: #1566907
commit 8c7cfb76c2
parent 9a1300b1a7
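For orientation before the diff: each service now returns its UI entries from get_ui_info() as (title, node_process, dict) tuples, and _update_cluster_info() fills the 'Web UI' URL template with the instance management IP and publishes the result, together with the admin user credentials, in the cluster's general info. A rough sketch of the resulting 'info' structure follows; the key names come from the diff below, while the addresses and the password placeholder are illustrative.

# Illustrative sketch only: structure and key names are from this change,
# the concrete host addresses and password value are made up.
info = {
    'Admin user credentials': {
        'Username': 'mapr',
        'Password': '<random password kept in the key manager>',
    },
    # Built from ('MapR Control System (MCS)', WEB_SERVER,
    #             {'Web UI': 'https://%s:8443', 'Username': ..., 'Password': ...})
    'MapR Control System (MCS)': {
        'Web UI': 'https://198.51.100.10:8443',
        'Username': 'mapr',
        'Password': '<random password kept in the key manager>',
    },
    # Services without authentication keep exposing only their web UI URL.
    'NodeManager': {'Web UI': 'http://198.51.100.11:8042'},
}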
@@ -26,6 +26,7 @@ from sahara.i18n import _LI
 from sahara.i18n import _LW
 import sahara.plugins.mapr.abstract.configurer as ac
 from sahara.plugins.mapr.domain import distro as d
+from sahara.plugins.mapr.domain import service as srvc
 import sahara.plugins.mapr.services.management.management as mng
 import sahara.plugins.mapr.services.mapreduce.mapreduce as mr
 from sahara.plugins.mapr.services.maprfs import maprfs
@@ -33,11 +34,14 @@ from sahara.plugins.mapr.services.mysql import mysql
 import sahara.plugins.mapr.services.yarn.yarn as yarn
 from sahara.plugins.mapr.util import event_log as el
 import sahara.plugins.mapr.util.general as util
+import sahara.plugins.mapr.util.password_utils as pu
 import sahara.utils.files as files
 
 LOG = logging.getLogger(__name__)
 conductor = conductor.API
 
+
+_MAPR_GROUP_NAME = 'mapr'
 _MAPR_HOME = '/opt/mapr'
 _JAVA_HOME = '/usr/java/jdk1.7.0_51'
 _CONFIGURE_SH_TIMEOUT = 600
@@ -233,9 +237,12 @@ class BaseConfigurer(ac.AbstractConfigurer):
 
     def _update_cluster_info(self, cluster_context):
         LOG.debug('Updating UI information.')
-        info = {}
+        info = {'Admin user credentials': {'Username': pu.MAPR_USER_NAME,
+                                           'Password': pu.get_mapr_password
+                                           (cluster_context.cluster)}}
         for service in cluster_context.cluster_services:
-            for title, node_process, url_template in service.ui_info:
+            for title, node_process, ui_info in (service.get_ui_info
+                                                 (cluster_context)):
                 removed = cluster_context.removed_instances(node_process)
                 instances = cluster_context.get_instances(node_process)
                 instances = [i for i in instances if i not in removed]
@@ -248,36 +255,45 @@ class BaseConfigurer(ac.AbstractConfigurer):
             for index, instance in enumerate(instances, start=1):
                 args = {"title": title, "index": index}
                 display_name = display_name_template % args
-                url = url_template % instance.management_ip
-                info.update({display_name: {"WebUI": url}})
+                data = ui_info.copy()
+                data[srvc.SERVICE_UI] = (data[srvc.SERVICE_UI] %
+                                         instance.management_ip)
+                info.update({display_name: data})
 
         ctx = context.ctx()
         conductor.cluster_update(ctx, cluster_context.cluster, {'info': info})
 
     def configure_general_environment(self, cluster_context, instances=None):
         LOG.debug('Executing post configure hooks')
+        mapr_user_pass = pu.get_mapr_password(cluster_context.cluster)
 
         if not instances:
             instances = cluster_context.get_instances()
 
         def create_user(instance):
-            return util.run_script(instance, ADD_MAPR_USER, "root")
+            return util.run_script(instance, ADD_MAPR_USER, "root",
+                                   pu.MAPR_USER_NAME, mapr_user_pass)
 
         def set_user_password(instance):
-            LOG.debug('Setting password for user "mapr"')
+            LOG.debug('Setting password for user "%s"' % pu.MAPR_USER_NAME)
             if self.mapr_user_exists(instance):
                 with instance.remote() as r:
                     r.execute_command(
-                        'echo "%s:%s"|chpasswd' % ('mapr', 'mapr'),
+                        'echo "%s:%s"|chpasswd' %
+                        (pu.MAPR_USER_NAME, mapr_user_pass),
                         run_as_root=True)
             else:
                 LOG.warning(_LW('User "mapr" does not exists'))
 
         def create_home_mapr(instance):
-            target_path = '/home/mapr'
-            LOG.debug("Creating home directory for user 'mapr'")
-            args = {'path': target_path}
-            cmd = 'mkdir -p %(path)s && chown mapr:mapr %(path)s' % args
+            target_path = '/home/%s' % pu.MAPR_USER_NAME
+            LOG.debug("Creating home directory for user '%s'" %
+                      pu.MAPR_USER_NAME)
+            args = {'path': target_path,
+                    'user': pu.MAPR_USER_NAME,
+                    'group': _MAPR_GROUP_NAME}
+            cmd = ('mkdir -p %(path)s && chown %(user)s:%(group)s %(path)s'
+                   % args)
            if self.mapr_user_exists(instance):
                 with instance.remote() as r:
                     r.execute_command(cmd, run_as_root=True)
@@ -335,7 +351,8 @@ class BaseConfigurer(ac.AbstractConfigurer):
     def mapr_user_exists(self, instance):
         with instance.remote() as r:
             ec, __ = r.execute_command(
-                "id -u mapr", run_as_root=True, raise_when_error=False)
+                "id -u %s" %
+                pu.MAPR_USER_NAME, run_as_root=True, raise_when_error=False)
         return ec == 0
 
     def post_start(self, c_context, instances=None):
@@ -356,7 +373,8 @@ class BaseConfigurer(ac.AbstractConfigurer):
 
         @el.provision_event()
         def set_cluster_mode(instance):
-            return util.execute_command([instance], command, run_as="mapr")
+            return util.execute_command([instance], command,
+                                        run_as=pu.MAPR_USER_NAME)
 
         util.execute_on_instances(instances, set_cluster_mode)
 
@@ -25,6 +25,8 @@ from sahara.plugins.mapr.util import service_utils as su
 import sahara.plugins.provisioning as p
 from sahara.utils import files as files
 
+SERVICE_UI = 'Web UI'
+
 _INSTALL_PACKAGES_TIMEOUT = 3600
 
 
@@ -62,10 +64,6 @@ class Service(object):
     def dependencies(self):
         return self._dependencies
 
-    @property
-    def ui_info(self):
-        return self._ui_info
-
     @property
     def cluster_defaults(self):
         return self._cluster_defaults
@@ -78,6 +76,9 @@ class Service(object):
     def validation_rules(self):
         return self._validation_rules
 
+    def get_ui_info(self, cluster_context):
+        return self._ui_info
+
     def install(self, cluster_context, instances):
         g.execute_on_instances(instances, self._install_packages_on_instance,
                                cluster_context)
@@ -1,2 +1,2 @@
 #!/bin/sh
-useradd -p `openssl passwd -1 mapr` mapr
+useradd -p `openssl passwd -1 $2` $1
@@ -32,7 +32,7 @@ class Drill(s.Service):
         self._name = 'drill'
         self._ui_name = 'Drill'
         self._node_processes = [DRILL]
-        self._ui_info = [('Drill', DRILL, 'http://%s:8047')]
+        self._ui_info = [('Drill', DRILL, {s.SERVICE_UI: 'http://%s:8047'})]
         self._validation_rules = [vu.at_least(1, DRILL)]
 
     def install(self, cluster_context, instances):
@@ -61,7 +61,7 @@ class HBase(s.Service):
             vu.at_least(1, HBASE_REGION_SERVER),
         ]
         self._ui_info = [
-            ("HBase Master", HBASE_MASTER, "http://%s:60010"),
+            ("HBase Master", HBASE_MASTER, {s.SERVICE_UI: "http://%s:60010"}),
         ]
 
     def get_config_files(self, cluster_context, configs, instance=None):
@@ -94,7 +94,7 @@ class HBaseV0989(HBase):
         self._dependencies = [('mapr-hbase', self.version)]
         self._node_processes.append(HBASE_REST)
         self._ui_info.append(
-            ("HBase REST", HBASE_REST, "http://%s:8085"),
+            ("HBase REST", HBASE_REST, {s.SERVICE_UI: "http://%s:8085"}),
         )
 
 
@@ -105,5 +105,5 @@ class HBaseV09812(HBase):
         self._dependencies = [("mapr-hbase", self.version)]
         self._node_processes.append(HBASE_REST)
         self._ui_info.append(
-            ("HBase REST", HBASE_REST, "http://%s:8085"),
+            ("HBase REST", HBASE_REST, {s.SERVICE_UI: "http://%s:8085"}),
         )
@@ -34,6 +34,7 @@ import sahara.plugins.mapr.services.spark.spark as spark
 import sahara.plugins.mapr.services.sqoop.sqoop2 as sqoop
 import sahara.plugins.mapr.services.yarn.yarn as yarn
 import sahara.plugins.mapr.util.general as g
+import sahara.plugins.mapr.util.password_utils as pu
 from sahara.plugins.mapr.util import service_utils as su
 import sahara.plugins.mapr.util.validation_utils as vu
 import sahara.plugins.provisioning as p
@@ -74,7 +75,7 @@ class Hue(s.Service):
         self._name = 'hue'
         self._ui_name = 'Hue'
         self._node_processes = [HUE]
-        self._ui_info = [('HUE', HUE, 'http://%s:8888')]
+        self._ui_info = None
         self._validation_rules = [
             vu.exactly(1, HUE),
             vu.on_same_node(HUE, httpfs.HTTP_FS),
@@ -82,6 +83,13 @@ class Hue(s.Service):
         ]
         self._priority = 2
 
+    def get_ui_info(self, cluster_context):
+        # Hue uses credentials of the administrative user (PAM auth)
+        return [('HUE', HUE, {s.SERVICE_UI: 'http://%s:8888',
+                              'Username': pu.MAPR_USER_NAME,
+                              'Password': pu.get_mapr_password(cluster_context
+                                                               .cluster)})]
+
     def get_configs(self):
         return [Hue.THRIFT_VERSION]
 
@@ -16,6 +16,7 @@
 import sahara.plugins.mapr.domain.node_process as np
 import sahara.plugins.mapr.domain.service as s
 import sahara.plugins.mapr.util.commands as cmd
+import sahara.plugins.mapr.util.password_utils as pu
 import sahara.plugins.mapr.util.validation_utils as vu
 
 
@@ -50,9 +51,8 @@ class Management(s.Service):
         super(Management, self).__init__()
         self._ui_name = 'Management'
         self._node_processes = [ZOOKEEPER, WEB_SERVER, METRICS]
-        self._ui_info = [
-            ('MapR Control System (MCS)', WEB_SERVER, 'https://%s:8443'),
-        ]
+        self._ui_info = None
         self._validation_rules = [
             vu.at_least(1, ZOOKEEPER),
             vu.at_least(1, WEB_SERVER),
@@ -62,3 +62,10 @@ class Management(s.Service):
     def post_install(self, cluster_context, instances):
         instance = cluster_context.get_instance(WEB_SERVER)
         cmd.chown(instance, 'mapr:mapr', self.SSL_KEYSTORE)
+
+    def get_ui_info(self, cluster_context):
+        # MCS uses credentials of the administrative user (PAM auth)
+        return [('MapR Control System (MCS)', WEB_SERVER,
+                 {s.SERVICE_UI: 'https://%s:8443',
+                  'Username': pu.MAPR_USER_NAME,
+                  'Password': pu.get_mapr_password(cluster_context.cluster)})]
@@ -43,8 +43,8 @@ class MapReduce(s.Service):
         self._version = '0.20.2'
         self._node_processes = [JOB_TRACKER, TASK_TRACKER]
         self._ui_info = [
-            ('JobTracker', JOB_TRACKER, 'http://%s:50030'),
-            ('TaskTracker', TASK_TRACKER, 'http://%s:50060'),
+            ('JobTracker', JOB_TRACKER, {s.SERVICE_UI: 'http://%s:50030'}),
+            ('TaskTracker', TASK_TRACKER, {s.SERVICE_UI: 'http://%s:50060'}),
         ]
         self._validation_rules = [
             vu.at_least(1, JOB_TRACKER),
@@ -82,7 +82,8 @@ class MapRFS(s.Service):
         self._ui_name = 'MapRFS'
         self._node_processes = [CLDB, FILE_SERVER, NFS]
         self._ui_info = [
-            ('Container Location Database (CLDB)', CLDB, 'http://%s:7221'),
+            ('Container Location Database (CLDB)', CLDB,
+             {s.SERVICE_UI: 'http://%s:7221'}),
         ]
         self._validation_rules = [
             vu.at_least(1, CLDB),
@@ -42,7 +42,8 @@ class Oozie(s.Service):
         self._node_processes = [OOZIE]
         self._cluster_defaults = ['oozie-default.json']
         self._validation_rules = [vu.exactly(1, OOZIE)]
-        self._ui_info = [('Oozie', OOZIE, 'http://%s:11000/oozie')]
+        self._ui_info = [('Oozie', OOZIE,
+                          {s.SERVICE_UI: 'http://%s:11000/oozie'})]
 
     def libext_path(self):
         return '/opt/mapr/oozie/oozie-%s/oozie-server/lib/' % self.version
@@ -122,9 +122,9 @@ class Spark(s.Service):
         self._dependencies = [('mapr-spark', self.version)]
         self._ui_info = [
             ('Spark Master', SPARK_MASTER,
-             'http://%%s:%s' % SPARK_MASTER_UI_PORT),
+             {s.SERVICE_UI: 'http://%%s:%s' % SPARK_MASTER_UI_PORT}),
             ('Spark History Server', SPARK_HISTORY_SERVER,
-             'http://%%s:%s' % SPARK_HS_UI_PORT)]
+             {s.SERVICE_UI: 'http://%%s:%s' % SPARK_HS_UI_PORT})]
         self._validation_rules = [
             vu.exactly(1, SPARK_MASTER),
             vu.exactly(1, SPARK_HISTORY_SERVER),
@@ -50,9 +50,11 @@ class YARN(s.Service):
         self._ui_name = 'YARN'
         self._node_processes = [RESOURCE_MANAGER, NODE_MANAGER, HISTORY_SERVER]
         self._ui_info = [
-            ('NodeManager', NODE_MANAGER, 'http://%s:8042'),
-            ('ResourceManager', RESOURCE_MANAGER, 'http://%s:8088'),
-            ('HistoryServer', RESOURCE_MANAGER, 'http://%s:19888'),
+            ('NodeManager', NODE_MANAGER, {s.SERVICE_UI: 'http://%s:8042'}),
+            ('ResourceManager', RESOURCE_MANAGER,
+             {s.SERVICE_UI: 'http://%s:8088'}),
+            ('HistoryServer', RESOURCE_MANAGER,
+             {s.SERVICE_UI: 'http://%s:19888'}),
         ]
         self._cluster_defaults = ['yarn-cluster.json']
         self._node_defaults = ['yarn-node.json']
sahara/plugins/mapr/util/password_utils.py (new file, 71 lines)
@@ -0,0 +1,71 @@
+# Copyright (c) 2016 Intel Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import uuid
+
+import six
+
+from sahara import conductor
+from sahara import context
+from sahara.service.castellan import utils as key_manager
+
+MAPR_USER_NAME = 'mapr'
+MAPR_USER_PASSWORD = 'mapr_password'
+
+conductor = conductor.API
+
+
+def delete_password(cluster, pw_name):
+    """delete the named password from the key manager
+
+    This function will lookup the named password in the cluster entry
+    and delete it from the key manager.
+
+    :param cluster: The cluster record containing the password
+    :param pw_name: The name associated with the password
+    """
+    ctx = context.ctx()
+    cluster = conductor.cluster_get(ctx, cluster.id)
+    key_id = cluster.extra.get(pw_name) if cluster.extra else None
+    if key_id is not None:
+        key_manager.delete_key(key_id, ctx)
+
+
+def get_password(cluster, pw_name):
+    """return a password for the named entry
+
+    This function will return, or create and return, a password for the
+    named entry. It will store the password in the key manager and use
+    the ID in the database entry.
+
+    :param cluster: The cluster record containing the password
+    :param pw_name: The entry name associated with the password
+    :returns: The cleartext password
+    """
+    ctx = context.ctx()
+    cluster = conductor.cluster_get(ctx, cluster.id)
+    passwd = cluster.extra.get(pw_name) if cluster.extra else None
+    if passwd:
+        return key_manager.get_secret(passwd, ctx)
+
+    passwd = six.text_type(uuid.uuid4())
+    extra = cluster.extra.to_dict() if cluster.extra else {}
+    extra[pw_name] = key_manager.store_secret(passwd, ctx)
+    cluster = conductor.cluster_update(ctx, cluster, {'extra': extra})
+    return passwd
+
+
+def get_mapr_password(cluster):
+    return get_password(cluster, MAPR_USER_PASSWORD)
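A short usage sketch of the new helper module. It only runs inside the Sahara service process, where a request context and a cluster object are available (as in the plugin code above); the function names and behaviour are taken from the file added above, while example() itself is a hypothetical caller.

import sahara.plugins.mapr.util.password_utils as pu

def example(cluster):
    # The first call generates a random password, stores it via the key
    # manager and records the returned reference in the cluster's 'extra'
    # field under 'mapr_password'.
    password = pu.get_mapr_password(cluster)

    # Later calls resolve the stored secret and return the same cleartext.
    assert pu.get_mapr_password(cluster) == password

    # When the password is no longer needed, the stored secret can be
    # removed from the key manager.
    pu.delete_password(cluster, pu.MAPR_USER_PASSWORD)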