Merge "Add bare images support for MapR plugin"
This commit is contained in:
commit
803ed597ac
@@ -115,3 +115,43 @@ class AbstractClusterContext(object):
     @abc.abstractproperty
     def should_be_restarted(self):
         return
+
+    @abc.abstractproperty
+    def mapr_repos(self):
+        return
+
+    @abc.abstractproperty
+    def is_prebuilt(self):
+        return
+
+    @abc.abstractproperty
+    def local_repo(self):
+        return
+
+    @abc.abstractproperty
+    def required_services(self):
+        return
+
+    @abc.abstractproperty
+    def all_services(self):
+        return
+
+    @abc.abstractproperty
+    def mapr_version(self):
+        return
+
+    @abc.abstractproperty
+    def ubuntu_base_repo(self):
+        return
+
+    @abc.abstractproperty
+    def ubuntu_ecosystem_repo(self):
+        return
+
+    @abc.abstractproperty
+    def centos_base_repo(self):
+        return
+
+    @abc.abstractproperty
+    def centos_ecosystem_repo(self):
+        return
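Reviewer note: the hunk above turns the bare-image knobs into abstract API on AbstractClusterContext. A minimal sketch of what a concrete context must now supply; class names below are illustrative, not from this commit:

import abc

import six


@six.add_metaclass(abc.ABCMeta)
class ContextSketch(object):
    # stand-in for AbstractClusterContext with two of the new properties
    @abc.abstractproperty
    def is_prebuilt(self):
        return

    @abc.abstractproperty
    def mapr_version(self):
        return


class V401Sketch(ContextSketch):
    @property
    def is_prebuilt(self):
        return False  # bare image: packages still need installing

    @property
    def mapr_version(self):
        return '4.0.1'


print(V401Sketch().mapr_version)  # 4.0.1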
@@ -30,19 +30,21 @@ import sahara.plugins.mapr.services.yarn.yarn as yarn
 import sahara.plugins.mapr.util.general as util
 from sahara.topology import topology_helper as th
 import sahara.utils.configs as sahara_configs
-from sahara.utils import files as f


 LOG = logging.getLogger(__name__)
 conductor = conductor.API

-MAPR_REPO_DIR = '/opt/mapr-repository'
 _MAPR_HOME = '/opt/mapr'
 _JAVA_HOME = '/usr/java/jdk1.7.0_51'
 _CONFIGURE_SH_TIMEOUT = 600
 _SET_MODE_CMD = 'maprcli cluster mapreduce set -mode '

 _TOPO_SCRIPT = 'plugins/mapr/resources/topology.sh'
+INSTALL_JAVA_SCRIPT = 'plugins/mapr/resources/install_java.sh'
+INSTALL_SCALA_SCRIPT = 'plugins/mapr/resources/install_scala.sh'
+INSTALL_MYSQL_CLIENT = 'plugins/mapr/resources/install_mysql_client.sh'
+ADD_MAPR_REPO_SCRIPT = 'plugins/mapr/resources/add_mapr_repo.sh'

 SERVICE_INSTALL_PRIORITY = [
     mng.Management(),
@@ -59,6 +61,8 @@ class BaseConfigurer(ac.AbstractConfigurer):
         instances = instances or cluster_context.get_instances()
         self._configure_ssh_connection(cluster_context, instances)
         self._install_mapr_repo(cluster_context, instances)
+        if not cluster_context.is_prebuilt:
+            self._prepare_bare_image(cluster_context, instances)
         self._install_services(cluster_context, instances)
         self._configure_topology(cluster_context, instances)
         self._configure_database(cluster_context, instances)
@@ -97,34 +101,39 @@ class BaseConfigurer(ac.AbstractConfigurer):

         return sorted(cluster_context.cluster_services, key=key, reverse=True)

+    def _prepare_bare_image(self, cluster_context, instances):
+        LOG.debug('Preparing bare image')
+        d_name = cluster_context.distro.name
+
+        LOG.debug('Installing Java')
+        util.execute_on_instances(
+            instances, util.run_script, INSTALL_JAVA_SCRIPT, 'root', d_name)
+        LOG.debug('Installing Scala')
+        util.execute_on_instances(
+            instances, util.run_script, INSTALL_SCALA_SCRIPT, 'root', d_name)
+        LOG.debug('Installing MySQL client')
+        util.execute_on_instances(
+            instances, util.run_script, INSTALL_MYSQL_CLIENT, 'root', d_name)
+        LOG.debug('Bare images successfully prepared')
+
     def _configure_topology(self, context, instances):
+        def write_file(instance, path, data):
+            with instance.remote() as r:
+                r.write_file_to(path, data, run_as_root=True)
+
         LOG.debug('Configuring cluster topology')
         is_node_aware = context.is_node_aware
         if is_node_aware:
             topo = th.generate_topology_map(context.cluster, is_node_aware)
             topo = '\n'.join(['%s %s' % i for i in six.iteritems(topo)])
             data_path = '%s/topology.data' % context.mapr_home
-            script_path = '%s/topology.sh' % context.mapr_home
-            files = {
-                data_path: topo,
-                script_path: f.get_file_text(_TOPO_SCRIPT),
-            }
-            chmod_cmd = 'chmod +x %s' % script_path
-            for instance in instances:
-                with instance.remote() as r:
-                    r.write_files_to(files, run_as_root=True)
-                    r.execute_command(chmod_cmd, run_as_root=True)
+            util.execute_on_instances(instances, write_file, data_path, topo)
+            util.execute_on_instances(
+                instances, util.run_script, _TOPO_SCRIPT, 'root', data_path)
         else:
             LOG.debug('Data locality is disabled.')
         LOG.debug('Cluster topology successfully configured')

-    def _execute_on_instances(self, function, cluster_context, instances,
-                              **kwargs):
-        with context.ThreadGroup() as tg:
-            for instance in instances:
-                tg.spawn('%s-execution' % function.__name__,
-                         function, instance, **kwargs)
-
     def _write_config_files(self, cluster_context, instances):
         LOG.debug('Writing config files')
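Reviewer note: `util.execute_on_instances` (added later in this commit, in the util/general.py hunk) runs the given function once per instance in parallel, replacing the private `_execute_on_instances` method deleted above. A rough stand-alone analog with plain threads in place of sahara's context.ThreadGroup, to show the semantics:

import threading


def execute_on_instances_sketch(instances, function, *args, **kwargs):
    # one worker thread per instance, all joined before returning,
    # mirroring context.ThreadGroup's scoped spawn/join behaviour
    threads = []
    for instance in instances:
        t = threading.Thread(name='%s-execution' % function.__name__,
                             target=function,
                             args=(instance,) + args, kwargs=kwargs)
        threads.append(t)
        t.start()
    for t in threads:
        t.join()


# usage shape from the diff:
# execute_on_instances_sketch(instances, run_script,
#                             INSTALL_JAVA_SCRIPT, 'root', d_name)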
@@ -230,10 +239,8 @@ class BaseConfigurer(ac.AbstractConfigurer):
             else:
                 LOG.debug('user "mapr" does not exists')

-        self._execute_on_instances(set_user_password, cluster_context,
-                                   instances)
-        self._execute_on_instances(create_home_mapr, cluster_context,
-                                   instances)
+        util.execute_on_instances(instances, set_user_password)
+        util.execute_on_instances(instances, create_home_mapr)

     def _configure_sh_cluster(self, cluster_context, instances):
         LOG.debug('Executing configure.sh')
@@ -273,8 +280,7 @@ class BaseConfigurer(ac.AbstractConfigurer):
                 r.execute_command(echo_param)
                 r.execute_command(echo_timeout)

-        self._execute_on_instances(keep_alive_connection,
-                                   cluster_context, instances)
+        util.execute_on_instances(instances, keep_alive_connection)

     def mapr_user_exists(self, instance):
         with instance.remote() as r:
@@ -300,18 +306,10 @@ class BaseConfigurer(ac.AbstractConfigurer):
                 r.execute_command(cmd % cluster_mode)

     def _install_mapr_repo(self, cluster_context, instances):
-        def add_repo(instance, **kwargs):
-            with instance.remote() as r:
-                script = '/tmp/repo_install.sh'
-                data = cluster_context.get_install_repo_script_data()
-                r.write_file_to(script, data, run_as_root=True)
-                r.execute_command('chmod +x %s' % script, run_as_root=True)
-                r.execute_command('%s %s' % (script, kwargs.get('distro')),
-                                  run_as_root=True, raise_when_error=False)
-
         d_name = cluster_context.distro.name
-        self._execute_on_instances(
-            add_repo, cluster_context, instances, distro=d_name)
+        util.execute_on_instances(
+            instances, util.run_script, ADD_MAPR_REPO_SCRIPT, 'root', d_name,
+            **cluster_context.mapr_repos)

     def _update_services(self, c_context, instances):
         for service in c_context.cluster_services:
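Reviewer note: the distro name travels to add_mapr_repo.sh as `$1`, while `**cluster_context.mapr_repos` supplies the values for the script's `%(...)s` placeholders. The four keys come from the new `mapr_repos` property; the values shown here are the 4.0.1 ones assigned in later hunks:

# Keys match the %(...)s placeholders in add_mapr_repo.sh.
mapr_repos = {
    "ubuntu_mapr_base_repo":
        'http://package.mapr.com/releases/v4.0.1/ubuntu/ mapr optional',
    "ubuntu_mapr_ecosystem_repo":
        'http://package.mapr.com/releases/ecosystem-4.x/ubuntu binary/',
    "centos_mapr_base_repo":
        'http://package.mapr.com/releases/v4.0.1/redhat/',
    "centos_mapr_ecosystem_repo":
        'http://package.mapr.com/releases/ecosystem-4.x/redhat',
}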
@@ -45,10 +45,13 @@ def _get_node_process_name(node_process):


 class BaseClusterContext(cc.AbstractClusterContext):
+    ubuntu_base = 'http://package.mapr.com/releases/v%s/ubuntu/ mapr optional'
+    centos_base = 'http://package.mapr.com/releases/v%s/redhat/'
+
     def __init__(self, cluster, version_handler, added=None, removed=None):
         self._cluster = cluster
         self._distro = None
-        self.all_services_list = version_handler.get_services()
+        self._all_services = version_handler.get_services()
         self._required_services = version_handler.get_required_services()
         self._cluster_services = None
         self._mapr_home = '/opt/mapr'
@@ -71,6 +74,14 @@ class BaseClusterContext(cc.AbstractClusterContext):
         self._existing_instances = [i for i in self.get_instances()
                                     if i not in self._changed_instances]
         self._restart = collections.defaultdict(list)
+        self._ubuntu_base_repo = None
+        self._ubuntu_ecosystem_repo = None
+        self._centos_base_repo = None
+        self._centos_ecosystem_repo = None
+        self._repos = {}
+        self._is_prebuilt = None
+        self._local_repo = '/opt/mapr-repository'
+        self._mapr_version = None

     @property
     def cluster(self):
@@ -86,6 +97,10 @@ class BaseClusterContext(cc.AbstractClusterContext):
     def required_services(self):
         return self._required_services

+    @property
+    def all_services(self):
+        return self._all_services
+
     @property
     def mapr_home(self):
         return self._mapr_home
@@ -225,9 +240,6 @@ class BaseClusterContext(cc.AbstractClusterContext):
             result.update(service.get_configs_dict())
         return result

-    def get_configure_sh_path(self):
-        return '/opt/mapr/server/configure.sh'
-
     def get_chosen_service_version(self, service_name):
         service_configs = self.cluster.cluster_configs.get(service_name, None)
         if not service_configs:
@@ -259,7 +271,7 @@ class BaseClusterContext(cc.AbstractClusterContext):
         return service

     def _find_service_instance(self, ui_name, version):
-        for service in self.all_services_list:
+        for service in self.all_services:
             if service.ui_name == ui_name:
                 if version is not None and service.version != version:
                     continue
@@ -267,7 +279,7 @@ class BaseClusterContext(cc.AbstractClusterContext):

     def get_service_name_by_node_process(self, node_process):
         node_process = _get_node_process_name(node_process)
-        for service in self.all_services_list:
+        for service in self.all_services:
             node_processes = [np.ui_name for np in service.node_processes]
             if node_process in node_processes:
                 return service.ui_name
@@ -337,3 +349,49 @@ class BaseClusterContext(cc.AbstractClusterContext):
     @property
     def should_be_restarted(self):
         return self._restart
+
+    @property
+    def mapr_repos(self):
+        if not self._repos:
+            self._repos = {
+                "ubuntu_mapr_base_repo": self.ubuntu_base_repo,
+                "ubuntu_mapr_ecosystem_repo": self.ubuntu_ecosystem_repo,
+                "centos_mapr_base_repo": self.centos_base_repo,
+                "centos_mapr_ecosystem_repo": self.centos_ecosystem_repo,
+            }
+        return self._repos
+
+    @property
+    def local_repo(self):
+        return self._local_repo
+
+    @property
+    def is_prebuilt(self):
+        if self._is_prebuilt is None:
+            self._is_prebuilt = g.is_directory(
+                self.some_instance, self.local_repo)
+        return self._is_prebuilt
+
+    @property
+    def mapr_version(self):
+        return self._mapr_version
+
+    @property
+    def ubuntu_base_repo(self):
+        if not self._ubuntu_base_repo:
+            self._ubuntu_base_repo = self.ubuntu_base % self.mapr_version
+        return self._ubuntu_base_repo
+
+    @property
+    def ubuntu_ecosystem_repo(self):
+        return self._ubuntu_ecosystem_repo
+
+    @property
+    def centos_base_repo(self):
+        if not self._centos_base_repo:
+            self._centos_base_repo = self.centos_base % self.mapr_version
+        return self._centos_base_repo
+
+    @property
+    def centos_ecosystem_repo(self):
+        return self._centos_ecosystem_repo
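Reviewer note: the class-level `ubuntu_base`/`centos_base` templates plus the per-version `_mapr_version` replace the repo constants deleted from each version-specific Context later in this commit. The derivation, reduced to plain Python:

ubuntu_base = 'http://package.mapr.com/releases/v%s/ubuntu/ mapr optional'
centos_base = 'http://package.mapr.com/releases/v%s/redhat/'

mapr_version = '3.1.1'  # each version-specific Context sets its own
print(ubuntu_base % mapr_version)
# -> http://package.mapr.com/releases/v3.1.1/ubuntu/ mapr optional
print(centos_base % mapr_version)
# -> http://package.mapr.com/releases/v3.1.1/redhat/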
@@ -16,7 +16,7 @@ elif [ "$1" = 'CentOS' -o "$1" = 'RedHatEnterpriseServer' ]; then
 cat >> /etc/yum.repos.d/maprtech.repo << EOF
 [maprtech]
 name=MapR Technologies
-baseurl=%(centos_mapr_repo)s
+baseurl=%(centos_mapr_base_repo)s
 enabled=1
 gpgcheck=0
 protect=1
@@ -12,7 +12,7 @@ mkdir -p $JAVA_HOME

 JAVA_FILE=$(basename $JAVA_DOWNLOAD_URL)
 wget --no-check-certificate --no-cookies -c \
-    --header "Cookie: gpw_e24=http%3A%2F%2Fwww.oracle.com%2F; oraclelicense=accept-securebackup-cookie" \
+    --header "Cookie: gpw_e24=http://www.oracle.com/; oraclelicense=accept-securebackup-cookie" \
     -O $JAVA_HOME/$JAVA_FILE $JAVA_DOWNLOAD_URL
 if [ $? -eq 0 ]; then
     echo "Java download successful"
new file: sahara/plugins/mapr/resources/install_mysql_client.sh (12 lines)
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+if [[ $1 == *"Ubuntu"* ]]; then
+    sudo apt-get install --force-yes -y mysql-client
+elif [[ $1 == *"CentOS"* ]] || [[ $1 == *"Red Hat Enterprise Linux"* ]]; then
+    sudo yum install -y mysql
+elif [[ $1 == *"SUSE"* ]]; then
+    sudo zypper install mysql-community-server-client
+else
+    echo "Unknown distribution"
+    exit 1
+fi
new file: sahara/plugins/mapr/resources/install_scala.sh (27 lines)
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+#Current available version
+DEF_VERSION="2.11.5"
+
+VERSION="$(wget -qO- http://www.scala-lang.org|grep 'scala-version'|grep -Eo '([0-9]\.?)+')"
+
+if [ $? != 0 -o -z ${VERSION} ]; then
+    VERSION=${DEF_VERSION}
+fi
+
+PKG=scala-${VERSION}
+
+URL="http://downloads.typesafe.com/scala/${VERSION}"
+
+if [ "$1" = "Ubuntu" ]; then
+    wget -N ${URL}/${PKG}.deb
+    dpkg -i ${PKG}.deb
+    rm ${PKG}.deb
+    # install java if missing
+    apt-get install -f -y --force-yes
+elif [ "$1" = 'CentOS' -o "$1" = 'RedHatEnterpriseServer' ]; then
+    rpm -Uhv ${URL}/${PKG}.rpm
+else
+    echo "Unknown distribution"
+    exit 1
+fi
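Reviewer note: the script scrapes scala-lang.org for the current version and falls back to the pinned DEF_VERSION when the lookup fails or matches nothing. The same guard, sketched in Python 2 (matching the codebase's vintage) with the script's URL and default:

import re
import urllib2  # Python 2, as used by sahara at this point


def detect_scala_version(default='2.11.5'):
    # equivalent of: wget -qO- ... | grep 'scala-version' | grep -Eo ...,
    # falling back to the default on any failure or empty match
    try:
        page = urllib2.urlopen('http://www.scala-lang.org', timeout=10).read()
        match = re.search(r"scala-version[^0-9]*([0-9]+(?:\.[0-9]+)+)", page)
        return match.group(1) if match else default
    except Exception:
        return default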
@@ -21,6 +21,7 @@ from sahara.i18n import _
 import sahara.plugins.mapr.domain.configuration_file as bcf
 import sahara.plugins.mapr.domain.node_process as np
 import sahara.plugins.mapr.domain.service as s
+import sahara.plugins.mapr.util.general as g
 import sahara.plugins.mapr.util.validation_utils as vu
 import sahara.plugins.provisioning as p
 from sahara.utils import files
@@ -103,14 +104,8 @@ class MapRFS(s.Service):

     def _generate_disk_list_file(self, instance, path_to_disk_setup_script):
         LOG.debug('Creating disk list file')
-        script_path = '/tmp/disk_setup_script.sh'
-        with instance.remote() as r:
-            r.write_file_to(
-                script_path, files.get_file_text(path_to_disk_setup_script))
-            r.execute_command('chmod +x ' + script_path, run_as_root=True)
-            args = ' '.join(instance.node_group.storage_paths())
-            cmd = '%s %s' % (script_path, args)
-            r.execute_command(cmd, run_as_root=True)
+        g.run_script(instance, path_to_disk_setup_script, 'root',
+                     *instance.node_group.storage_paths())

     def _execute_disksetup(self, instance):
         with instance.remote() as rmt:
@@ -21,6 +21,7 @@ import six
 import sahara.plugins.mapr.domain.configuration_file as cf
 import sahara.plugins.mapr.domain.service as s
 import sahara.plugins.mapr.services.hive.hive as hive
+import sahara.plugins.mapr.util.general as g
 import sahara.utils.files as f

 LOG = logging.getLogger(__name__)
@@ -198,11 +199,4 @@ class MySQL(s.Service):

     @staticmethod
     def install_mysql(instance, distro_name):
-        with instance.remote() as r:
-            script = '/tmp/install_mysql.sh'
-            data = f.get_file_text(MySQL.MYSQL_INSTALL_SCRIPT)
-            r.write_file_to(script, data, run_as_root=True)
-            r.execute_command('chmod +x %s' % script, run_as_root=True)
-            r.execute_command('%s %s' % (script, distro_name),
-                              run_as_root=True,
-                              timeout=MySQL.INSTALL_PACKAGES_TIMEOUT)
+        g.run_script(instance, MySQL.MYSQL_INSTALL_SCRIPT, 'root', distro_name)
@@ -15,6 +15,9 @@

 import uuid

+from sahara import context
+import sahara.utils.files as files
+

 def unique_list(iterable, mapper=lambda i: i):
     result = []
@@ -88,3 +91,21 @@ def copy(s_path, s_instance, d_path, d_instance, run_as=None):
         copy_dir(s_path, s_instance, d_path, d_instance, run_as)
     else:
         copy_file(s_path, s_instance, d_path, d_instance, run_as)
+
+
+def run_script(instance, script, run_as=None, *args, **kwargs):
+    with instance.remote() as r:
+        path = '/tmp/%s.sh' % uuid.uuid4()
+        script = files.get_file_text(script) % kwargs
+        r.write_file_to(path, script, run_as_root=(run_as == 'root'))
+        r.execute_command(_run_as(run_as, 'chmod +x %s' % path))
+        r.execute_command(_run_as(run_as, '%s %s' % (path, ' '.join(args))))
+        # FIXME(aosadchyi): reuse existing remote
+        remove(instance, path, run_as=run_as)
+
+
+def execute_on_instances(instances, function, *args, **kwargs):
+    with context.ThreadGroup() as tg:
+        for instance in instances:
+            t_name = '%s-execution' % function.__name__
+            tg.spawn(t_name, function, instance, *args, **kwargs)
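Reviewer note: `run_script` is what ties the commit together — keyword arguments render the script's `%(...)s` placeholders before upload, and positional arguments become `$1…$n` on the command line. The rendering step in isolation, with an illustrative two-line script standing in for add_mapr_repo.sh:

# what files.get_file_text(script) % kwargs does before upload
script_text = ('#!/bin/bash\n'
               'echo "distro: $1"\n'
               'echo "baseurl: %(centos_mapr_base_repo)s"\n')

kwargs = {'centos_mapr_base_repo':
          'http://package.mapr.com/releases/v4.0.1/redhat/'}
rendered = script_text % kwargs   # placeholders filled, $1 left untouched

args = ('CentOS',)
command = '/tmp/<uuid>.sh %s' % ' '.join(args)  # run_script uses a uuid path
print(rendered)
print(command)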
@@ -16,18 +16,9 @@
 import sahara.plugins.mapr.base.base_cluster_context as bc
 import sahara.plugins.mapr.services.mapreduce.mapreduce as mr
 import sahara.plugins.mapr.services.maprfs.maprfs as maprfs
-from sahara.utils import files as f


 class Context(bc.BaseClusterContext):
-    UBUNTU_MAPR_BASE_REPO = ('http://package.mapr.com/releases/v3.1.1/ubuntu/ '
-                             'mapr optional')
-    UBUNTU_MAPR_ECOSYSTEM_REPO = ('http://package.mapr.com/releases/'
-                                  'ecosystem/ubuntu binary/')
-    CENTOS_MAPR_BASE_REPO = 'http://package.mapr.com/releases/v3.1.1/redhat/'
-    CENTOS_MAPR_ECOSYSTEM_REPO = ('http://package.mapr.com/releases/'
-                                  'ecosystem/redhat')
-
     def __init__(self, cluster, version_handler, added=None, removed=None):
         super(Context, self).__init__(cluster, version_handler, added, removed)
         self._hadoop_version = mr.MapReduce().version
@@ -36,25 +27,22 @@ class Context(bc.BaseClusterContext):
         self._resource_manager_uri = 'maprfs:///'
         self._cluster_mode = None
         self._node_aware = False
+        self._mapr_version = '3.1.1'
+        self._ubuntu_ecosystem_repo = (
+            'http://package.mapr.com/releases/ecosystem/ubuntu binary/')
+        self._centos_ecosystem_repo = (
+            'http://package.mapr.com/releases/ecosystem/redhat')

     @property
     def hadoop_lib(self):
         if not self._hadoop_lib:
-            f = '%(hadoop_home)s/lib'
-            args = {
-                'hadoop_home': self.hadoop_home,
-            }
-            self._hadoop_lib = f % args
+            self._hadoop_lib = '%s/lib' % self.hadoop_home
         return self._hadoop_lib

     @property
     def hadoop_conf(self):
         if not self._hadoop_conf:
-            f = '%(hadoop_home)s/conf'
-            args = {
-                'hadoop_home': self.hadoop_home,
-            }
-            self._hadoop_conf = f % args
+            self._hadoop_conf = '%s/conf' % self.hadoop_home
         return self._hadoop_conf

     @property
@@ -68,14 +56,3 @@ class Context(bc.BaseClusterContext):
         mapr_db = self._get_cluster_config_value(mapr_db)
         self._mapr_db = '-M7' if mapr_db else ''
         return self._mapr_db
-
-    def get_install_repo_script_data(self):
-        script_template = 'plugins/mapr/resources/add_mapr_repo.sh'
-        script_template = f.get_file_text(script_template)
-        args = {
-            "ubuntu_mapr_base_repo": Context.UBUNTU_MAPR_BASE_REPO,
-            "ubuntu_mapr_ecosystem_repo": Context.UBUNTU_MAPR_ECOSYSTEM_REPO,
-            "centos_mapr_repo": Context.CENTOS_MAPR_BASE_REPO,
-            "centos_mapr_ecosystem_repo": Context.CENTOS_MAPR_ECOSYSTEM_REPO,
-        }
-        return script_template % args
@@ -15,18 +15,9 @@

 import sahara.plugins.mapr.base.base_cluster_context as bc
 import sahara.plugins.mapr.services.mapreduce.mapreduce as mr
-from sahara.utils import files as f


 class Context(bc.BaseClusterContext):
-    UBUNTU_MAPR_BASE_REPO = ('http://package.mapr.com/releases/v4.0.1/ubuntu/ '
-                             'mapr optional')
-    UBUNTU_MAPR_ECOSYSTEM_REPO = ('http://package.mapr.com/releases/'
-                                  'ecosystem-4.x/ubuntu binary/')
-    CENTOS_MAPR_BASE_REPO = 'http://package.mapr.com/releases/v4.0.1/redhat/'
-    CENTOS_MAPR_ECOSYSTEM_REPO = ('http://package.mapr.com/releases/'
-                                  'ecosystem-4.x/redhat')
-
     def __init__(self, cluster, version_handler, added=None, removed=None):
         super(Context, self).__init__(cluster, version_handler, added, removed)
         self._hadoop_version = mr.MapReduce().version
@@ -35,38 +26,24 @@ class Context(bc.BaseClusterContext):
         self._resource_manager_uri = 'maprfs:///'
         self._cluster_mode = mr.MapReduce.cluster_mode
         self._node_aware = False
+        self._mapr_version = '4.0.1'
+        self._ubuntu_ecosystem_repo = (
+            'http://package.mapr.com/releases/ecosystem-4.x/ubuntu binary/')
+        self._centos_ecosystem_repo = (
+            'http://package.mapr.com/releases/ecosystem-4.x/redhat')

     @property
     def hadoop_lib(self):
         if not self._hadoop_lib:
-            f = '%(hadoop_home)s/lib'
-            args = {
-                'hadoop_home': self.hadoop_home,
-            }
-            self._hadoop_lib = f % args
+            self._hadoop_lib = '%s/lib' % self.hadoop_home
         return self._hadoop_lib

     @property
     def hadoop_conf(self):
         if not self._hadoop_conf:
-            f = '%(hadoop_home)s/conf'
-            args = {
-                'hadoop_home': self.hadoop_home,
-            }
-            self._hadoop_conf = f % args
+            self._hadoop_conf = '%s/conf' % self.hadoop_home
         return self._hadoop_conf

     @property
     def resource_manager_uri(self):
         return self._resource_manager_uri
-
-    def get_install_repo_script_data(self):
-        script_template = 'plugins/mapr/resources/add_mapr_repo.sh'
-        script_template = f.get_file_text(script_template)
-        args = {
-            "ubuntu_mapr_base_repo": Context.UBUNTU_MAPR_BASE_REPO,
-            "ubuntu_mapr_ecosystem_repo": Context.UBUNTU_MAPR_ECOSYSTEM_REPO,
-            "centos_mapr_repo": Context.CENTOS_MAPR_BASE_REPO,
-            "centos_mapr_ecosystem_repo": Context.CENTOS_MAPR_ECOSYSTEM_REPO,
-        }
-        return script_template % args
@@ -15,18 +15,9 @@

 import sahara.plugins.mapr.base.base_cluster_context as bc
 import sahara.plugins.mapr.services.yarn.yarn as yarn
-from sahara.utils import files as f


 class Context(bc.BaseClusterContext):
-    UBUNTU_MAPR_BASE_REPO = ('http://package.mapr.com/releases/v4.0.1/ubuntu/'
-                             ' mapr optional')
-    UBUNTU_MAPR_ECOSYSTEM_REPO = ('http://package.mapr.com/releases/'
-                                  'ecosystem-4.x/ubuntu binary/')
-    CENTOS_MAPR_BASE_REPO = 'http://package.mapr.com/releases/v4.0.1/redhat/'
-    CENTOS_MAPR_ECOSYSTEM_REPO = ('http://package.mapr.com/releases/'
-                                  'ecosystem-4.x/redhat')
-
     def __init__(self, cluster, version_handler, added=None, removed=None):
         super(Context, self).__init__(cluster, version_handler, added, removed)
         self._hadoop_version = yarn.YARNv241().version
@@ -35,25 +26,22 @@ class Context(bc.BaseClusterContext):
         self._resource_manager_uri = None
         self._cluster_mode = yarn.YARNv241.cluster_mode
         self._node_aware = False
+        self._mapr_version = '4.0.1'
+        self._ubuntu_ecosystem_repo = (
+            'http://package.mapr.com/releases/ecosystem-4.x/ubuntu binary/')
+        self._centos_ecosystem_repo = (
+            'http://package.mapr.com/releases/ecosystem-4.x/redhat')

     @property
     def hadoop_lib(self):
         if not self._hadoop_lib:
-            f = '%(hadoop_home)s/share/hadoop/common'
-            args = {
-                'hadoop_home': self.hadoop_home,
-            }
-            self._hadoop_lib = f % args
+            self._hadoop_lib = '%s/share/hadoop/common' % self.hadoop_home
         return self._hadoop_lib

     @property
     def hadoop_conf(self):
         if not self._hadoop_conf:
-            f = '%(hadoop_home)s/etc/hadoop'
-            args = {
-                'hadoop_home': self.hadoop_home,
-            }
-            self._hadoop_conf = f % args
+            self._hadoop_conf = '%s/etc/hadoop' % self.hadoop_home
         return self._hadoop_conf

     @property
@@ -74,14 +62,3 @@ class Context(bc.BaseClusterContext):
         }
         self._configure_sh = f % args
         return self._configure_sh
-
-    def get_install_repo_script_data(self):
-        script_template = 'plugins/mapr/resources/add_mapr_repo.sh'
-        script_template = f.get_file_text(script_template)
-        args = {
-            "ubuntu_mapr_base_repo": Context.UBUNTU_MAPR_BASE_REPO,
-            "ubuntu_mapr_ecosystem_repo": Context.UBUNTU_MAPR_ECOSYSTEM_REPO,
-            "centos_mapr_repo": Context.CENTOS_MAPR_BASE_REPO,
-            "centos_mapr_ecosystem_repo": Context.CENTOS_MAPR_ECOSYSTEM_REPO,
-        }
-        return script_template % args