Plugins split from sahara core
Change-Id: I43e0beec6508f93a436a150749bfa23571986b9d
parent 31aa7c01f2
commit 40fd2aa34e
@@ -1,3 +1,3 @@
 [DEFAULT]
-test_path=./sahara/tests/unit
+test_path=./sahara_plugin_mapr/tests/unit
 top_dir=./
README.rst (new file, 35 lines)
@@ -0,0 +1,35 @@
+========================
+Team and repository tags
+========================
+
+.. image:: https://governance.openstack.org/tc/badges/sahara.svg
+    :target: https://governance.openstack.org/tc/reference/tags/index.html
+
+.. Change things from this point on
+
+OpenStack Data Processing ("Sahara") project
+============================================
+
+Sahara at wiki.openstack.org: https://wiki.openstack.org/wiki/Sahara
+
+Storyboard project: https://storyboard.openstack.org/#!/project/935
+
+Sahara docs site: https://docs.openstack.org/sahara/latest/
+
+Roadmap: https://wiki.openstack.org/wiki/Sahara/Roadmap
+
+Quickstart guide: https://docs.openstack.org/sahara/latest/user/quickstart.html
+
+How to participate: https://docs.openstack.org/sahara/latest/contributor/how-to-participate.html
+
+Source: https://git.openstack.org/cgit/openstack/sahara
+
+Bugs and feature requests: https://storyboard.openstack.org/#!/project/935
+
+Release notes: https://docs.openstack.org/releasenotes/sahara/
+
+License
+-------
+
+Apache License Version 2.0 http://www.apache.org/licenses/LICENSE-2.0
doc/requirements.txt (new file, 9 lines)
@@ -0,0 +1,9 @@
+# The order of packages is significant, because pip processes them in the order
+# of appearance. Changing the order has an impact on the overall integration
+# process, which may cause wedges in the gate later.
+openstackdocstheme>=1.18.1 # Apache-2.0
+os-api-ref>=1.4.0 # Apache-2.0
+reno>=2.5.0 # Apache-2.0
+sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD
+sphinxcontrib-httpdomain>=1.3.0 # BSD
+whereto>=0.3.0 # Apache-2.0
lower-constraints.txt (new file, 162 lines)
@@ -0,0 +1,162 @@
+alabaster==0.7.10
+alembic==0.8.10
+amqp==2.2.2
+appdirs==1.4.3
+asn1crypto==0.24.0
+astroid==1.3.8
+Babel==2.3.4
+bandit==1.1.0
+bashate==0.5.1
+bcrypt==3.1.4
+botocore==1.5.1
+cachetools==2.0.1
+castellan==0.16.0
+certifi==2018.1.18
+cffi==1.11.5
+chardet==3.0.4
+click==6.7
+cliff==2.11.0
+cmd2==0.8.1
+contextlib2==0.5.5
+coverage==4.0
+cryptography==2.1.4
+debtcollector==1.19.0
+decorator==4.2.1
+deprecation==2.0
+doc8==0.6.0
+docutils==0.14
+dogpile.cache==0.6.5
+dulwich==0.19.0
+enum-compat==0.0.2
+eventlet==0.18.2
+extras==1.0.0
+fasteners==0.14.1
+fixtures==3.0.0
+flake8==2.6.2
+Flask==1.0.2
+future==0.16.0
+futurist==1.6.0
+gitdb2==2.0.3
+GitPython==2.1.8
+greenlet==0.4.13
+hacking==1.1.0
+idna==2.6
+imagesize==1.0.0
+iso8601==0.1.11
+itsdangerous==0.24
+Jinja2==2.10
+jmespath==0.9.3
+jsonpatch==1.21
+jsonpointer==2.0
+jsonschema==2.6.0
+keystoneauth1==3.4.0
+keystonemiddleware==4.17.0
+kombu==4.1.0
+linecache2==1.0.0
+logilab-common==1.4.1
+Mako==1.0.7
+MarkupSafe==1.0
+mccabe==0.2.1
+mock==2.0.0
+monotonic==1.4
+mox3==0.25.0
+msgpack==0.5.6
+munch==2.2.0
+netaddr==0.7.19
+netifaces==0.10.6
+openstackdocstheme==1.18.1
+openstacksdk==0.12.0
+os-api-ref==1.4.0
+os-client-config==1.29.0
+os-service-types==1.2.0
+osc-lib==1.10.0
+oslo.cache==1.29.0
+oslo.concurrency==3.26.0
+oslo.config==5.2.0
+oslo.context==2.19.2
+oslo.db==4.27.0
+oslo.i18n==3.15.3
+oslo.log==3.36.0
+oslo.messaging==5.29.0
+oslo.middleware==3.31.0
+oslo.policy==1.30.0
+oslo.rootwrap==5.8.0
+oslo.serialization==2.18.0
+oslo.service==1.24.0
+oslo.upgradecheck==0.1.0
+oslo.utils==3.33.0
+oslotest==3.2.0
+packaging==17.1
+paramiko==2.0.0
+Paste==2.0.3
+PasteDeploy==1.5.2
+pbr==2.0.0
+pika-pool==0.1.3
+pika==0.10.0
+prettytable==0.7.2
+psycopg2==2.6.2
+pyasn1==0.4.2
+pycadf==2.7.0
+pycparser==2.18
+pycodestyle==2.4.0
+pyflakes==0.8.1
+Pygments==2.2.0
+pyinotify==0.9.6
+pylint==1.4.5
+PyMySQL==0.7.6
+PyNaCl==1.2.1
+pyOpenSSL==17.5.0
+pyparsing==2.2.0
+pyperclip==1.6.0
+python-barbicanclient==4.6.0
+python-cinderclient==3.3.0
+python-dateutil==2.7.0
+python-editor==1.0.3
+python-glanceclient==2.8.0
+python-heatclient==1.10.0
+python-keystoneclient==3.8.0
+python-manilaclient==1.16.0
+python-mimeparse==1.6.0
+python-neutronclient==6.7.0
+python-novaclient==9.1.0
+python-openstackclient==3.14.0
+python-saharaclient==1.4.0
+python-subunit==1.2.0
+python-swiftclient==3.2.0
+pytz==2018.3
+PyYAML==3.12
+reno==2.5.0
+repoze.lru==0.7
+requests==2.14.2
+requestsexceptions==1.4.0
+restructuredtext-lint==1.1.3
+rfc3986==1.1.0
+Routes==2.4.1
+simplejson==3.13.2
+six==1.10.0
+smmap2==2.0.3
+snowballstemmer==1.2.1
+Sphinx==1.6.2
+sphinxcontrib-httpdomain==1.3.0
+sphinxcontrib-websupport==1.0.1
+sqlalchemy-migrate==0.11.0
+SQLAlchemy==1.0.10
+sqlparse==0.2.4
+statsd==3.2.2
+stestr==1.0.0
+stevedore==1.20.0
+Tempita==0.5.2
+tenacity==4.9.0
+testresources==2.0.0
+testscenarios==0.4
+testtools==2.2.0
+tooz==1.58.0
+traceback2==1.4.0
+unittest2==1.1.0
+urllib3==1.22
+vine==1.1.4
+voluptuous==0.11.1
+warlock==1.3.0
+WebOb==1.7.1
+Werkzeug==0.14.1
+wrapt==1.10.11
@@ -39,6 +39,7 @@ python-swiftclient>=3.2.0 # Apache-2.0
 python-neutronclient>=6.7.0 # Apache-2.0
 python-heatclient>=1.10.0 # Apache-2.0
 python-glanceclient>=2.8.0 # Apache-2.0
+sahara
 six>=1.10.0 # MIT
 stevedore>=1.20.0 # Apache-2.0
 SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT
@@ -18,24 +18,23 @@ import abc
 from oslo_log import log as logging
 import six

-from sahara import conductor
-from sahara import context
-from sahara.i18n import _
-import sahara.plugins.mapr.abstract.configurer as ac
-from sahara.plugins.mapr.domain import distro as d
-from sahara.plugins.mapr.domain import service as srvc
-import sahara.plugins.mapr.services.management.management as mng
-import sahara.plugins.mapr.services.mapreduce.mapreduce as mr
-from sahara.plugins.mapr.services.maprfs import maprfs
-from sahara.plugins.mapr.services.mysql import mysql
-import sahara.plugins.mapr.services.yarn.yarn as yarn
-from sahara.plugins.mapr.util import event_log as el
-import sahara.plugins.mapr.util.general as util
-import sahara.plugins.mapr.util.password_utils as pu
-import sahara.utils.files as files
+from sahara.plugins import conductor
+from sahara.plugins import context
+import sahara.plugins.utils as utils
+from sahara_plugin_mapr.i18n import _
+import sahara_plugin_mapr.plugins.mapr.abstract.configurer as ac
+from sahara_plugin_mapr.plugins.mapr.domain import distro as d
+from sahara_plugin_mapr.plugins.mapr.domain import service as srvc
+import sahara_plugin_mapr.plugins.mapr.services.management.management as mng
+import sahara_plugin_mapr.plugins.mapr.services.mapreduce.mapreduce as mr
+from sahara_plugin_mapr.plugins.mapr.services.maprfs import maprfs
+from sahara_plugin_mapr.plugins.mapr.services.mysql import mysql
+import sahara_plugin_mapr.plugins.mapr.services.yarn.yarn as yarn
+from sahara_plugin_mapr.plugins.mapr.util import event_log as el
+import sahara_plugin_mapr.plugins.mapr.util.general as util
+import sahara_plugin_mapr.plugins.mapr.util.password_utils as pu

 LOG = logging.getLogger(__name__)
 conductor = conductor.API

 _JAVA_HOME = '/usr/java/jdk1.7.0_51'
 _CONFIGURE_SH_TIMEOUT = 600
@@ -173,7 +172,7 @@ class BaseConfigurer(ac.AbstractConfigurer):
         topology_map = "\n".join(topology_map) + "\n"

         data_path = "%s/topology.data" % cluster_context.mapr_home
-        script = files.get_file_text(_TOPO_SCRIPT)
+        script = utils.get_file_text(_TOPO_SCRIPT, 'sahara_plugin_mapr')
         script_path = '%s/topology.sh' % cluster_context.mapr_home

     @el.provision_event()
@@ -310,7 +309,7 @@ class BaseConfigurer(ac.AbstractConfigurer):
             'port': mysql.MySQL.MYSQL_SERVER_PORT,
         })

-        with context.ThreadGroup() as tg:
+        with context.PluginsThreadGroup() as tg:
             for instance in instances:
                 tg.spawn('configure-sh-%s' % instance.id,
                          self._configure_sh_instance, cluster_context,
@@ -17,21 +17,20 @@ import collections

 from oslo_config import cfg

-import sahara.exceptions as e
-from sahara.i18n import _
-import sahara.plugins.mapr.abstract.cluster_context as cc
-import sahara.plugins.mapr.domain.configuration_file as bcf
-import sahara.plugins.mapr.domain.distro as distro
-import sahara.plugins.mapr.services.management.management as mng
-import sahara.plugins.mapr.services.maprfs.maprfs as mfs
-import sahara.plugins.mapr.services.oozie.oozie as oozie
-from sahara.plugins.mapr.services.swift import swift
-import sahara.plugins.mapr.services.yarn.yarn as yarn
-import sahara.plugins.mapr.util.general as g
-import sahara.plugins.mapr.util.service_utils as su
+import sahara.plugins.exceptions as e
+from sahara.plugins import topology_helper as th
+import sahara.plugins.utils as u
-from sahara.topology import topology_helper as th
-import sahara.utils.configs as sahara_configs
+from sahara_plugin_mapr.i18n import _
+import sahara_plugin_mapr.plugins.mapr.abstract.cluster_context as cc
+import sahara_plugin_mapr.plugins.mapr.domain.configuration_file as bcf
+import sahara_plugin_mapr.plugins.mapr.domain.distro as distro
+import sahara_plugin_mapr.plugins.mapr.services.management.management as mng
+import sahara_plugin_mapr.plugins.mapr.services.maprfs.maprfs as mfs
+import sahara_plugin_mapr.plugins.mapr.services.oozie.oozie as oozie
+from sahara_plugin_mapr.plugins.mapr.services.swift import swift
+import sahara_plugin_mapr.plugins.mapr.services.yarn.yarn as yarn
+import sahara_plugin_mapr.plugins.mapr.util.general as g
+import sahara_plugin_mapr.plugins.mapr.util.service_utils as su

 CONF = cfg.CONF
 CONF.import_opt("enable_data_locality", "sahara.topology.topology_helper")
@@ -275,12 +274,12 @@ class BaseClusterContext(cc.AbstractClusterContext):
     def get_service(self, node_process):
         ui_name = self.get_service_name_by_node_process(node_process)
         if ui_name is None:
-            raise e.InvalidDataException(
+            raise e.PluginInvalidDataException(
                 _('Service not found in services list'))
         version = self.get_chosen_service_version(ui_name)
         service = self._find_service_instance(ui_name, version)
         if service is None:
-            raise e.InvalidDataException(_('Can not map service'))
+            raise e.PluginInvalidDataException(_('Can not map service'))
         return service

     def _find_service_instance(self, ui_name, version):
@@ -417,7 +416,7 @@ class BaseClusterContext(cc.AbstractClusterContext):
         services = self.get_cluster_services(node_group)
         user_configs = node_group.configuration()
         default_configs = self.get_services_configs_dict(services)
-        return sahara_configs.merge_configs(default_configs, user_configs)
+        return u.merge_configs(default_configs, user_configs)

     def get_config_files(self, node_group):
         services = self.get_cluster_services(node_group)
@@ -13,9 +13,9 @@
 # under the License.


-import sahara.plugins.mapr.abstract.cluster_validator as v
-import sahara.plugins.mapr.util.validation_utils as vu
-import sahara.plugins.mapr.versions.version_handler_factory as vhf
+import sahara_plugin_mapr.plugins.mapr.abstract.cluster_validator as v
+import sahara_plugin_mapr.plugins.mapr.util.validation_utils as vu
+import sahara_plugin_mapr.plugins.mapr.versions.version_handler_factory as vhf


 class BaseValidator(v.AbstractValidator):
@@ -15,15 +15,14 @@

 import os

-from sahara import context
-import sahara.plugins.mapr.util.maprfs_helper as mfs
-import sahara.plugins.mapr.versions.version_handler_factory as vhf
+from sahara.plugins import context
+from sahara.plugins import edp
-from sahara.service.edp.job_binaries import manager as jb_manager
-import sahara.service.edp.oozie.engine as e
-from sahara.utils import edp
+import sahara_plugin_mapr.plugins.mapr.util.maprfs_helper as mfs
+import sahara_plugin_mapr.plugins.mapr.versions.version_handler_factory as vhf


-class MapROozieJobEngine(e.OozieJobEngine):
+class MapROozieJobEngine(edp.PluginsOozieJobEngine):
     def __init__(self, cluster):
         super(MapROozieJobEngine, self).__init__(cluster)
         self.cluster_context = self._get_cluster_context(self.cluster)
@@ -15,12 +15,12 @@

 import functools

-from sahara.i18n import _
-import sahara.plugins.mapr.abstract.health_checker as hc
-from sahara.plugins.mapr.domain import node_process as np
-from sahara.plugins.mapr.services.management import management
-from sahara.plugins.mapr.services.spark import spark
-from sahara.service.health import health_check_base
+from sahara.plugins import health_check_base
+from sahara_plugin_mapr.i18n import _
+import sahara_plugin_mapr.plugins.mapr.abstract.health_checker as hc
+from sahara_plugin_mapr.plugins.mapr.domain import node_process as np
+from sahara_plugin_mapr.plugins.mapr.services.management import management
+from sahara_plugin_mapr.plugins.mapr.services.spark import spark


 class BaseHealthChecker(hc.AbstractHealthChecker):
@@ -19,14 +19,14 @@ from oslo_log import log as logging
 from oslo_serialization import jsonutils as json
 from oslo_utils import timeutils

-from sahara import context
-from sahara.i18n import _
+from sahara.plugins import context
 import sahara.plugins.exceptions as ex
-import sahara.plugins.mapr.abstract.node_manager as s
-import sahara.plugins.mapr.services.management.management as mng
-import sahara.plugins.mapr.services.maprfs.maprfs as mfs
-import sahara.plugins.mapr.util.event_log as el
-from sahara.utils import cluster_progress_ops as cpo
+from sahara.plugins import utils
+from sahara_plugin_mapr.i18n import _
+import sahara_plugin_mapr.plugins.mapr.abstract.node_manager as s
+import sahara_plugin_mapr.plugins.mapr.services.management.management as mng
+import sahara_plugin_mapr.plugins.mapr.services.maprfs.maprfs as mfs
+import sahara_plugin_mapr.plugins.mapr.util.event_log as el

 LOG = logging.getLogger(__name__)

@@ -80,14 +80,15 @@ class BaseNodeManager(s.AbstractNodeManager):
         others = filter(
             lambda i: not cluster_context.check_for_process(i, mfs.CLDB),
             instances)
-        cpo.add_provisioning_step(cluster_context.cluster.id,
-                                  _("Start ZooKeepers nodes"), len(zookeepers))
+        utils.add_provisioning_step(cluster_context.cluster.id,
+                                    _("Start ZooKeepers nodes"),
+                                    len(zookeepers))
         self._start_zk_nodes(zookeepers)
-        cpo.add_provisioning_step(cluster_context.cluster.id,
+        utils.add_provisioning_step(cluster_context.cluster.id,
                                     _("Start CLDB nodes"), len(cldbs))
         self._start_cldb_nodes(cldbs)
         if others:
-            cpo.add_provisioning_step(cluster_context.cluster.id,
+            utils.add_provisioning_step(cluster_context.cluster.id,
                                         _("Start non-CLDB nodes"),
                                         len(list(others)))
             self._start_non_cldb_nodes(others)
@@ -96,10 +97,11 @@ class BaseNodeManager(s.AbstractNodeManager):
     def stop(self, cluster_context, instances=None):
         instances = instances or cluster_context.get_instances()
         zookeepers = cluster_context.filter_instances(instances, mng.ZOOKEEPER)
-        cpo.add_provisioning_step(cluster_context.cluster.id,
-                                  _("Stop ZooKeepers nodes"), len(zookeepers))
+        utils.add_provisioning_step(cluster_context.cluster.id,
+                                    _("Stop ZooKeepers nodes"),
+                                    len(zookeepers))
         self._stop_zk_nodes(zookeepers)
-        cpo.add_provisioning_step(cluster_context.cluster.id,
+        utils.add_provisioning_step(cluster_context.cluster.id,
                                     _("Stop Warden nodes"), len(instances))
         self._stop_warden_on_nodes(instances)

@@ -133,13 +135,13 @@ class BaseNodeManager(s.AbstractNodeManager):
             raise ex.HadoopProvisionError(_("CLDB failed to start"))

     def _start_nodes(self, instances, sys_service):
-        with context.ThreadGroup() as tg:
+        with context.PluginsThreadGroup() as tg:
             for instance in instances:
                 tg.spawn('start-%s-%s' % (sys_service, instance.id),
                          self._start_service, instance, sys_service)

     def _stop_nodes(self, instances, sys_service):
-        with context.ThreadGroup() as tg:
+        with context.PluginsThreadGroup() as tg:
             for instance in instances:
                 tg.spawn('stop-%s-%s' % (sys_service, instance.id),
                          self._stop_service, instance, sys_service)
@@ -15,23 +15,23 @@

 import collections as c

-from sahara.i18n import _
-import sahara.plugins.mapr.abstract.version_handler as vh
-import sahara.plugins.mapr.base.base_cluster_configurer as base_conf
-import sahara.plugins.mapr.base.base_cluster_validator as bv
-import sahara.plugins.mapr.base.base_edp_engine as edp
-import sahara.plugins.mapr.base.base_health_checker as health
-import sahara.plugins.mapr.base.base_node_manager as bs
-from sahara.plugins.mapr import images
-import sahara.plugins.mapr.util.general as util
 import sahara.plugins.provisioning as p
 import sahara.plugins.utils as u
+from sahara_plugin_mapr.i18n import _
+import sahara_plugin_mapr.plugins.mapr.abstract.version_handler as vh
+import sahara_plugin_mapr.plugins.mapr.base.base_cluster_configurer as b_conf
+import sahara_plugin_mapr.plugins.mapr.base.base_cluster_validator as bv
+import sahara_plugin_mapr.plugins.mapr.base.base_edp_engine as edp
+import sahara_plugin_mapr.plugins.mapr.base.base_health_checker as health
+import sahara_plugin_mapr.plugins.mapr.base.base_node_manager as bs
+from sahara_plugin_mapr.plugins.mapr import images
+import sahara_plugin_mapr.plugins.mapr.util.general as util


 class BaseVersionHandler(vh.AbstractVersionHandler):
     def __init__(self):
         self._validator = bv.BaseValidator()
-        self._configurer = base_conf.BaseConfigurer()
+        self._configurer = b_conf.BaseConfigurer()
         self._health_checker = health.BaseHealthChecker()
         self._node_manager = bs.BaseNodeManager()
         self._version = None
@@ -20,9 +20,9 @@ import re
 import jinja2 as j2
 import six

-import sahara.exceptions as e
-from sahara.i18n import _
-import sahara.utils.xmlutils as xml
+import sahara.plugins.exceptions as e
+import sahara.plugins.utils as utils
+from sahara_plugin_mapr.i18n import _


 @six.add_metaclass(abc.ABCMeta)
@@ -88,12 +88,12 @@ class HadoopXML(BaseConfigurationFile):
         super(HadoopXML, self).__init__(file_name)

     def parse(self, content):
-        configs = xml.parse_hadoop_xml_with_name_and_value(content)
+        configs = utils.parse_hadoop_xml_with_name_and_value(content)
         for cfg in configs:
             self.add_property(cfg["name"], cfg["value"])

     def render(self):
-        return xml.create_hadoop_xml(self._config_dict)
+        return utils.create_hadoop_xml(self._config_dict)


 class RawFile(BaseConfigurationFile):
@@ -141,7 +141,8 @@ class TemplateFile(BaseConfigurationFile):
         if template:
             return template.render(arg_dict)
         else:
-            raise e.InvalidDataException(_('Template object must be defined'))
+            raise e.PluginsInvalidDataException(
+                _('Template object must be defined'))

     def render(self):
         return self._j2_render(self._template, self._config_dict)
@@ -15,9 +15,9 @@

 import oslo_serialization.jsonutils as json

-from sahara.i18n import _
-import sahara.plugins.mapr.util.general as util
-from sahara.utils import poll_utils as polls
+from sahara.plugins import utils as plugin_utils
+from sahara_plugin_mapr.i18n import _
+import sahara_plugin_mapr.plugins.mapr.util.general as util


 WARDEN_MANAGED_CMD = ('sudo -u mapr maprcli node services'
@@ -77,7 +77,7 @@ class NodeProcess(object):
             'instance': instance.instance_name,
             'status': status.name,
         }
-        return polls.poll(
+        return plugin_utils.poll(
             get_status=lambda: self.status(instance) == status,
             operation_name=operation_name.format(**args),
             timeout=timeout,
@@ -17,15 +17,14 @@ from oslo_log import log as logging
 from oslo_serialization import jsonutils as json
 import six

-import sahara.exceptions as e
-from sahara.i18n import _
 import sahara.plugins.exceptions as ex
-from sahara.plugins.mapr.util import commands as cmd
-from sahara.plugins.mapr.util import event_log as el
-from sahara.plugins.mapr.util import general as g
-from sahara.plugins.mapr.util import service_utils as su
 import sahara.plugins.provisioning as p
-from sahara.utils import files
+from sahara.plugins import utils
+from sahara_plugin_mapr.i18n import _
+from sahara_plugin_mapr.plugins.mapr.util import commands as cmd
+from sahara_plugin_mapr.plugins.mapr.util import event_log as el
+from sahara_plugin_mapr.plugins.mapr.util import general as g
+from sahara_plugin_mapr.plugins.mapr.util import service_utils as su

 LOG = logging.getLogger(__name__)

@@ -160,7 +159,7 @@ class Service(object):
         return {self.ui_name: result}

     def _load_config_file(self, file_path=None):
-        return json.loads(files.get_file_text(file_path))
+        return json.loads(utils.get_file_text(file_path, 'sahara_plugin_mapr'))

     def get_config_files(self, cluster_context, configs, instance=None):
         return []
@@ -180,7 +179,7 @@ class Service(object):
             raise ex.HadoopProvisionError(_("Config missing 'name'"))

         if conf_value is None:
-            raise e.InvalidDataException(
+            raise ex.PluginInvalidDataException(
                 _("Config '%s' missing 'value'") % conf_name)

         if high_priority or item.get('priority', 2) == 1:
@@ -19,7 +19,8 @@ from sahara.plugins import utils as plugin_utils

 _validator = images.SaharaImageValidator.from_yaml(
     'plugins/mapr/resources/images/image.yaml',
-    resource_roots=['plugins/mapr/resources/images'])
+    resource_roots=['plugins/mapr/resources/images'],
+    package='sahara_plugin_mapr')


 def get_image_arguments():
@@ -13,9 +13,9 @@
 # under the License.


-from sahara.i18n import _
-import sahara.plugins.mapr.versions.version_handler_factory as vhf
 import sahara.plugins.provisioning as p
+from sahara_plugin_mapr.i18n import _
+import sahara_plugin_mapr.plugins.mapr.versions.version_handler_factory as vhf


 class MapRPlugin(p.ProvisioningPluginBase):
@@ -13,10 +13,10 @@
 # under the License.


-import sahara.plugins.mapr.domain.node_process as np
-import sahara.plugins.mapr.domain.service as s
-import sahara.plugins.mapr.util.commands as cmd
-import sahara.plugins.mapr.util.validation_utils as vu
+import sahara_plugin_mapr.plugins.mapr.domain.node_process as np
+import sahara_plugin_mapr.plugins.mapr.domain.service as s
+import sahara_plugin_mapr.plugins.mapr.util.commands as cmd
+import sahara_plugin_mapr.plugins.mapr.util.validation_utils as vu

 DRILL = np.NodeProcess(
     name='drill-bits',
@@ -13,9 +13,9 @@
 # under the License.


-import sahara.plugins.mapr.domain.node_process as np
-import sahara.plugins.mapr.domain.service as s
-import sahara.plugins.mapr.util.validation_utils as vu
+import sahara_plugin_mapr.plugins.mapr.domain.node_process as np
+import sahara_plugin_mapr.plugins.mapr.domain.service as s
+import sahara_plugin_mapr.plugins.mapr.util.validation_utils as vu


 FLUME = np.NodeProcess(
@@ -13,10 +13,10 @@
 # under the License.


-import sahara.plugins.mapr.domain.configuration_file as bcf
-import sahara.plugins.mapr.domain.node_process as np
-import sahara.plugins.mapr.domain.service as s
-import sahara.plugins.mapr.util.validation_utils as vu
+import sahara_plugin_mapr.plugins.mapr.domain.configuration_file as bcf
+import sahara_plugin_mapr.plugins.mapr.domain.node_process as np
+import sahara_plugin_mapr.plugins.mapr.domain.service as s
+import sahara_plugin_mapr.plugins.mapr.util.validation_utils as vu


 HBASE_MASTER = np.NodeProcess(
@@ -16,13 +16,13 @@
 from oslo_log import log as logging

 from sahara.i18n import _
-import sahara.plugins.mapr.domain.configuration_file as bcf
-import sahara.plugins.mapr.domain.node_process as np
-import sahara.plugins.mapr.domain.service as s
-import sahara.plugins.mapr.services.sentry.sentry as sentry
-import sahara.plugins.mapr.util.event_log as el
-import sahara.plugins.mapr.util.validation_utils as vu
 import sahara.utils.files as files
+import sahara_plugin_mapr.plugins.mapr.domain.configuration_file as bcf
+import sahara_plugin_mapr.plugins.mapr.domain.node_process as np
+import sahara_plugin_mapr.plugins.mapr.domain.service as s
+import sahara_plugin_mapr.plugins.mapr.services.sentry.sentry as sentry
+import sahara_plugin_mapr.plugins.mapr.util.event_log as el
+import sahara_plugin_mapr.plugins.mapr.util.validation_utils as vu

 LOG = logging.getLogger(__name__)

@@ -58,7 +58,8 @@ class Hive(s.Service):
         hive_site.remote_path = self.conf_dir(cluster_context)
         if instance:
             hive_site.fetch(instance)
-        hive_site.parse(files.get_file_text(hive_default))
+        hive_site.parse(files.get_file_text(hive_default,
+                                            'sahara_plugin_mapr'))
         hive_site.add_properties(self._get_hive_site_props(cluster_context))
         sentry_host = cluster_context.get_instance(sentry.SENTRY)
         if sentry_host:
@@ -74,19 +75,21 @@ class Hive(s.Service):
                 'sentry-default.xml'
             sentry_db = \
                 'plugins/mapr/services/hive/resources/sentry-db.xml'
-            hive_site.parse(files.get_file_text(sentry_default))
+            hive_site.parse(files.get_file_text(sentry_default,
+                                                'sahara_plugin_mapr'))
             hive_site.add_property('hive.sentry.conf.url',
                                    'file://%s/sentry-site.xml' %
                                    sentry_service.conf_dir(
                                        cluster_context))
             if sentry_mode == sentry.DB_STORAGE_SENTRY_MODE:
-                hive_site.parse(files.get_file_text(sentry_db))
+                hive_site.parse(files.get_file_text(sentry_db,
+                                                    'sahara_plugin_mapr'))

         return [hive_site]

     def _get_hive_site_props(self, cluster_context):
         # Import here to resolve circular dependency
-        from sahara.plugins.mapr.services.mysql import mysql
+        from sahara_plugin_mapr.plugins.mapr.services.mysql import mysql

         zookeepers = cluster_context.get_zookeeper_nodes_ip()
         metastore_specs = mysql.MySQL.METASTORE_SPECS
@@ -104,7 +107,7 @@ class Hive(s.Service):

     def _get_jdbc_uri(self, cluster_context):
         # Import here to resolve circular dependency
-        from sahara.plugins.mapr.services.mysql import mysql
+        from sahara_plugin_mapr.plugins.mapr.services.mysql import mysql

         jdbc_uri = ('jdbc:mysql://%(db_host)s:%(db_port)s/%(db_name)s?'
                     'createDatabaseIfNotExist=true')
@@ -122,7 +125,7 @@ class Hive(s.Service):

     def post_start(self, cluster_context, instances):
         # Import here to resolve circular dependency
-        import sahara.plugins.mapr.services.maprfs.maprfs as mfs
+        import sahara_plugin_mapr.plugins.mapr.services.maprfs.maprfs as mfs

         create_path = lambda p: 'sudo -u mapr hadoop fs -mkdir %s' % p
         check_path = 'sudo -u mapr hadoop fs -ls %s'
@@ -13,9 +13,9 @@
 # under the License.


-import sahara.plugins.mapr.domain.node_process as np
-import sahara.plugins.mapr.domain.service as s
-import sahara.plugins.mapr.util.validation_utils as vu
+import sahara_plugin_mapr.plugins.mapr.domain.node_process as np
+import sahara_plugin_mapr.plugins.mapr.domain.service as s
+import sahara_plugin_mapr.plugins.mapr.util.validation_utils as vu


 HTTP_FS = np.NodeProcess(
@@ -19,27 +19,27 @@ import string

 from oslo_log import log as logging

-from sahara.i18n import _
-import sahara.plugins.mapr.domain.configuration_file as bcf
-import sahara.plugins.mapr.domain.node_process as np
-import sahara.plugins.mapr.domain.service as s
-import sahara.plugins.mapr.services.hbase.hbase as hbase
-import sahara.plugins.mapr.services.hive.hive as hive
-import sahara.plugins.mapr.services.httpfs.httpfs as httpfs
-import sahara.plugins.mapr.services.impala.impala as impala
-import sahara.plugins.mapr.services.mapreduce.mapreduce as mr
-import sahara.plugins.mapr.services.mysql.mysql as mysql
-import sahara.plugins.mapr.services.oozie.oozie as oozie
-import sahara.plugins.mapr.services.sentry.sentry as sentry
-import sahara.plugins.mapr.services.spark.spark as spark
-import sahara.plugins.mapr.services.sqoop.sqoop2 as sqoop
-import sahara.plugins.mapr.services.yarn.yarn as yarn
-import sahara.plugins.mapr.util.event_log as el
-import sahara.plugins.mapr.util.general as g
-import sahara.plugins.mapr.util.password_utils as pu
-import sahara.plugins.mapr.util.validation_utils as vu
 import sahara.plugins.provisioning as p
-import sahara.utils.files as files
+import sahara.plugins.utils as utils
+from sahara_plugin_mapr.i18n import _
+import sahara_plugin_mapr.plugins.mapr.domain.configuration_file as bcf
+import sahara_plugin_mapr.plugins.mapr.domain.node_process as np
+import sahara_plugin_mapr.plugins.mapr.domain.service as s
+import sahara_plugin_mapr.plugins.mapr.services.hbase.hbase as hbase
+import sahara_plugin_mapr.plugins.mapr.services.hive.hive as hive
+import sahara_plugin_mapr.plugins.mapr.services.httpfs.httpfs as httpfs
+import sahara_plugin_mapr.plugins.mapr.services.impala.impala as impala
+import sahara_plugin_mapr.plugins.mapr.services.mapreduce.mapreduce as mr
+import sahara_plugin_mapr.plugins.mapr.services.mysql.mysql as mysql
+import sahara_plugin_mapr.plugins.mapr.services.oozie.oozie as oozie
+import sahara_plugin_mapr.plugins.mapr.services.sentry.sentry as sentry
+import sahara_plugin_mapr.plugins.mapr.services.spark.spark as spark
+import sahara_plugin_mapr.plugins.mapr.services.sqoop.sqoop2 as sqoop
+import sahara_plugin_mapr.plugins.mapr.services.yarn.yarn as yarn
+import sahara_plugin_mapr.plugins.mapr.util.event_log as el
+import sahara_plugin_mapr.plugins.mapr.util.general as g
+import sahara_plugin_mapr.plugins.mapr.util.password_utils as pu
+import sahara_plugin_mapr.plugins.mapr.util.validation_utils as vu

 LOG = logging.getLogger(__name__)

@@ -102,7 +102,8 @@ class Hue(s.Service):
         # hue.ini
         hue_ini = bcf.TemplateFile("hue.ini")
         hue_ini.remote_path = self.conf_dir(cluster_context)
-        hue_ini.parse(files.get_file_text(template % self.version))
+        hue_ini.parse(utils.get_file_text(template % self.version,
+                                          'sahara_plugin_mapr'))
         hue_ini.add_properties(self._get_hue_ini_props(cluster_context))
         hue_ini.add_property("thrift_version",
                              configs[self.THRIFT_VERSION.name])
@@ -112,7 +113,8 @@ class Hue(s.Service):
                          'resources/hue_sh_%s.template'
         hue_sh = bcf.TemplateFile("hue.sh")
         hue_sh.remote_path = self.home_dir(cluster_context) + '/bin'
-        hue_sh.parse(files.get_file_text(hue_sh_template % self.version))
+        hue_sh.parse(utils.get_file_text(hue_sh_template % self.version,
+                                         'sahara_plugin_mapr'))
         hue_sh.add_property('hadoop_version', cluster_context.hadoop_version)
         hue_sh.mode = 777

@@ -339,14 +341,16 @@ class HueLivyV310(s.Service):
         livy_conf_template = 'plugins/mapr/services/hue/' \
                              'resources/livy_conf_%s.template'
         livy_conf = bcf.TemplateFile("livy.conf")
-        livy_conf.parse(files.get_file_text(livy_conf_template % self.version))
+        livy_conf.parse(utils.get_file_text(livy_conf_template % self.version,
+                                            'sahara_plugin_mapr'))
         livy_conf.remote_path = self.home_dir(cluster_context) + '/conf'

         livy_sh_template = 'plugins/mapr/services/hue/' \
                            'resources/livy_sh_%s.template'
         livy_sh = bcf.TemplateFile("livy-env.sh")
         livy_sh.remote_path = self.home_dir(cluster_context) + '/conf'
-        livy_sh.parse(files.get_file_text(livy_sh_template % self.version))
+        livy_sh.parse(utils.get_file_text(livy_sh_template % self.version,
+                                          'sahara_plugin_mapr'))
         livy_sh.add_property('hadoop_version', cluster_context.hadoop_version)
         livy_sh.add_property('spark_version', spark.SparkOnYarnV201().version)
         livy_sh.mode = 777
@@ -13,15 +13,15 @@
 # under the License.


-import sahara.plugins.mapr.domain.configuration_file as bcf
-import sahara.plugins.mapr.domain.node_process as np
-import sahara.plugins.mapr.domain.service as s
-import sahara.plugins.mapr.services.hbase.hbase as hbase
-import sahara.plugins.mapr.services.hive.hive as hive
-import sahara.plugins.mapr.services.sentry.sentry as sentry
-import sahara.plugins.mapr.util.general as g
-import sahara.plugins.mapr.util.validation_utils as vu
-import sahara.utils.files as files
+import sahara.plugins.utils as utils
+import sahara_plugin_mapr.plugins.mapr.domain.configuration_file as bcf
+import sahara_plugin_mapr.plugins.mapr.domain.node_process as np
+import sahara_plugin_mapr.plugins.mapr.domain.service as s
+import sahara_plugin_mapr.plugins.mapr.services.hbase.hbase as hbase
+import sahara_plugin_mapr.plugins.mapr.services.hive.hive as hive
+import sahara_plugin_mapr.plugins.mapr.services.sentry.sentry as sentry
+import sahara_plugin_mapr.plugins.mapr.util.general as g
+import sahara_plugin_mapr.plugins.mapr.util.validation_utils as vu

 IMPALA_SERVER = np.NodeProcess(
     name='impalaserver',
@@ -82,7 +82,7 @@ class Impala(s.Service):
         impala_env.remote_path = self.conf_dir(cluster_context)
         if instance:
             impala_env.fetch(instance)
-        impala_env.parse(files.get_file_text(defaults))
+        impala_env.parse(utils.get_file_text(defaults, 'sahara_plugin_mapr'))
         impala_env.add_properties(self._get_impala_env_props(cluster_context))
         sentry_host = cluster_context.get_instance(sentry.SENTRY)
         if sentry_host:
@@ -13,9 +13,9 @@
 # under the License.


-import sahara.plugins.mapr.domain.node_process as np
-import sahara.plugins.mapr.domain.service as s
-import sahara.plugins.mapr.util.validation_utils as vu
+import sahara_plugin_mapr.plugins.mapr.domain.node_process as np
+import sahara_plugin_mapr.plugins.mapr.domain.service as s
+import sahara_plugin_mapr.plugins.mapr.util.validation_utils as vu

 KAFKA = np.NodeProcess(
     name='kafka',
@@ -13,9 +13,9 @@
 # under the License.


-import sahara.plugins.mapr.domain.node_process as np
-import sahara.plugins.mapr.domain.service as s
-import sahara.plugins.mapr.util.validation_utils as vu
+import sahara_plugin_mapr.plugins.mapr.domain.node_process as np
+import sahara_plugin_mapr.plugins.mapr.domain.service as s
+import sahara_plugin_mapr.plugins.mapr.util.validation_utils as vu

 MAHOUT = np.NodeProcess(
     name='mahout',
Some files were not shown because too many files have changed in this diff.
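The change repeated across these modules follows one pattern: plugin code imports its MapR helpers from the new sahara_plugin_mapr package, reaches sahara core only through the sahara.plugins.* interfaces, and names its own package when loading bundled resources. A minimal sketch of that pattern, assuming only what appears in the diff above (the function name and resource path below are illustrative, not taken from the tree):

    # Illustrative sketch; only utils.get_file_text and the package name
    # 'sahara_plugin_mapr' come from the diff above.
    from sahara.plugins import utils


    def load_plugin_resource(path):
        # The second argument makes sahara resolve the file inside the
        # sahara_plugin_mapr package rather than inside sahara core.
        return utils.get_file_text(path, 'sahara_plugin_mapr')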