Plugins split from sahara core
Change-Id: I43e0beec6508f93a436a150749bfa23571986b9d
parent fda734808a
commit 0637de5fdb
@@ -1,3 +1,3 @@
 [DEFAULT]
-test_path=./sahara/tests/unit
+test_path=./sahara_plugin_vanilla/tests/unit
 top_dir=./
@@ -0,0 +1,35 @@
========================
Team and repository tags
========================

.. image:: https://governance.openstack.org/tc/badges/sahara.svg
    :target: https://governance.openstack.org/tc/reference/tags/index.html

.. Change things from this point on

OpenStack Data Processing ("Sahara") project
============================================

Sahara at wiki.openstack.org: https://wiki.openstack.org/wiki/Sahara

Storyboard project: https://storyboard.openstack.org/#!/project/935

Sahara docs site: https://docs.openstack.org/sahara/latest/

Roadmap: https://wiki.openstack.org/wiki/Sahara/Roadmap

Quickstart guide: https://docs.openstack.org/sahara/latest/user/quickstart.html

How to participate: https://docs.openstack.org/sahara/latest/contributor/how-to-participate.html

Source: https://git.openstack.org/cgit/openstack/sahara

Bugs and feature requests: https://storyboard.openstack.org/#!/project/935

Release notes: https://docs.openstack.org/releasenotes/sahara/

License
-------

Apache License Version 2.0 http://www.apache.org/licenses/LICENSE-2.0
@@ -0,0 +1,9 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
openstackdocstheme>=1.18.1 # Apache-2.0
os-api-ref>=1.4.0 # Apache-2.0
reno>=2.5.0 # Apache-2.0
sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD
sphinxcontrib-httpdomain>=1.3.0 # BSD
whereto>=0.3.0 # Apache-2.0
@@ -0,0 +1,34 @@
py27 develop-inst-nodeps: /home/tenobreg/coding/upstream/sahara/sahara
py27 installed: alabaster==0.7.11,alembic==1.0.0,amqp==2.3.2,appdirs==1.4.3,asn1crypto==0.24.0,astroid==1.3.8,Babel==2.6.0,bandit==1.5.0,bashate==0.6.0,bcrypt==3.1.4,botocore==1.10.62,cachetools==2.1.0,castellan==0.18.0,certifi==2018.4.16,cffi==1.11.5,chardet==3.0.4,click==6.7,cliff==2.13.0,cmd2==0.8.8,contextlib2==0.5.5,coverage==4.5.1,cryptography==2.3,debtcollector==1.20.0,decorator==4.3.0,deprecation==2.0.5,doc8==0.8.0,docutils==0.14,dogpile.cache==0.6.6,dulwich==0.19.5,enum-compat==0.0.2,enum34==1.1.6,eventlet==0.20.0,extras==1.0.0,fasteners==0.14.1,fixtures==3.0.0,flake8==2.5.5,Flask==1.0.2,funcsigs==1.0.2,functools32==3.2.3.post2,future==0.16.0,futures==3.2.0,futurist==1.7.0,gitdb2==2.0.4,GitPython==2.1.11,greenlet==0.4.13,hacking==0.12.0,idna==2.7,imagesize==1.0.0,ipaddress==1.0.22,iso8601==0.1.12,itsdangerous==0.24,Jinja2==2.10,jmespath==0.9.3,jsonpatch==1.23,jsonpointer==2.0,jsonschema==2.6.0,keystoneauth1==3.10.0,keystonemiddleware==5.2.0,kombu==4.2.1,linecache2==1.0.0,logilab-common==1.4.2,Mako==1.0.7,MarkupSafe==1.0,mccabe==0.2.1,mock==2.0.0,monotonic==1.5,mox3==0.26.0,msgpack==0.5.6,munch==2.3.2,netaddr==0.7.19,netifaces==0.10.7,openstackdocstheme==1.22.0,openstacksdk==0.17.2,os-api-ref==1.5.0,os-client-config==1.31.2,os-service-types==1.3.0,os-testr==1.0.0,osc-lib==1.11.1,oslo.cache==1.30.1,oslo.concurrency==3.27.0,oslo.config==6.4.0,oslo.context==2.21.0,oslo.db==4.40.0,oslo.i18n==3.21.0,oslo.log==3.39.0,oslo.messaging==8.1.0,oslo.middleware==3.36.0,oslo.policy==1.38.1,oslo.rootwrap==5.14.1,oslo.serialization==2.27.0,oslo.service==1.31.3,oslo.utils==3.36.4,oslotest==3.6.0,packaging==17.1,paramiko==2.4.1,Paste==2.0.3,PasteDeploy==1.5.2,pbr==4.2.0,pep8==1.5.7,prettytable==0.7.2,psycopg2==2.7.5,pyasn1==0.4.3,pycadf==2.8.0,pycparser==2.18,pyflakes==0.8.1,Pygments==2.2.0,pyinotify==0.9.6,pylint==1.4.5,PyMySQL==0.9.2,PyNaCl==1.2.1,pyOpenSSL==18.0.0,pyparsing==2.2.0,pyperclip==1.6.4,python-barbicanclient==4.7.0,python-cinderclient==4.0.1,python-dateutil==2.7.3,python-editor==1.0.3,python-glanceclient==2.12.1,python-heatclient==1.16.1,python-keystoneclient==3.17.0,python-manilaclient==1.24.1,python-mimeparse==1.6.0,python-neutronclient==6.9.0,python-novaclient==11.0.0,python-openstackclient==3.16.0,python-saharaclient==2.0.0,python-subunit==1.3.0,python-swiftclient==3.6.0,pytz==2018.5,PyYAML==3.13,reno==2.9.2,repoze.lru==0.7,requests==2.19.1,requestsexceptions==1.4.0,restructuredtext-lint==1.1.3,rfc3986==1.1.0,Routes==2.4.1,-e git+https://github.com/openstack/sahara.git@efb05b3624044f307168d0b5da888132f51aebb7#egg=sahara,simplejson==3.16.0,six==1.11.0,smmap2==2.0.4,snowballstemmer==1.2.1,Sphinx==1.7.6,sphinxcontrib-httpdomain==1.7.0,sphinxcontrib-websupport==1.1.0,SQLAlchemy==1.2.10,sqlalchemy-migrate==0.11.0,sqlparse==0.2.4,statsd==3.2.2,stestr==2.1.0,stevedore==1.29.0,subprocess32==3.5.2,Tempita==0.5.2,tenacity==4.12.0,testresources==2.0.1,testscenarios==0.5.0,testtools==2.3.0,tooz==1.62.0,traceback2==1.4.0,typing==3.6.4,unicodecsv==0.14.1,unittest2==1.1.0,urllib3==1.23,vine==1.1.4,voluptuous==0.11.1,warlock==1.3.0,wcwidth==0.1.7,WebOb==1.8.2,Werkzeug==0.14.1,wrapt==1.10.11
py27 runtests: PYTHONHASHSEED='839100177'
py27 runtests: commands[0] | ostestr

=========================
Failures during discovery
=========================
--- import errors ---
Failed to import test module: sahara.tests.unit.service.edp.spark.test_shell
Traceback (most recent call last):
  File "/home/tenobreg/coding/upstream/sahara/sahara/.tox/py27/lib/python2.7/site-packages/unittest2/loader.py", line 456, in _find_test_path
    module = self._get_module_from_name(name)
  File "/home/tenobreg/coding/upstream/sahara/sahara/.tox/py27/lib/python2.7/site-packages/unittest2/loader.py", line 395, in _get_module_from_name
    __import__(name)
  File "sahara/tests/unit/service/edp/spark/test_shell.py", line 18, in <module>
    from sahara.plugins.spark import shell_engine
ImportError: No module named spark

Failed to import test module: sahara.tests.unit.service.edp.spark.test_spark
Traceback (most recent call last):
  File "/home/tenobreg/coding/upstream/sahara/sahara/.tox/py27/lib/python2.7/site-packages/unittest2/loader.py", line 456, in _find_test_path
    module = self._get_module_from_name(name)
  File "/home/tenobreg/coding/upstream/sahara/sahara/.tox/py27/lib/python2.7/site-packages/unittest2/loader.py", line 395, in _get_module_from_name
    __import__(name)
  File "sahara/tests/unit/service/edp/spark/test_spark.py", line 17, in <module>
    from sahara.plugins.spark import edp_engine as spark_edp
ImportError: No module named spark

================================================================================
The above traceback was encountered during test discovery which imports all the found test modules in the specified test_path.
ERROR: InvocationError: '/home/tenobreg/coding/upstream/sahara/sahara/.tox/py27/bin/ostestr'
___________________________________ summary ____________________________________
ERROR: py27: commands failed
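The discovery failures above come from unit tests that still import the Spark plugin from its old in-tree location, sahara.plugins.spark, which stops resolving once plugins live outside sahara core. A minimal sketch of the behaviour, assuming a hypothetical split-out package name sahara_plugin_spark for the Spark plugin (this commit itself only covers the vanilla plugin):

# Sketch: the old in-tree module path no longer exists after the split, so the
# import raises ImportError; a split-out plugin package would expose it instead.
# "sahara_plugin_spark" is an assumed name used purely for illustration.
import importlib

for name in ("sahara.plugins.spark.shell_engine",                 # pre-split path
             "sahara_plugin_spark.plugins.spark.shell_engine"):   # assumed post-split path
    try:
        importlib.import_module(name)
        print(name, "imported")
    except ImportError as exc:
        print(name, "failed:", exc)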
@@ -0,0 +1,162 @@
alabaster==0.7.10
alembic==0.8.10
amqp==2.2.2
appdirs==1.4.3
asn1crypto==0.24.0
astroid==1.3.8
Babel==2.3.4
bandit==1.1.0
bashate==0.5.1
bcrypt==3.1.4
botocore==1.5.1
cachetools==2.0.1
castellan==0.16.0
certifi==2018.1.18
cffi==1.11.5
chardet==3.0.4
click==6.7
cliff==2.11.0
cmd2==0.8.1
contextlib2==0.5.5
coverage==4.0
cryptography==2.1.4
debtcollector==1.19.0
decorator==4.2.1
deprecation==2.0
doc8==0.6.0
docutils==0.14
dogpile.cache==0.6.5
dulwich==0.19.0
enum-compat==0.0.2
eventlet==0.18.2
extras==1.0.0
fasteners==0.14.1
fixtures==3.0.0
flake8==2.6.2
Flask==1.0.2
future==0.16.0
futurist==1.6.0
gitdb2==2.0.3
GitPython==2.1.8
greenlet==0.4.13
hacking==1.1.0
idna==2.6
imagesize==1.0.0
iso8601==0.1.11
itsdangerous==0.24
Jinja2==2.10
jmespath==0.9.3
jsonpatch==1.21
jsonpointer==2.0
jsonschema==2.6.0
keystoneauth1==3.4.0
keystonemiddleware==4.17.0
kombu==4.1.0
linecache2==1.0.0
logilab-common==1.4.1
Mako==1.0.7
MarkupSafe==1.0
mccabe==0.2.1
mock==2.0.0
monotonic==1.4
mox3==0.25.0
msgpack==0.5.6
munch==2.2.0
netaddr==0.7.19
netifaces==0.10.6
openstackdocstheme==1.18.1
openstacksdk==0.12.0
os-api-ref==1.4.0
os-client-config==1.29.0
os-service-types==1.2.0
osc-lib==1.10.0
oslo.cache==1.29.0
oslo.concurrency==3.26.0
oslo.config==5.2.0
oslo.context==2.19.2
oslo.db==4.27.0
oslo.i18n==3.15.3
oslo.log==3.36.0
oslo.messaging==5.29.0
oslo.middleware==3.31.0
oslo.policy==1.30.0
oslo.rootwrap==5.8.0
oslo.serialization==2.18.0
oslo.service==1.24.0
oslo.upgradecheck==0.1.0
oslo.utils==3.33.0
oslotest==3.2.0
packaging==17.1
paramiko==2.0.0
Paste==2.0.3
PasteDeploy==1.5.2
pbr==2.0.0
pika-pool==0.1.3
pika==0.10.0
prettytable==0.7.2
psycopg2==2.6.2
pyasn1==0.4.2
pycadf==2.7.0
pycparser==2.18
pycodestyle==2.4.0
pyflakes==0.8.1
Pygments==2.2.0
pyinotify==0.9.6
pylint==1.4.5
PyMySQL==0.7.6
PyNaCl==1.2.1
pyOpenSSL==17.5.0
pyparsing==2.2.0
pyperclip==1.6.0
python-barbicanclient==4.6.0
python-cinderclient==3.3.0
python-dateutil==2.7.0
python-editor==1.0.3
python-glanceclient==2.8.0
python-heatclient==1.10.0
python-keystoneclient==3.8.0
python-manilaclient==1.16.0
python-mimeparse==1.6.0
python-neutronclient==6.7.0
python-novaclient==9.1.0
python-openstackclient==3.14.0
python-saharaclient==1.4.0
python-subunit==1.2.0
python-swiftclient==3.2.0
pytz==2018.3
PyYAML==3.12
reno==2.5.0
repoze.lru==0.7
requests==2.14.2
requestsexceptions==1.4.0
restructuredtext-lint==1.1.3
rfc3986==1.1.0
Routes==2.4.1
simplejson==3.13.2
six==1.10.0
smmap2==2.0.3
snowballstemmer==1.2.1
Sphinx==1.6.2
sphinxcontrib-httpdomain==1.3.0
sphinxcontrib-websupport==1.0.1
sqlalchemy-migrate==0.11.0
SQLAlchemy==1.0.10
sqlparse==0.2.4
statsd==3.2.2
stestr==1.0.0
stevedore==1.20.0
Tempita==0.5.2
tenacity==4.9.0
testresources==2.0.0
testscenarios==0.4
testtools==2.2.0
tooz==1.58.0
traceback2==1.4.0
unittest2==1.1.0
urllib3==1.22
vine==1.1.4
voluptuous==0.11.1
warlock==1.3.0
WebOb==1.7.1
Werkzeug==0.14.1
wrapt==1.10.11
@@ -39,6 +39,7 @@ python-swiftclient>=3.2.0 # Apache-2.0
 python-neutronclient>=6.7.0 # Apache-2.0
 python-heatclient>=1.10.0 # Apache-2.0
 python-glanceclient>=2.8.0 # Apache-2.0
+sahara
 six>=1.10.0 # MIT
 stevedore>=1.20.0 # Apache-2.0
 SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT
@@ -13,14 +13,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from sahara.service.edp.oozie.workflow_creator import workflow_factory
-from sahara.utils import xmlutils
+from sahara.plugins import edp
+from sahara.plugins import utils


 def get_possible_hive_config_from(file_name):
     '''Return the possible configs, args, params for a Hive job.'''
     config = {
-        'configs': xmlutils.load_hadoop_xml_defaults(file_name),
+        'configs': utils.load_hadoop_xml_defaults(file_name,
+                                                  'sahara_plugin_vanilla'),
         'params': {}
     }
     return config
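This hunk shows the pattern that repeats through the rest of the change: helpers that read packaged resource files now receive the plugin package name, so files are resolved from the split-out sahara_plugin_vanilla package instead of sahara core. A rough usage sketch with the helper named in this diff (the XML path below is illustrative only):

# Sketch of the new call style used across this diff: the second argument names
# the package that ships the resource file. The path is an illustrative
# example, not a file introduced by this commit.
from sahara.plugins import utils

configs = utils.load_hadoop_xml_defaults(
    'plugins/vanilla/hadoop2/resources/core-default.xml',
    'sahara_plugin_vanilla')
print(len(configs), "default configs loaded")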
@@ -31,14 +32,15 @@ def get_possible_mapreduce_config_from(file_name):
     config = {
         'configs': get_possible_pig_config_from(file_name).get('configs')
     }
-    config['configs'] += workflow_factory.get_possible_mapreduce_configs()
+    config['configs'] += edp.get_possible_mapreduce_configs()
     return config


 def get_possible_pig_config_from(file_name):
     '''Return the possible configs, args, params for a Pig job.'''
     config = {
-        'configs': xmlutils.load_hadoop_xml_defaults(file_name),
+        'configs': utils.load_hadoop_xml_defaults(file_name,
+                                                  'sahara_plugin_vanilla'),
         'args': [],
         'params': {}
     }
@@ -13,13 +13,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from sahara.plugins import edp
 from sahara.plugins import exceptions as ex
 from sahara.plugins import utils as u
-from sahara.plugins.vanilla import utils as vu
-from sahara.service.edp.oozie import engine as edp_engine
+from sahara_plugin_vanilla.plugins.vanilla import utils as vu


-class EdpOozieEngine(edp_engine.OozieJobEngine):
+class EdpOozieEngine(edp.PluginsOozieJobEngine):
     def get_hdfs_user(self):
         return 'hadoop'
@@ -18,21 +18,16 @@ from oslo_config import cfg
 from oslo_log import log as logging
 import six

-from sahara import context
-from sahara.i18n import _
+from sahara.plugins import castellan_utils as key_manager
+from sahara.plugins import context
+from sahara.plugins import swift_helper as swift
+from sahara.plugins import topology_helper as th
 from sahara.plugins import utils
-from sahara.plugins.vanilla.hadoop2 import config_helper as c_helper
-from sahara.plugins.vanilla.hadoop2 import oozie_helper as o_helper
-from sahara.plugins.vanilla.hadoop2 import utils as u
-from sahara.plugins.vanilla import utils as vu
-from sahara.service.castellan import utils as key_manager
-from sahara.swift import swift_helper as swift
-from sahara.topology import topology_helper as th
-from sahara.utils import cluster_progress_ops as cpo
-from sahara.utils import configs as s_cfg
-from sahara.utils import files as f
-from sahara.utils import proxy
-from sahara.utils import xmlutils as x
+from sahara_plugin_vanilla.i18n import _
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config_helper
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import oozie_helper
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import utils as u
+from sahara_plugin_vanilla.plugins.vanilla import utils as vu

 CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
@@ -61,8 +56,8 @@ def configure_cluster(pctx, cluster):
     LOG.debug("Configuring cluster")
     if (CONF.use_identity_api_v3 and CONF.use_domain_for_proxy_users and
             vu.get_hiveserver(cluster) and
-            c_helper.is_swift_enabled(pctx, cluster)):
-        cluster = proxy.create_proxy_user_for_cluster(cluster)
+            config_helper.is_swift_enabled(pctx, cluster)):
+        cluster = utils.create_proxy_user_for_cluster(cluster)

     instances = utils.get_instances(cluster)
     configure_instances(pctx, instances)
@@ -74,7 +69,7 @@ def configure_cluster(pctx, cluster):
 def configure_zookeeper(cluster, instances=None):
     zk_servers = vu.get_zk_servers(cluster)
     if zk_servers:
-        zk_conf = c_helper.generate_zk_basic_config(cluster)
+        zk_conf = config_helper.generate_zk_basic_config(cluster)
         zk_conf += _form_zk_servers_to_quorum(cluster, instances)
         _push_zk_configs_to_nodes(cluster, zk_conf, instances)

@@ -126,7 +121,7 @@ def _push_spark_configs_to_node(cluster, extra):

 def _push_spark_configs_to_existing_node(spark_master, cluster, extra):

-    sp_home = c_helper.get_spark_home(cluster)
+    sp_home = config_helper.get_spark_home(cluster)
     files = {
         os.path.join(sp_home,
                      'conf/spark-env.sh'): extra['sp_master'],
@@ -160,13 +155,13 @@ def _extract_spark_configs_to_extra(cluster):

     config_master = ''
     if sp_master is not None:
-        config_master = c_helper.generate_spark_env_configs(cluster)
+        config_master = config_helper.generate_spark_env_configs(cluster)

     # Any node that might be used to run spark-submit will need
     # these libs for swift integration
-    config_defaults = c_helper.generate_spark_executor_classpath(cluster)
+    config_defaults = config_helper.generate_spark_executor_classpath(cluster)

-    extra['job_cleanup'] = c_helper.generate_job_cleanup_config(cluster)
+    extra['job_cleanup'] = config_helper.generate_job_cleanup_config(cluster)
     extra['sp_master'] = config_master
     extra['sp_defaults'] = config_defaults

@@ -177,7 +172,7 @@ def configure_instances(pctx, instances):
     if len(instances) == 0:
         return

-    cpo.add_provisioning_step(
+    utils.add_provisioning_step(
         instances[0].cluster_id, _("Configure instances"), len(instances))

     for instance in instances:
@@ -185,7 +180,7 @@ def configure_instances(pctx, instances):
         _configure_instance(pctx, instance)


-@cpo.event_wrapper(True)
+@utils.event_wrapper(True)
 def _configure_instance(pctx, instance):
     _provisioning_configs(pctx, instance)
     _post_configuration(pctx, instance)
@@ -201,8 +196,8 @@ def _generate_configs(pctx, instance):
     hadoop_xml_confs = _get_hadoop_configs(pctx, instance)
     user_xml_confs, user_env_confs = _get_user_configs(
         pctx, instance.node_group)
-    xml_confs = s_cfg.merge_configs(user_xml_confs, hadoop_xml_confs)
-    env_confs = s_cfg.merge_configs(pctx['env_confs'], user_env_confs)
+    xml_confs = utils.merge_configs(user_xml_confs, hadoop_xml_confs)
+    env_confs = utils.merge_configs(pctx['env_confs'], user_env_confs)

     return xml_confs, env_confs

@@ -249,20 +244,21 @@ def _get_hadoop_configs(pctx, instance):
        }
        confs['Hadoop'].update(hadoop_cfg)

-        oozie_cfg = o_helper.get_oozie_required_xml_configs(HADOOP_CONF_DIR)
-        if c_helper.is_mysql_enabled(pctx, cluster):
-            oozie_cfg.update(o_helper.get_oozie_mysql_configs(cluster))
+        oozie_cfg = oozie_helper.get_oozie_required_xml_configs(
+            HADOOP_CONF_DIR)
+        if config_helper.is_mysql_enabled(pctx, cluster):
+            oozie_cfg.update(oozie_helper.get_oozie_mysql_configs(cluster))

         confs['JobFlow'] = oozie_cfg

-    if c_helper.is_swift_enabled(pctx, cluster):
+    if config_helper.is_swift_enabled(pctx, cluster):
         swift_configs = {}
         for config in swift.get_swift_configs():
             swift_configs[config['name']] = config['value']

         confs['Hadoop'].update(swift_configs)

-    if c_helper.is_data_locality_enabled(pctx, cluster):
+    if config_helper.is_data_locality_enabled(pctx, cluster):
         confs['Hadoop'].update(th.TOPOLOGY_CONFIG)
         confs['Hadoop'].update({"topology.script.file.name":
                                 HADOOP_CONF_DIR + "/topology.sh"})
@@ -277,7 +273,7 @@ def _get_hadoop_configs(pctx, instance):
                 'jdbc:derby:;databaseName=/opt/hive/metastore_db;create=true'
         }

-        if c_helper.is_mysql_enabled(pctx, cluster):
+        if config_helper.is_mysql_enabled(pctx, cluster):
             hive_cfg.update({
                 'javax.jdo.option.ConnectionURL':
                     'jdbc:mysql://%s/metastore' % hive_hostname,
@@ -291,7 +287,7 @@ def _get_hadoop_configs(pctx, instance):
             })

         proxy_configs = cluster.cluster_configs.get('proxy_configs')
-        if proxy_configs and c_helper.is_swift_enabled(pctx, cluster):
+        if proxy_configs and config_helper.is_swift_enabled(pctx, cluster):
             hive_cfg.update({
                 swift.HADOOP_SWIFT_USERNAME: proxy_configs['proxy_username'],
                 swift.HADOOP_SWIFT_PASSWORD: key_manager.get_secret(
@@ -311,8 +307,8 @@ def _get_user_configs(pctx, node_group):
     cl_xml_confs, cl_env_confs = _separate_configs(
         node_group.cluster.cluster_configs, pctx['env_confs'])

-    xml_confs = s_cfg.merge_configs(cl_xml_confs, ng_xml_confs)
-    env_confs = s_cfg.merge_configs(cl_env_confs, ng_env_confs)
+    xml_confs = utils.merge_configs(cl_xml_confs, ng_xml_confs)
+    env_confs = utils.merge_configs(cl_env_confs, ng_env_confs)
     return xml_confs, env_confs


@@ -336,7 +332,7 @@ def _separate_configs(configs, all_env_configs):
 def _generate_xml(configs):
     xml_confs = {}
     for service, confs in six.iteritems(configs):
-        xml_confs[service] = x.create_hadoop_xml(confs)
+        xml_confs[service] = utils.create_hadoop_xml(confs)

     return xml_confs

@@ -417,8 +413,9 @@ def _post_configuration(pctx, instance):
         'hadoop_secure_dn_log_dir': dirs['hadoop_secure_dn_log_dir'],
         'yarn_log_dir': dirs['yarn_log_dir']
     }
-    post_conf_script = f.get_file_text(
-        'plugins/vanilla/hadoop2/resources/post_conf.template')
+    post_conf_script = utils.get_file_text(
+        'plugins/vanilla/hadoop2/resources/post_conf.template',
+        'sahara_plugin_vanilla')
     post_conf_script = post_conf_script.format(**args)

     with instance.remote() as r:
@@ -426,12 +423,11 @@ def _post_configuration(pctx, instance):
         r.execute_command('chmod +x /tmp/post_conf.sh')
         r.execute_command('sudo /tmp/post_conf.sh')

-        if c_helper.is_data_locality_enabled(pctx,
-                                             instance.cluster):
+        if config_helper.is_data_locality_enabled(pctx, instance.cluster):
             t_script = HADOOP_CONF_DIR + '/topology.sh'
-            r.write_file_to(t_script, f.get_file_text(
-                'plugins/vanilla/hadoop2/resources/topology.sh'),
-                run_as_root=True)
+            r.write_file_to(t_script, utils.get_file_text(
+                'plugins/vanilla/hadoop2/resources/topology.sh',
+                'sahara_plugin_vanilla'), run_as_root=True)
             r.execute_command('chmod +x ' + t_script, run_as_root=True)


@@ -456,10 +452,10 @@ def _make_hadoop_paths(paths, hadoop_dir):
     return [path + hadoop_dir for path in paths]


-@cpo.event_wrapper(
+@utils.event_wrapper(
     True, step=_("Configure topology data"), param=('cluster', 1))
 def configure_topology_data(pctx, cluster):
-    if c_helper.is_data_locality_enabled(pctx, cluster):
+    if config_helper.is_data_locality_enabled(pctx, cluster):
         LOG.warning("Node group awareness is not implemented in YARN yet "
                     "so enable_hypervisor_awareness set to False explicitly")
         tpl_map = th.generate_topology_map(cluster, is_node_awareness=False)
@@ -1,4 +1,4 @@
-# Copyright (c) 2014 Mirantis Inc.
+# Copyright (c) 2014 Mirantis, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -16,12 +16,10 @@
 from oslo_config import cfg
 import six

-from sahara import exceptions as ex
-from sahara.i18n import _
+from sahara.plugins import exceptions as ex
 from sahara.plugins import provisioning as p
 from sahara.plugins import utils
-from sahara.utils import files as f
-from sahara.utils import types
+from sahara_plugin_vanilla.i18n import _

 CONF = cfg.CONF
 CONF.import_opt("enable_data_locality", "sahara.topology.topology_helper")
@@ -165,7 +163,7 @@ def init_xml_configs(xml_confs):
             if cfg.default_value in ["true", "false"]:
                 cfg.config_type = "bool"
                 cfg.default_value = (cfg.default_value == 'true')
-            elif types.is_int(cfg.default_value):
+            elif utils.is_int(cfg.default_value):
                 cfg.config_type = "int"
                 cfg.default_value = int(cfg.default_value)
             if config['name'] in CLUSTER_WIDE_CONFS:
@@ -245,7 +243,7 @@ def get_config_value(pctx, service, name, cluster=None):
         if c.applicable_target == service and c.name == name:
             return c.default_value

-    raise ex.NotFoundException(
+    raise ex.PluginNotFoundException(
         {"name": name, "service": service},
         _("Unable to get parameter '%(name)s' from service %(service)s"))

@@ -303,10 +301,12 @@ def generate_job_cleanup_config(cluster):
                  (args['minimum_cleanup_megabytes'] > 0
                   and args['minimum_cleanup_seconds'] > 0))}
     if job_conf['valid']:
-        job_conf['cron'] = f.get_file_text(
-            'plugins/vanilla/hadoop2/resources/spark-cleanup.cron'),
-        job_cleanup_script = f.get_file_text(
-            'plugins/vanilla/hadoop2/resources/tmp-cleanup.sh.template')
+        job_conf['cron'] = utils.get_file_text(
+            'plugins/vanilla/hadoop2/resources/spark-cleanup.cron',
+            'sahara_plugin_vanilla'),
+        job_cleanup_script = utils.get_file_text(
+            'plugins/vanilla/hadoop2/resources/tmp-cleanup.sh.template',
+            'sahara_plugin_vanilla')
         job_conf['script'] = job_cleanup_script.format(**args)
     return job_conf

@@ -324,6 +324,7 @@ def generate_zk_basic_config(cluster):
         'synclimit': utils.get_config_value_or_default(
             "ZooKeeper", "syncLimit", cluster)
     }
-    zoo_cfg = f.get_file_text(
-        'plugins/vanilla/hadoop2/resources/zoo_sample.cfg')
+    zoo_cfg = utils.get_file_text(
+        'plugins/vanilla/hadoop2/resources/zoo_sample.cfg',
+        'sahara_plugin_vanilla')
     return zoo_cfg.format(**args)
@@ -13,14 +13,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from sahara.plugins.vanilla import edp_engine
-from sahara.service.edp import hdfs_helper
+from sahara.plugins import edp
+from sahara_plugin_vanilla.plugins.vanilla import edp_engine


 class EdpOozieEngine(edp_engine.EdpOozieEngine):

     def create_hdfs_dir(self, remote, dir_name):
-        hdfs_helper.create_dir_hadoop2(remote, dir_name, self.get_hdfs_user())
+        edp.create_dir_hadoop2(remote, dir_name, self.get_hdfs_user())

     def get_resource_manager_uri(self, cluster):
         return cluster['info']['YARN']['ResourceManager']
@@ -18,12 +18,10 @@ from castellan import key_manager
 from oslo_log import log as logging


-from sahara import conductor
-from sahara import context
-from sahara.utils import cluster as utils
-from sahara.utils import crypto
+from sahara.plugins import conductor
+from sahara.plugins import context
+from sahara.plugins import utils

-cond = conductor.API
 LOG = logging.getLogger(__name__)

@@ -72,15 +70,15 @@ def provision_keypairs(cluster, instances=None):
         # cluster created before mitaka, skipping provisioning
         return
     if not keypair:
-        private, public = crypto.generate_key_pair()
+        private, public = utils.generate_key_pair()
         keypair = {'public': public, 'private': private}
         extra['vanilla_keypair'] = keypair
         extra['vanilla_keypair']['private'] = _store_secret(
             keypair['private'])
-        cond.cluster_update(context.ctx(), cluster, {'extra': extra})
+        conductor.cluster_update(context.ctx(), cluster, {'extra': extra})
     else:
         keypair['private'] = _get_secret(keypair['private'])
-    with context.ThreadGroup() as tg:
+    with context.PluginsThreadGroup() as tg:
         for instance in instances:
             tg.spawn(
                 'provision-key-%s' % instance.instance_name,
@@ -12,7 +12,7 @@
 # implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from sahara.plugins.vanilla.hadoop2 import utils as u
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import utils as u


 def get_oozie_required_xml_configs(hadoop_conf_dir):
@@ -17,34 +17,31 @@ import os

 from oslo_log import log as logging

-from sahara import context
-from sahara.i18n import _
-from sahara.plugins import utils as pu
-from sahara.plugins.vanilla.hadoop2 import config_helper as c_helper
-from sahara.plugins.vanilla.hadoop2 import oozie_helper
-from sahara.plugins.vanilla.hadoop2 import utils as u
-from sahara.plugins.vanilla import utils as vu
-from sahara.utils import cluster_progress_ops as cpo
-from sahara.utils import edp
-from sahara.utils import files
-from sahara.utils import poll_utils
+from sahara.plugins import context
+from sahara.plugins import edp
+from sahara.plugins import utils
+from sahara_plugin_vanilla.i18n import _
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config_helper
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import oozie_helper
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import utils as u
+from sahara_plugin_vanilla.plugins.vanilla import utils as vu

 LOG = logging.getLogger(__name__)


 def start_dn_nm_processes(instances):
     filternames = ['datanode', 'nodemanager']
-    instances = pu.instances_with_services(instances, filternames)
+    instances = utils.instances_with_services(instances, filternames)

     if len(instances) == 0:
         return

-    cpo.add_provisioning_step(
+    utils.add_provisioning_step(
         instances[0].cluster_id,
-        pu.start_process_event_message("DataNodes, NodeManagers"),
+        utils.start_process_event_message("DataNodes, NodeManagers"),
         len(instances))

-    with context.ThreadGroup() as tg:
+    with context.PluginsThreadGroup() as tg:
         for instance in instances:
             with context.set_current_instance_id(instance.instance_id):
                 processes = set(instance.node_group.node_processes)
@@ -53,7 +50,7 @@ def start_dn_nm_processes(instances):
                     _start_processes, instance, list(processes))


-@cpo.event_wrapper(True)
+@utils.event_wrapper(True)
 def _start_processes(instance, processes):
     with instance.remote() as r:
         if 'datanode' in processes:
@@ -74,21 +71,23 @@ def start_yarn_process(instance, process):
         'sudo su - -c "yarn-daemon.sh start %s" hadoop' % process)


-@cpo.event_wrapper(True, step=pu.start_process_event_message("HistoryServer"))
+@utils.event_wrapper(
+    True, step=utils.start_process_event_message("HistoryServer"))
 def start_historyserver(instance):
     instance.remote().execute_command(
         'sudo su - -c "mr-jobhistory-daemon.sh start historyserver" hadoop')


-@cpo.event_wrapper(True, step=pu.start_process_event_message("Oozie"))
+@utils.event_wrapper(True, step=utils.start_process_event_message("Oozie"))
 def start_oozie_process(pctx, instance):
     with context.set_current_instance_id(instance.instance_id):
         with instance.remote() as r:
-            if c_helper.is_mysql_enabled(pctx, instance.cluster):
+            if config_helper.is_mysql_enabled(pctx, instance.cluster):
                 _start_mysql(r)
                 LOG.debug("Creating Oozie DB Schema")
-                sql_script = files.get_file_text(
-                    'plugins/vanilla/hadoop2/resources/create_oozie_db.sql')
+                sql_script = utils.get_file_text(
+                    'plugins/vanilla/hadoop2/resources/create_oozie_db.sql',
+                    'sahara_plugin_vanilla')

                 password = oozie_helper.get_oozie_mysql_configs(
                     instance.cluster)[
@@ -105,10 +104,10 @@ def start_oozie_process(pctx, instance):
                 _start_oozie(r)


-@cpo.event_wrapper(
-    True, step=pu.start_process_event_message("Spark History Server"))
+@utils.event_wrapper(
+    True, step=utils.start_process_event_message("Spark History Server"))
 def start_spark_history_server(master):
-    sp_home = c_helper.get_spark_home(master.cluster)
+    sp_home = config_helper.get_spark_home(master.cluster)
     with context.set_current_instance_id(master.instance_id):
         with master.remote() as r:
             r.execute_command('sudo su - -c "bash %s" hadoop' % os.path.join(
@@ -116,12 +115,12 @@


 def start_zk_server(instances):
-    cpo.add_provisioning_step(
+    utils.add_provisioning_step(
         instances[0].cluster_id,
-        pu.start_process_event_message("ZooKeeper"),
+        utils.start_process_event_message("ZooKeeper"),
         len(instances))

-    with context.ThreadGroup() as tg:
+    with context.PluginsThreadGroup() as tg:
         for instance in instances:
             with context.set_current_instance_id(instance.instance_id):
                 tg.spawn('ZK-start-processes-%s' % instance.instance_name,
@@ -135,19 +134,19 @@ def refresh_zk_servers(cluster, to_delete_instances=None):
             if instance in instances:
                 instances.remove(instance)

-    cpo.add_provisioning_step(
+    utils.add_provisioning_step(
         cluster.id,
-        pu.start_process_event_message("ZooKeeper"),
+        utils.start_process_event_message("ZooKeeper"),
         len(instances))

-    with context.ThreadGroup() as tg:
+    with context.PluginsThreadGroup() as tg:
         for instance in instances:
             with context.set_current_instance_id(instance.instance_id):
                 tg.spawn('ZK-restart-processes-%s' % instance.instance_name,
                          _start_zk_processes, instance, 'restart')


-@cpo.event_wrapper(True)
+@utils.event_wrapper(True)
 def _start_zk_processes(instance, operation):
     with instance.remote() as r:
         r.execute_command(
@@ -160,15 +159,16 @@ def format_namenode(instance):
         'sudo su - -c "hdfs namenode -format" hadoop')


-@cpo.event_wrapper(
-    True, step=pu.start_process_event_message("Oozie"), param=('cluster', 0))
+@utils.event_wrapper(
+    True,
+    step=utils.start_process_event_message("Oozie"), param=('cluster', 0))
 def refresh_hadoop_nodes(cluster):
     nn = vu.get_namenode(cluster)
     nn.remote().execute_command(
         'sudo su - -c "hdfs dfsadmin -refreshNodes" hadoop')


-@cpo.event_wrapper(
+@utils.event_wrapper(
     True, step=_("Refresh %s nodes") % "YARN", param=('cluster', 0))
 def refresh_yarn_nodes(cluster):
     rm = vu.get_resourcemanager(cluster)
@@ -210,7 +210,7 @@ def _start_oozie(remote):
         'sudo su - -c "/opt/oozie/bin/oozied.sh start" hadoop')


-@cpo.event_wrapper(
+@utils.event_wrapper(
     True, step=_("Await %s start up") % "DataNodes", param=('cluster', 0))
 def await_datanodes(cluster):
     datanodes_count = len(vu.get_datanodes(cluster))
@@ -219,9 +219,9 @@ def await_datanodes(cluster):

     l_message = _("Waiting on %s datanodes to start up") % datanodes_count
     with vu.get_namenode(cluster).remote() as r:
-        poll_utils.plugin_option_poll(
+        utils.plugin_option_poll(
             cluster, _check_datanodes_count,
-            c_helper.DATANODES_STARTUP_TIMEOUT, l_message, 1, {
+            config_helper.DATANODES_STARTUP_TIMEOUT, l_message, 1, {
                 'remote': r, 'count': datanodes_count})


@@ -265,7 +265,8 @@ def _hive_metastore_start(remote):
         " --service metastore > /dev/null &' hadoop")


-@cpo.event_wrapper(True, step=pu.start_process_event_message("HiveServer"))
+@utils.event_wrapper(
+    True, step=utils.start_process_event_message("HiveServer"))
 def start_hiveserver_process(pctx, instance):
     with context.set_current_instance_id(instance.instance_id):
         with instance.remote() as r:
@@ -273,16 +274,15 @@ def start_hiveserver_process(pctx, instance):
             _hive_copy_shared_conf(
                 r, edp.get_hive_shared_conf_path('hadoop'))

-            if c_helper.is_mysql_enabled(pctx, instance.cluster):
+            if config_helper.is_mysql_enabled(pctx, instance.cluster):
                 oozie = vu.get_oozie(instance.node_group.cluster)
                 if not oozie or instance.hostname() != oozie.hostname():
                     _start_mysql(r)

                 version = instance.cluster.hadoop_version
-                sql_script = files.get_file_text(
+                sql_script = utils.get_file_text(
                     'plugins/vanilla/v{}/resources/create_hive_db.sql'.format(
-                        version.replace('.', '_'))
-                )
+                        version.replace('.', '_')), 'sahara_plugin_vanilla')

                 sql_script = sql_script.replace(
                     '{{password}}', u.get_hive_password(instance.cluster))
@@ -13,17 +13,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from sahara.i18n import _
-from sahara.plugins import utils as u
-from sahara.plugins.vanilla.hadoop2 import config
-from sahara.plugins.vanilla.hadoop2 import config_helper as c_helper
-from sahara.plugins.vanilla.hadoop2 import run_scripts as run
-from sahara.plugins.vanilla.hadoop2 import utils as pu
-from sahara.plugins.vanilla import utils as vu
-from sahara.swift import swift_helper
-from sahara.utils import cluster_progress_ops as cpo
-from sahara.utils import poll_utils
+from sahara.plugins import swift_helper
+from sahara.plugins import utils
+from sahara_plugin_vanilla.i18n import _
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config_helper
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import run_scripts as run
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import utils as pu
+from sahara_plugin_vanilla.plugins.vanilla import utils as vu


 HADOOP_CONF_DIR = config.HADOOP_CONF_DIR

@@ -48,19 +45,20 @@ def _get_instances_with_service(instances, service):
             if service in instance.node_group.node_processes]


-@cpo.event_wrapper(True, step=_("Update include files"), param=('cluster', 0))
+@utils.event_wrapper(
+    True, step=_("Update include files"), param=('cluster', 0))
 def _update_include_files(cluster, dec_instances=None):
     dec_instances = dec_instances or []
     dec_instances_ids = [instance.id for instance in dec_instances]

-    instances = u.get_instances(cluster)
+    instances = utils.get_instances(cluster)

     inst_filter = lambda inst: inst.id not in dec_instances_ids

     datanodes = filter(inst_filter, vu.get_datanodes(cluster))
     nodemanagers = filter(inst_filter, vu.get_nodemanagers(cluster))
-    dn_hosts = u.generate_fqdn_host_names(datanodes)
-    nm_hosts = u.generate_fqdn_host_names(nodemanagers)
+    dn_hosts = utils.generate_fqdn_host_names(datanodes)
+    nm_hosts = utils.generate_fqdn_host_names(nodemanagers)
     for instance in instances:
         with instance.remote() as r:
             r.execute_command(
@@ -97,9 +95,9 @@ def decommission_nodes(pctx, cluster, instances):
 def _update_exclude_files(cluster, instances):
     datanodes = _get_instances_with_service(instances, 'datanode')
     nodemanagers = _get_instances_with_service(instances, 'nodemanager')
-    dn_hosts = u.generate_fqdn_host_names(datanodes)
-    nm_hosts = u.generate_fqdn_host_names(nodemanagers)
-    for instance in u.get_instances(cluster):
+    dn_hosts = utils.generate_fqdn_host_names(datanodes)
+    nm_hosts = utils.generate_fqdn_host_names(nodemanagers)
+    for instance in utils.get_instances(cluster):
         with instance.remote() as r:
             r.execute_command(
                 'sudo su - -c "echo \'%s\' > %s/dn-exclude" hadoop' % (
@@ -110,7 +108,7 @@ def _update_exclude_files(cluster, instances):


 def _clear_exclude_files(cluster):
-    for instance in u.get_instances(cluster):
+    for instance in utils.get_instances(cluster):
         with instance.remote() as r:
             r.execute_command(
                 'sudo su - -c "echo > %s/dn-exclude" hadoop' % HADOOP_CONF_DIR)
@@ -127,21 +125,21 @@ def is_decommissioned(cluster, check_func, instances):


 def _check_decommission(cluster, instances, check_func, option):
-    poll_utils.plugin_option_poll(
+    utils.plugin_option_poll(
         cluster, is_decommissioned, option, _("Wait for decommissioning"),
         5, {'cluster': cluster, 'check_func': check_func,
             'instances': instances})


-@cpo.event_wrapper(
+@utils.event_wrapper(
     True, step=_("Decommission %s") % "NodeManagers", param=('cluster', 0))
 def _check_nodemanagers_decommission(cluster, instances):
     _check_decommission(cluster, instances, pu.get_nodemanagers_status,
-                        c_helper.NODEMANAGERS_DECOMMISSIONING_TIMEOUT)
+                        config_helper.NODEMANAGERS_DECOMMISSIONING_TIMEOUT)


-@cpo.event_wrapper(
+@utils.event_wrapper(
     True, step=_("Decommission %s") % "DataNodes", param=('cluster', 0))
 def _check_datanodes_decommission(cluster, instances):
     _check_decommission(cluster, instances, pu.get_datanodes_status,
-                        c_helper.DATANODES_DECOMMISSIONING_TIMEOUT)
+                        config_helper.DATANODES_DECOMMISSIONING_TIMEOUT)
@@ -14,9 +14,8 @@
 # limitations under the License.

 from sahara.plugins import utils
-from sahara.plugins.vanilla.hadoop2 import run_scripts as run
-from sahara.plugins.vanilla import utils as vu
-from sahara.utils import cluster_progress_ops as cpo
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import run_scripts as run
+from sahara_plugin_vanilla.plugins.vanilla import utils as vu


 def start_namenode(cluster):
@@ -24,7 +23,7 @@ def start_namenode(cluster):
     _start_namenode(nn)


-@cpo.event_wrapper(
+@utils.event_wrapper(
     True, step=utils.start_process_event_message('NameNode'))
 def _start_namenode(nn):
     run.format_namenode(nn)
@@ -37,7 +36,7 @@ def start_secondarynamenode(cluster):
     _start_secondarynamenode(snn)


-@cpo.event_wrapper(
+@utils.event_wrapper(
     True, step=utils.start_process_event_message("SecondaryNameNodes"))
 def _start_secondarynamenode(snn):
     run.start_hadoop_process(snn, 'secondarynamenode')
@ -49,7 +48,7 @@ def start_resourcemanager(cluster):
|
||||||
_start_resourcemanager(rm)
|
_start_resourcemanager(rm)
|
||||||
|
|
||||||
|
|
||||||
@cpo.event_wrapper(
|
@utils.event_wrapper(
|
||||||
True, step=utils.start_process_event_message('ResourceManager'))
|
True, step=utils.start_process_event_message('ResourceManager'))
|
||||||
def _start_resourcemanager(snn):
|
def _start_resourcemanager(snn):
|
||||||
run.start_yarn_process(snn, 'resourcemanager')
|
run.start_yarn_process(snn, 'resourcemanager')
|
|
@@ -17,12 +17,11 @@ import re

 from oslo_log import log as logging

-from sahara import conductor as cond
-from sahara import context
-from sahara.plugins.vanilla import utils as u
-from sahara.service.castellan import utils as castellan
+from sahara.plugins import castellan_utils as castellan
+from sahara.plugins import conductor
+from sahara.plugins import context
+from sahara_plugin_vanilla.plugins.vanilla import utils as u

-conductor = cond.API

 LOG = logging.getLogger(__name__)
@@ -13,12 +13,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from sahara.i18n import _
 from sahara.plugins import exceptions as ex
 from sahara.plugins import utils as u
-from sahara.plugins.vanilla.hadoop2 import config_helper as cu
-from sahara.plugins.vanilla import utils as vu
-from sahara.utils import general as gu
+from sahara_plugin_vanilla.i18n import _
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config_helper as cu
+from sahara_plugin_vanilla.plugins.vanilla import utils as vu


 def validate_cluster_creating(pctx, cluster):

@@ -95,7 +94,7 @@ def validate_additional_ng_scaling(cluster, additional):
     scalable_processes = _get_scalable_processes()

     for ng_id in additional:
-        ng = gu.get_by_id(cluster.node_groups, ng_id)
+        ng = u.get_by_id(cluster.node_groups, ng_id)
         if not set(ng.node_processes).issubset(scalable_processes):
             msg = _("Vanilla plugin cannot scale nodegroup with processes: %s")
             raise ex.NodeGroupCannotBeScaled(ng.name,

@@ -140,7 +139,7 @@ def validate_zookeeper_node_count(zk_ng, existing, additional):
         zk_amount += ng.count

     for ng_id in additional:
-        ng = gu.get_by_id(zk_ng, ng_id)
+        ng = u.get_by_id(zk_ng, ng_id)
         if "zookeeper" in ng.node_processes:
             zk_amount += ng.count
@@ -15,9 +15,9 @@

 import copy

-from sahara.i18n import _
 from sahara.plugins import provisioning as p
-from sahara.plugins.vanilla import versionfactory as vhf
+from sahara_plugin_vanilla.i18n import _
+from sahara_plugin_vanilla.plugins.vanilla import versionfactory as vhf


 class VanillaProvider(p.ProvisioningPluginBase):
@@ -16,8 +16,8 @@

 from oslo_utils import uuidutils

+from sahara.plugins import castellan_utils as castellan
 from sahara.plugins import utils as u
-from sahara.service.castellan import utils as castellan


 def get_namenode(cluster):
@@ -19,34 +19,40 @@ from oslo_config import cfg
 import six

 from sahara.plugins import provisioning as p
-from sahara.plugins.vanilla.hadoop2 import config_helper as c_helper
-from sahara.utils import xmlutils as x
+from sahara.plugins import utils
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config_helper

 CONF = cfg.CONF
 CONF.import_opt("enable_data_locality", "sahara.topology.topology_helper")

-CORE_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_7_1/resources/core-default.xml')
+CORE_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_7_1/resources/core-default.xml',
+    'sahara_plugin_vanilla')

-HDFS_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_7_1/resources/hdfs-default.xml')
+HDFS_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_7_1/resources/hdfs-default.xml',
+    'sahara_plugin_vanilla')

-MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_7_1/resources/mapred-default.xml')
+MAPRED_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_7_1/resources/mapred-default.xml',
+    'sahara_plugin_vanilla')

-YARN_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_7_1/resources/yarn-default.xml')
+YARN_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_7_1/resources/yarn-default.xml',
+    'sahara_plugin_vanilla')

-OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_7_1/resources/oozie-default.xml')
+OOZIE_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_7_1/resources/oozie-default.xml',
+    'sahara_plugin_vanilla')

-HIVE_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_7_1/resources/hive-default.xml')
+HIVE_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_7_1/resources/hive-default.xml',
+    'sahara_plugin_vanilla')

 _default_executor_classpath = ":".join(
     ['/opt/hadoop/share/hadoop/tools/lib/hadoop-openstack-2.7.1.jar'])

-SPARK_CONFS = copy.deepcopy(c_helper.SPARK_CONFS)
+SPARK_CONFS = copy.deepcopy(config_helper.SPARK_CONFS)

 SPARK_CONFS['Spark']['OPTIONS'].append(
     {

@@ -87,15 +93,15 @@ ENV_CONFS = {
 }

 # Initialise plugin Hadoop configurations
-PLUGIN_XML_CONFIGS = c_helper.init_xml_configs(XML_CONFS)
-PLUGIN_ENV_CONFIGS = c_helper.init_env_configs(ENV_CONFS)
+PLUGIN_XML_CONFIGS = config_helper.init_xml_configs(XML_CONFS)
+PLUGIN_ENV_CONFIGS = config_helper.init_env_configs(ENV_CONFS)


 def _init_all_configs():
     configs = []
     configs.extend(PLUGIN_XML_CONFIGS)
     configs.extend(PLUGIN_ENV_CONFIGS)
-    configs.extend(c_helper.PLUGIN_GENERAL_CONFIGS)
+    configs.extend(config_helper.PLUGIN_GENERAL_CONFIGS)
     configs.extend(_get_spark_configs())
     configs.extend(_get_zookeeper_configs())
     return configs

@@ -124,7 +130,7 @@ def _get_spark_configs():

 def _get_zookeeper_configs():
     zk_configs = []
-    for service, config_items in six.iteritems(c_helper.ZOOKEEPER_CONFS):
+    for service, config_items in six.iteritems(config_helper.ZOOKEEPER_CONFS):
         for item in config_items['OPTIONS']:
             cfg = p.Config(name=item["name"],
                            description=item["description"],
@@ -14,34 +14,33 @@
 # limitations under the License.
 import os

-from sahara import exceptions as ex
-from sahara.i18n import _
+from sahara.plugins import edp
+from sahara.plugins import exceptions as ex
 from sahara.plugins import utils as plugin_utils
-from sahara.plugins.vanilla import confighints_helper as ch_helper
-from sahara.plugins.vanilla.hadoop2 import edp_engine
-from sahara.plugins.vanilla import utils as v_utils
-from sahara.service.edp.spark import engine as edp_spark_engine
-from sahara.utils import edp
+from sahara_plugin_vanilla.i18n import _
+from sahara_plugin_vanilla.plugins.vanilla import confighints_helper as chh
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import edp_engine
+from sahara_plugin_vanilla.plugins.vanilla import utils as v_utils


 class EdpOozieEngine(edp_engine.EdpOozieEngine):
     @staticmethod
     def get_possible_job_config(job_type):
         if edp.compare_job_type(job_type, edp.JOB_TYPE_HIVE):
-            return {'job_config': ch_helper.get_possible_hive_config_from(
+            return {'job_config': chh.get_possible_hive_config_from(
                 'plugins/vanilla/v2_7_1/resources/hive-default.xml')}
         if edp.compare_job_type(job_type,
                                 edp.JOB_TYPE_MAPREDUCE,
                                 edp.JOB_TYPE_MAPREDUCE_STREAMING):
-            return {'job_config': ch_helper.get_possible_mapreduce_config_from(
+            return {'job_config': chh.get_possible_mapreduce_config_from(
                 'plugins/vanilla/v2_7_1/resources/mapred-default.xml')}
         if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG):
-            return {'job_config': ch_helper.get_possible_pig_config_from(
+            return {'job_config': chh.get_possible_pig_config_from(
                 'plugins/vanilla/v2_7_1/resources/mapred-default.xml')}
         return edp_engine.EdpOozieEngine.get_possible_job_config(job_type)


-class EdpSparkEngine(edp_spark_engine.SparkJobEngine):
+class EdpSparkEngine(edp.PluginsSparkJobEngine):

     edp_base_version = "2.7.1"

@@ -68,13 +67,13 @@ class EdpSparkEngine(edp_spark_engine.SparkJobEngine):
     @staticmethod
     def job_type_supported(job_type):
         return (job_type in
-                edp_spark_engine.SparkJobEngine.get_supported_job_types())
+                edp.PluginsSparkJobEngine.get_supported_job_types())

     def validate_job_execution(self, cluster, job, data):
         if (not self.edp_supported(cluster.hadoop_version) or
                 not v_utils.get_spark_history_server(cluster)):

-            raise ex.InvalidDataException(
+            raise ex.PluginInvalidDataException(
                 _('Spark {base} or higher required to run {type} jobs').format(
                     base=EdpSparkEngine.edp_base_version, type=job.type))
@@ -15,34 +15,32 @@

 from oslo_config import cfg

-from sahara import conductor
-from sahara import context
+from sahara.plugins import conductor
+from sahara.plugins import context
+from sahara.plugins import swift_helper
 from sahara.plugins import utils
-from sahara.plugins.vanilla import abstractversionhandler as avm
-from sahara.plugins.vanilla.hadoop2 import config as c
-from sahara.plugins.vanilla.hadoop2 import keypairs
-from sahara.plugins.vanilla.hadoop2 import recommendations_utils as ru
-from sahara.plugins.vanilla.hadoop2 import run_scripts as run
-from sahara.plugins.vanilla.hadoop2 import scaling as sc
-from sahara.plugins.vanilla.hadoop2 import starting_scripts as s_scripts
-from sahara.plugins.vanilla.hadoop2 import utils as u
-from sahara.plugins.vanilla.hadoop2 import validation as vl
-from sahara.plugins.vanilla import utils as vu
-from sahara.plugins.vanilla.v2_8_2 import config_helper as c_helper
-from sahara.plugins.vanilla.v2_8_2 import edp_engine
-from sahara.swift import swift_helper
-from sahara.utils import cluster as cluster_utils
+from sahara_plugin_vanilla.plugins.vanilla import abstractversionhandler as avm
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config as c
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import keypairs
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import recommendations_utils
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import run_scripts as run
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import scaling as sc
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import starting_scripts
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import utils as u
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import validation as vl
+from sahara_plugin_vanilla.plugins.vanilla import utils as vu
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_1 import config_helper
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_1 import edp_engine


-conductor = conductor.API
 CONF = cfg.CONF


 class VersionHandler(avm.AbstractVersionHandler):
     def __init__(self):
         self.pctx = {
-            'env_confs': c_helper.get_env_configs(),
-            'all_confs': c_helper.get_plugin_configs()
+            'env_confs': config_helper.get_env_configs(),
+            'all_confs': config_helper.get_plugin_configs()
         }

     def get_plugin_configs(self):

@@ -72,22 +70,22 @@ class VersionHandler(avm.AbstractVersionHandler):
     def start_cluster(self, cluster):
         keypairs.provision_keypairs(cluster)

-        s_scripts.start_namenode(cluster)
-        s_scripts.start_secondarynamenode(cluster)
-        s_scripts.start_resourcemanager(cluster)
+        starting_scripts.start_namenode(cluster)
+        starting_scripts.start_secondarynamenode(cluster)
+        starting_scripts.start_resourcemanager(cluster)

         run.start_dn_nm_processes(utils.get_instances(cluster))
         run.await_datanodes(cluster)

-        s_scripts.start_historyserver(cluster)
-        s_scripts.start_oozie(self.pctx, cluster)
-        s_scripts.start_hiveserver(self.pctx, cluster)
-        s_scripts.start_zookeeper(cluster)
+        starting_scripts.start_historyserver(cluster)
+        starting_scripts.start_oozie(self.pctx, cluster)
+        starting_scripts.start_hiveserver(self.pctx, cluster)
+        starting_scripts.start_zookeeper(cluster)

-        swift_helper.install_ssl_certs(cluster_utils.get_instances(cluster))
+        swift_helper.install_ssl_certs(utils.get_instances(cluster))

         self._set_cluster_info(cluster)
-        s_scripts.start_spark(cluster)
+        starting_scripts.start_spark(cluster)

     def decommission_nodes(self, cluster, instances):
         sc.decommission_nodes(self.pctx, cluster, instances)

@@ -167,4 +165,6 @@ class VersionHandler(avm.AbstractVersionHandler):
         return c.get_open_ports(node_group)

     def recommend_configs(self, cluster, scaling):
-        ru.recommend_configs(cluster, self.get_plugin_configs(), scaling)
+        recommendations_utils.recommend_configs(cluster,
+                                                self.get_plugin_configs(),
+                                                scaling)
@@ -19,34 +19,40 @@ from oslo_config import cfg
 import six

 from sahara.plugins import provisioning as p
-from sahara.plugins.vanilla.hadoop2 import config_helper as c_helper
-from sahara.utils import xmlutils as x
+from sahara.plugins import utils
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config_helper

 CONF = cfg.CONF
 CONF.import_opt("enable_data_locality", "sahara.topology.topology_helper")

-CORE_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_7_5/resources/core-default.xml')
+CORE_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_7_5/resources/core-default.xml',
+    'sahara_plugin_vanilla')

-HDFS_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_7_5/resources/hdfs-default.xml')
+HDFS_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_7_5/resources/hdfs-default.xml',
+    'sahara_plugin_vanilla')

-MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_7_5/resources/mapred-default.xml')
+MAPRED_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_7_5/resources/mapred-default.xml',
+    'sahara_plugin_vanilla')

-YARN_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_7_5/resources/yarn-default.xml')
+YARN_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_7_5/resources/yarn-default.xml',
+    'sahara_plugin_vanilla')

-OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_7_5/resources/oozie-default.xml')
+OOZIE_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_7_5/resources/oozie-default.xml',
+    'sahara_plugin_vanilla')

-HIVE_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_7_5/resources/hive-default.xml')
+HIVE_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_7_5/resources/hive-default.xml',
+    'sahara_plugin_vanilla')

 _default_executor_classpath = ":".join(
     ['/opt/hadoop/share/hadoop/tools/lib/hadoop-openstack-2.7.5.jar'])

-SPARK_CONFS = copy.deepcopy(c_helper.SPARK_CONFS)
+SPARK_CONFS = copy.deepcopy(config_helper.SPARK_CONFS)

 SPARK_CONFS['Spark']['OPTIONS'].append(
     {

@@ -87,15 +93,15 @@ ENV_CONFS = {
 }

 # Initialise plugin Hadoop configurations
-PLUGIN_XML_CONFIGS = c_helper.init_xml_configs(XML_CONFS)
-PLUGIN_ENV_CONFIGS = c_helper.init_env_configs(ENV_CONFS)
+PLUGIN_XML_CONFIGS = config_helper.init_xml_configs(XML_CONFS)
+PLUGIN_ENV_CONFIGS = config_helper.init_env_configs(ENV_CONFS)


 def _init_all_configs():
     configs = []
     configs.extend(PLUGIN_XML_CONFIGS)
     configs.extend(PLUGIN_ENV_CONFIGS)
-    configs.extend(c_helper.PLUGIN_GENERAL_CONFIGS)
+    configs.extend(config_helper.PLUGIN_GENERAL_CONFIGS)
     configs.extend(_get_spark_configs())
     configs.extend(_get_zookeeper_configs())
     return configs

@@ -124,7 +130,7 @@ def _get_spark_configs():

 def _get_zookeeper_configs():
     zk_configs = []
-    for service, config_items in six.iteritems(c_helper.ZOOKEEPER_CONFS):
+    for service, config_items in six.iteritems(config_helper.ZOOKEEPER_CONFS):
         for item in config_items['OPTIONS']:
             cfg = p.Config(name=item["name"],
                            description=item["description"],
@@ -14,34 +14,37 @@
 # limitations under the License.
 import os

-from sahara import exceptions as ex
-from sahara.i18n import _
+from sahara.plugins import edp
+from sahara.plugins import exceptions as ex
 from sahara.plugins import utils as plugin_utils
-from sahara.plugins.vanilla import confighints_helper as ch_helper
-from sahara.plugins.vanilla.hadoop2 import edp_engine
-from sahara.plugins.vanilla import utils as v_utils
-from sahara.service.edp.spark import engine as edp_spark_engine
-from sahara.utils import edp
+from sahara_plugin_vanilla.i18n import _
+from sahara_plugin_vanilla.plugins.vanilla import confighints_helper
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import edp_engine
+from sahara_plugin_vanilla.plugins.vanilla import utils as v_utils


 class EdpOozieEngine(edp_engine.EdpOozieEngine):
     @staticmethod
     def get_possible_job_config(job_type):
         if edp.compare_job_type(job_type, edp.JOB_TYPE_HIVE):
-            return {'job_config': ch_helper.get_possible_hive_config_from(
+            return {
+                'job_config': confighints_helper.get_possible_hive_config_from(
                     'plugins/vanilla/v2_7_5/resources/hive-default.xml')}
         if edp.compare_job_type(job_type,
                                 edp.JOB_TYPE_MAPREDUCE,
                                 edp.JOB_TYPE_MAPREDUCE_STREAMING):
-            return {'job_config': ch_helper.get_possible_mapreduce_config_from(
+            return {
+                'job_config':
+                    confighints_helper.get_possible_mapreduce_config_from(
                         'plugins/vanilla/v2_7_5/resources/mapred-default.xml')}
         if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG):
-            return {'job_config': ch_helper.get_possible_pig_config_from(
+            return {
+                'job_config': confighints_helper.get_possible_pig_config_from(
                     'plugins/vanilla/v2_7_5/resources/mapred-default.xml')}
         return edp_engine.EdpOozieEngine.get_possible_job_config(job_type)


-class EdpSparkEngine(edp_spark_engine.SparkJobEngine):
+class EdpSparkEngine(edp.PluginsSparkJobEngine):

     edp_base_version = "2.7.5"

@@ -68,7 +71,7 @@ class EdpSparkEngine(edp_spark_engine.SparkJobEngine):
     @staticmethod
     def job_type_supported(job_type):
         return (job_type in
-                edp_spark_engine.SparkJobEngine.get_supported_job_types())
+                edp.PluginsSparkJobEngine.get_supported_job_types())

     def validate_job_execution(self, cluster, job, data):
         if (not self.edp_supported(cluster.hadoop_version) or
@@ -15,34 +15,32 @@

 from oslo_config import cfg

-from sahara import conductor
-from sahara import context
+from sahara.plugins import conductor
+from sahara.plugins import context
+from sahara.plugins import swift_helper
 from sahara.plugins import utils
-from sahara.plugins.vanilla import abstractversionhandler as avm
-from sahara.plugins.vanilla.hadoop2 import config as c
-from sahara.plugins.vanilla.hadoop2 import keypairs
-from sahara.plugins.vanilla.hadoop2 import recommendations_utils as ru
-from sahara.plugins.vanilla.hadoop2 import run_scripts as run
-from sahara.plugins.vanilla.hadoop2 import scaling as sc
-from sahara.plugins.vanilla.hadoop2 import starting_scripts as s_scripts
-from sahara.plugins.vanilla.hadoop2 import utils as u
-from sahara.plugins.vanilla.hadoop2 import validation as vl
-from sahara.plugins.vanilla import utils as vu
-from sahara.plugins.vanilla.v2_7_5 import config_helper as c_helper
-from sahara.plugins.vanilla.v2_7_5 import edp_engine
-from sahara.swift import swift_helper
-from sahara.utils import cluster as cluster_utils
+from sahara_plugin_vanilla.plugins.vanilla import abstractversionhandler as avm
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config as c
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import keypairs
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import recommendations_utils
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import run_scripts as run
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import scaling as sc
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import starting_scripts
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import utils as u
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import validation as vl
+from sahara_plugin_vanilla.plugins.vanilla import utils as vu
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_5 import config_helper
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_5 import edp_engine


-conductor = conductor.API
 CONF = cfg.CONF


 class VersionHandler(avm.AbstractVersionHandler):
     def __init__(self):
         self.pctx = {
-            'env_confs': c_helper.get_env_configs(),
-            'all_confs': c_helper.get_plugin_configs()
+            'env_confs': config_helper.get_env_configs(),
+            'all_confs': config_helper.get_plugin_configs()
         }

     def get_plugin_configs(self):

@@ -72,22 +70,22 @@ class VersionHandler(avm.AbstractVersionHandler):
     def start_cluster(self, cluster):
         keypairs.provision_keypairs(cluster)

-        s_scripts.start_namenode(cluster)
-        s_scripts.start_secondarynamenode(cluster)
-        s_scripts.start_resourcemanager(cluster)
+        starting_scripts.start_namenode(cluster)
+        starting_scripts.start_secondarynamenode(cluster)
+        starting_scripts.start_resourcemanager(cluster)

         run.start_dn_nm_processes(utils.get_instances(cluster))
         run.await_datanodes(cluster)

-        s_scripts.start_historyserver(cluster)
-        s_scripts.start_oozie(self.pctx, cluster)
-        s_scripts.start_hiveserver(self.pctx, cluster)
-        s_scripts.start_zookeeper(cluster)
+        starting_scripts.start_historyserver(cluster)
+        starting_scripts.start_oozie(self.pctx, cluster)
+        starting_scripts.start_hiveserver(self.pctx, cluster)
+        starting_scripts.start_zookeeper(cluster)

-        swift_helper.install_ssl_certs(cluster_utils.get_instances(cluster))
+        swift_helper.install_ssl_certs(utils.get_instances(cluster))

         self._set_cluster_info(cluster)
-        s_scripts.start_spark(cluster)
+        starting_scripts.start_spark(cluster)

     def decommission_nodes(self, cluster, instances):
         sc.decommission_nodes(self.pctx, cluster, instances)

@@ -167,4 +165,6 @@ class VersionHandler(avm.AbstractVersionHandler):
         return c.get_open_ports(node_group)

     def recommend_configs(self, cluster, scaling):
-        ru.recommend_configs(cluster, self.get_plugin_configs(), scaling)
+        recommendations_utils.recommend_configs(cluster,
+                                                self.get_plugin_configs(),
+                                                scaling)
@@ -19,34 +19,40 @@ from oslo_config import cfg
 import six

 from sahara.plugins import provisioning as p
-from sahara.plugins.vanilla.hadoop2 import config_helper as c_helper
-from sahara.utils import xmlutils as x
+from sahara.plugins import utils
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config_helper

 CONF = cfg.CONF
 CONF.import_opt("enable_data_locality", "sahara.topology.topology_helper")

-CORE_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_8_2/resources/core-default.xml')
+CORE_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_8_2/resources/core-default.xml',
+    'sahara_plugin_vanilla')

-HDFS_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_8_2/resources/hdfs-default.xml')
+HDFS_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_8_2/resources/hdfs-default.xml',
+    'sahara_plugin_vanilla')

-MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_8_2/resources/mapred-default.xml')
+MAPRED_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_8_2/resources/mapred-default.xml',
+    'sahara_plugin_vanilla')

-YARN_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_8_2/resources/yarn-default.xml')
+YARN_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_8_2/resources/yarn-default.xml',
+    'sahara_plugin_vanilla')

-OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_8_2/resources/oozie-default.xml')
+OOZIE_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_8_2/resources/oozie-default.xml',
+    'sahara_plugin_vanilla')

-HIVE_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/vanilla/v2_8_2/resources/hive-default.xml')
+HIVE_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/vanilla/v2_8_2/resources/hive-default.xml',
+    'sahara_plugin_vanilla')

 _default_executor_classpath = ":".join(
     ['/opt/hadoop/share/hadoop/tools/lib/hadoop-openstack-2.8.2.jar'])

-SPARK_CONFS = copy.deepcopy(c_helper.SPARK_CONFS)
+SPARK_CONFS = copy.deepcopy(config_helper.SPARK_CONFS)

 SPARK_CONFS['Spark']['OPTIONS'].append(
     {

@@ -87,15 +93,15 @@ ENV_CONFS = {
 }

 # Initialise plugin Hadoop configurations
-PLUGIN_XML_CONFIGS = c_helper.init_xml_configs(XML_CONFS)
-PLUGIN_ENV_CONFIGS = c_helper.init_env_configs(ENV_CONFS)
+PLUGIN_XML_CONFIGS = config_helper.init_xml_configs(XML_CONFS)
+PLUGIN_ENV_CONFIGS = config_helper.init_env_configs(ENV_CONFS)


 def _init_all_configs():
     configs = []
     configs.extend(PLUGIN_XML_CONFIGS)
     configs.extend(PLUGIN_ENV_CONFIGS)
-    configs.extend(c_helper.PLUGIN_GENERAL_CONFIGS)
+    configs.extend(config_helper.PLUGIN_GENERAL_CONFIGS)
     configs.extend(_get_spark_configs())
     configs.extend(_get_zookeeper_configs())
     return configs

@@ -124,7 +130,7 @@ def _get_spark_configs():

 def _get_zookeeper_configs():
     zk_configs = []
-    for service, config_items in six.iteritems(c_helper.ZOOKEEPER_CONFS):
+    for service, config_items in six.iteritems(config_helper.ZOOKEEPER_CONFS):
         for item in config_items['OPTIONS']:
             cfg = p.Config(name=item["name"],
                            description=item["description"],
@@ -14,34 +14,37 @@
 # limitations under the License.
 import os

-from sahara import exceptions as ex
-from sahara.i18n import _
+from sahara.plugins import edp
+from sahara.plugins import exceptions as ex
 from sahara.plugins import utils as plugin_utils
-from sahara.plugins.vanilla import confighints_helper as ch_helper
-from sahara.plugins.vanilla.hadoop2 import edp_engine
-from sahara.plugins.vanilla import utils as v_utils
-from sahara.service.edp.spark import engine as edp_spark_engine
-from sahara.utils import edp
+from sahara_plugin_vanilla.i18n import _
+from sahara_plugin_vanilla.plugins.vanilla import confighints_helper
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import edp_engine
+from sahara_plugin_vanilla.plugins.vanilla import utils as v_utils


 class EdpOozieEngine(edp_engine.EdpOozieEngine):
     @staticmethod
     def get_possible_job_config(job_type):
         if edp.compare_job_type(job_type, edp.JOB_TYPE_HIVE):
-            return {'job_config': ch_helper.get_possible_hive_config_from(
+            return {
+                'job_config': confighints_helper.get_possible_hive_config_from(
                     'plugins/vanilla/v2_8_2/resources/hive-default.xml')}
         if edp.compare_job_type(job_type,
                                 edp.JOB_TYPE_MAPREDUCE,
                                 edp.JOB_TYPE_MAPREDUCE_STREAMING):
-            return {'job_config': ch_helper.get_possible_mapreduce_config_from(
+            return {
+                'job_config':
+                    confighints_helper.get_possible_mapreduce_config_from(
                         'plugins/vanilla/v2_8_2/resources/mapred-default.xml')}
         if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG):
-            return {'job_config': ch_helper.get_possible_pig_config_from(
+            return {
+                'job_config': confighints_helper.get_possible_pig_config_from(
                     'plugins/vanilla/v2_8_2/resources/mapred-default.xml')}
         return edp_engine.EdpOozieEngine.get_possible_job_config(job_type)


-class EdpSparkEngine(edp_spark_engine.SparkJobEngine):
+class EdpSparkEngine(edp.PluginsSparkJobEngine):

     edp_base_version = "2.8.2"

@@ -68,13 +71,13 @@ class EdpSparkEngine(edp_spark_engine.SparkJobEngine):
     @staticmethod
     def job_type_supported(job_type):
         return (job_type in
-                edp_spark_engine.SparkJobEngine.get_supported_job_types())
+                edp.PluginsSparkJobEngine.get_supported_job_types())

     def validate_job_execution(self, cluster, job, data):
         if (not self.edp_supported(cluster.hadoop_version) or
                 not v_utils.get_spark_history_server(cluster)):

-            raise ex.InvalidDataException(
+            raise ex.PluginInvalidDataException(
                 _('Spark {base} or higher required to run {type} jobs').format(
                     base=EdpSparkEngine.edp_base_version, type=job.type))
@@ -15,34 +15,32 @@

 from oslo_config import cfg

-from sahara import conductor
-from sahara import context
+from sahara.plugins import conductor
+from sahara.plugins import context
+from sahara.plugins import swift_helper
 from sahara.plugins import utils
-from sahara.plugins.vanilla import abstractversionhandler as avm
-from sahara.plugins.vanilla.hadoop2 import config as c
-from sahara.plugins.vanilla.hadoop2 import keypairs
-from sahara.plugins.vanilla.hadoop2 import recommendations_utils as ru
-from sahara.plugins.vanilla.hadoop2 import run_scripts as run
-from sahara.plugins.vanilla.hadoop2 import scaling as sc
-from sahara.plugins.vanilla.hadoop2 import starting_scripts as s_scripts
-from sahara.plugins.vanilla.hadoop2 import utils as u
-from sahara.plugins.vanilla.hadoop2 import validation as vl
-from sahara.plugins.vanilla import utils as vu
-from sahara.plugins.vanilla.v2_7_1 import config_helper as c_helper
-from sahara.plugins.vanilla.v2_7_1 import edp_engine
-from sahara.swift import swift_helper
-from sahara.utils import cluster as cluster_utils
+from sahara_plugin_vanilla.plugins.vanilla import abstractversionhandler as avm
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config as c
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import keypairs
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import recommendations_utils
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import run_scripts as run
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import scaling as sc
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import starting_scripts
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import utils as u
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import validation as vl
+from sahara_plugin_vanilla.plugins.vanilla import utils as vu
+from sahara_plugin_vanilla.plugins.vanilla.v2_8_2 import config_helper
+from sahara_plugin_vanilla.plugins.vanilla.v2_8_2 import edp_engine


-conductor = conductor.API
 CONF = cfg.CONF


 class VersionHandler(avm.AbstractVersionHandler):
     def __init__(self):
         self.pctx = {
-            'env_confs': c_helper.get_env_configs(),
-            'all_confs': c_helper.get_plugin_configs()
+            'env_confs': config_helper.get_env_configs(),
+            'all_confs': config_helper.get_plugin_configs()
         }

     def get_plugin_configs(self):

@@ -72,22 +70,22 @@ class VersionHandler(avm.AbstractVersionHandler):
     def start_cluster(self, cluster):
         keypairs.provision_keypairs(cluster)

-        s_scripts.start_namenode(cluster)
-        s_scripts.start_secondarynamenode(cluster)
-        s_scripts.start_resourcemanager(cluster)
+        starting_scripts.start_namenode(cluster)
+        starting_scripts.start_secondarynamenode(cluster)
+        starting_scripts.start_resourcemanager(cluster)

         run.start_dn_nm_processes(utils.get_instances(cluster))
         run.await_datanodes(cluster)

-        s_scripts.start_historyserver(cluster)
-        s_scripts.start_oozie(self.pctx, cluster)
-        s_scripts.start_hiveserver(self.pctx, cluster)
-        s_scripts.start_zookeeper(cluster)
+        starting_scripts.start_historyserver(cluster)
+        starting_scripts.start_oozie(self.pctx, cluster)
+        starting_scripts.start_hiveserver(self.pctx, cluster)
+        starting_scripts.start_zookeeper(cluster)

-        swift_helper.install_ssl_certs(cluster_utils.get_instances(cluster))
+        swift_helper.install_ssl_certs(utils.get_instances(cluster))

         self._set_cluster_info(cluster)
-        s_scripts.start_spark(cluster)
+        starting_scripts.start_spark(cluster)

     def decommission_nodes(self, cluster, instances):
         sc.decommission_nodes(self.pctx, cluster, instances)

@@ -167,4 +165,6 @@ class VersionHandler(avm.AbstractVersionHandler):
         return c.get_open_ports(node_group)

     def recommend_configs(self, cluster, scaling):
-        ru.recommend_configs(cluster, self.get_plugin_configs(), scaling)
+        recommendations_utils.recommend_configs(cluster,
+                                                self.get_plugin_configs(),
+                                                scaling)
@@ -16,7 +16,7 @@
 import os
 import re

-from sahara.utils import general
+from sahara.plugins import utils


 class VersionFactory(object):

@@ -33,13 +33,13 @@ class VersionFactory(object):
                 for name in os.listdir(src_dir)
                 if (os.path.isdir(os.path.join(src_dir, name))
                     and re.match(r'^v\d+_\d+_\d+$', name))])
-            versions.sort(key=general.natural_sort_key)
+            versions.sort(key=utils.natural_sort_key)
             VersionFactory.versions = versions

             VersionFactory.modules = {}
             for version in VersionFactory.versions:
-                module_name = 'sahara.plugins.vanilla.v%s.versionhandler' % (
-                    version.replace('.', '_'))
+                module_name = ('sahara_plugin_vanilla.plugins.vanilla.v%s.'
+                               'versionhandler' % (version.replace('.', '_')))
                 module_class = getattr(
                     __import__(module_name, fromlist=['sahara']),
                     'VersionHandler')
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from sahara.utils import patches
+from sahara_plugin_vanilla.utils import patches
 patches.patch_all()

 import oslo_i18n
@@ -13,13 +13,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import mock
 from oslotest import base

-from sahara import context
-from sahara.db import api as db_api
-from sahara import main
-from sahara.utils import rpc
+from sahara.plugins import context
+from sahara.plugins import db as db_api
+from sahara.plugins import main
+from sahara.plugins import utils


 class SaharaTestCase(base.BaseTestCase):

@@ -27,7 +26,7 @@ class SaharaTestCase(base.BaseTestCase):
     def setUp(self):
         super(SaharaTestCase, self).setUp()
         self.setup_context()
-        rpc.setup('all-in-one')
+        utils.rpc_setup('all-in-one')

     def setup_context(self, username="test_user", tenant_id="tenant_1",
                       auth_token="test_auth_token", tenant_name='test_tenant',

@@ -35,14 +34,14 @@ class SaharaTestCase(base.BaseTestCase):
         self.addCleanup(context.set_ctx,
                         context.ctx() if context.has_ctx() else None)

-        context.set_ctx(context.Context(
+        context.set_ctx(context.PluginsContext(
             username=username, tenant_id=tenant_id,
             auth_token=auth_token, service_catalog=service_catalog or {},
             tenant_name=tenant_name, **kwargs))

     def override_config(self, name, override, group=None):
-        main.CONF.set_override(name, override, group)
-        self.addCleanup(main.CONF.clear_override, name, group)
+        main.set_override(name, override, group)
+        self.addCleanup(main.clear_override, name, group)


 class SaharaWithDbTestCase(SaharaTestCase):

@@ -52,22 +51,3 @@ class SaharaWithDbTestCase(SaharaTestCase):
         self.override_config('connection', "sqlite://", group='database')
         db_api.setup_db()
         self.addCleanup(db_api.drop_db)
-
-
-class _ConsecutiveThreadGroup(context.ThreadGroup):
-    def __init__(self, _thread_pool_size=1000):
-        pass
-
-    def spawn(self, thread_description, func, *args, **kwargs):
-        func(*args, **kwargs)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *ex):
-        pass
-
-
-def mock_thread_group(func):
-    return mock.patch('sahara.context.ThreadGroup',
-                      new=_ConsecutiveThreadGroup)(func)
@@ -16,15 +16,15 @@
 import mock
 from oslo_config import cfg

-from sahara import exceptions as ex
+from sahara.plugins import exceptions as ex
 from sahara.plugins import provisioning as p
-from sahara.plugins.vanilla.hadoop2 import config_helper as c_helper
-from sahara.tests.unit import base
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config_helper
+from sahara_plugin_vanilla.tests.unit import base


 class TestConfigHelper(base.SaharaTestCase):

-    plugin_path = 'sahara.plugins.vanilla.hadoop2.'
+    plugin_path = 'sahara_plugin_vanilla.plugins.vanilla.hadoop2.'

     def setUp(self):
         super(TestConfigHelper, self).setUp()

@@ -54,21 +54,22 @@ class TestConfigHelper(base.SaharaTestCase):
                 'Oozie Heap Size': 1024
             }
         }
-        configs = c_helper.init_env_configs(ENV_CONFS)
+        configs = config_helper.init_env_configs(ENV_CONFS)
         for config in configs:
             self.assertIsInstance(config, p.Config)

     def test_init_general_configs(self):
-        sample_configs = [c_helper.ENABLE_SWIFT, c_helper.ENABLE_MYSQL,
-                          c_helper.DATANODES_STARTUP_TIMEOUT,
-                          c_helper.DATANODES_DECOMMISSIONING_TIMEOUT,
-                          c_helper.NODEMANAGERS_DECOMMISSIONING_TIMEOUT]
+        sample_configs = [config_helper.ENABLE_SWIFT,
+                          config_helper.ENABLE_MYSQL,
+                          config_helper.DATANODES_STARTUP_TIMEOUT,
+                          config_helper.DATANODES_DECOMMISSIONING_TIMEOUT,
+                          config_helper.NODEMANAGERS_DECOMMISSIONING_TIMEOUT]
         self.CONF.enable_data_locality = False
-        self.assertEqual(c_helper._init_general_configs(), sample_configs)
+        self.assertEqual(config_helper._init_general_configs(), sample_configs)

-        sample_configs.append(c_helper.ENABLE_DATA_LOCALITY)
+        sample_configs.append(config_helper.ENABLE_DATA_LOCALITY)
         self.CONF.enable_data_locality = True
-        self.assertEqual(c_helper._init_general_configs(), sample_configs)
+        self.assertEqual(config_helper._init_general_configs(), sample_configs)

     def test_get_config_value(self):
         cluster = mock.Mock()

@@ -78,8 +79,8 @@ class TestConfigHelper(base.SaharaTestCase):
         cl = 'test'
         ng.configuration.return_value.get.return_value.get.return_value = cl
         cluster.node_groups = [ng]
-        cl_param = c_helper.get_config_value('pctx', 'service',
+        cl_param = config_helper.get_config_value('pctx', 'service',
                                                   'name', cluster)
         self.assertEqual(cl, cl_param)

         all_confs = mock.Mock()

@@ -87,46 +88,48 @@ class TestConfigHelper(base.SaharaTestCase):
         all_confs.name = 'name'
         all_confs.default_value = 'default'
         pctx = {'all_confs': [all_confs]}
-        value = c_helper.get_config_value(pctx, 'service', 'name')
+        value = config_helper.get_config_value(pctx, 'service', 'name')
         self.assertEqual(value, 'default')

         pctx = {'all_confs': []}
-        self.assertRaises(ex.NotFoundException, c_helper.get_config_value,
-                          pctx, 'service', 'name')
+        self.assertRaises(ex.PluginNotFoundException,
+                          config_helper.get_config_value, pctx, 'service',
+                          'name')

     @mock.patch(plugin_path + 'config_helper.get_config_value')
     def test_is_swift_enabled(self, get_config_value):
-        target = c_helper.ENABLE_SWIFT.applicable_target
-        name = c_helper.ENABLE_SWIFT.name
-        c_helper.is_swift_enabled(self.pctx, self.cluster)
+        target = config_helper.ENABLE_SWIFT.applicable_target
+        name = config_helper.ENABLE_SWIFT.name
+        config_helper.is_swift_enabled(self.pctx, self.cluster)
         get_config_value.assert_called_once_with(self.pctx, target,
                                                  name, self.cluster)

     @mock.patch(plugin_path + 'config_helper.get_config_value')
     def test_is_mysql_enabled(self, get_config_value):
-        target = c_helper.ENABLE_MYSQL.applicable_target
-        name = c_helper.ENABLE_MYSQL.name
-        c_helper.is_mysql_enabled(self.pctx, self.cluster)
+        target = config_helper.ENABLE_MYSQL.applicable_target
+        name = config_helper.ENABLE_MYSQL.name
+        config_helper.is_mysql_enabled(self.pctx, self.cluster)
         get_config_value.assert_called_once_with(self.pctx, target,
                                                  name, self.cluster)

     @mock.patch(plugin_path + 'config_helper.get_config_value')
     def test_is_data_locality_enabled(self, get_config_value):
         self.CONF.enable_data_locality = False
-        enabled = c_helper.is_data_locality_enabled(self.pctx, self.cluster)
+        enabled = config_helper.is_data_locality_enabled(self.pctx,
+                                                         self.cluster)
         self.assertEqual(enabled, False)

         self.CONF.enable_data_locality = True
-        target = c_helper.ENABLE_DATA_LOCALITY.applicable_target
-        name = c_helper.ENABLE_DATA_LOCALITY.name
-        c_helper.is_data_locality_enabled(self.pctx, self.cluster)
+        target = config_helper.ENABLE_DATA_LOCALITY.applicable_target
+        name = config_helper.ENABLE_DATA_LOCALITY.name
+        config_helper.is_data_locality_enabled(self.pctx, self.cluster)
         get_config_value.assert_called_once_with(self.pctx, target,
                                                  name, self.cluster)

     def test_generate_spark_env_configs(self):
         configs = 'HADOOP_CONF_DIR=/opt/hadoop/etc/hadoop\n' \
                   'YARN_CONF_DIR=/opt/hadoop/etc/hadoop'
-        ret = c_helper.generate_spark_env_configs(self.cluster)
+        ret = config_helper.generate_spark_env_configs(self.cluster)
         self.assertEqual(ret, configs)

     @mock.patch('sahara.plugins.utils.get_config_value_or_default')

@@ -134,17 +137,17 @@ class TestConfigHelper(base.SaharaTestCase):
                                               get_config_value_or_default):
         get_config_value_or_default.return_value = None
         path = 'Executor extra classpath'
-        ret = c_helper.generate_spark_executor_classpath(self.cluster)
+        ret = config_helper.generate_spark_executor_classpath(self.cluster)
         get_config_value_or_default.assert_called_once_with('Spark',
                                                             path,
                                                             self.cluster)
         self.assertEqual(ret, '\n')

         get_config_value_or_default.return_value = 'test'
-        ret = c_helper.generate_spark_executor_classpath(self.cluster)
+        ret = config_helper.generate_spark_executor_classpath(self.cluster)
         self.assertEqual(ret, 'spark.executor.extraClassPath test')

-    @mock.patch('sahara.utils.files.get_file_text')
+    @mock.patch('sahara.plugins.utils.get_file_text')
     @mock.patch('sahara.plugins.utils.get_config_value_or_default')
     def test_generate_job_cleanup_config(self,
                                          get_config_value_or_default,

@@ -160,26 +163,26 @@ class TestConfigHelper(base.SaharaTestCase):
                     'script': script}
         get_file_text.return_value = cron
         get_config_value_or_default.return_value = 1
-        ret = c_helper.generate_job_cleanup_config(self.cluster)
+        ret = config_helper.generate_job_cleanup_config(self.cluster)
         self.assertEqual(get_config_value_or_default.call_count, 3)
         self.assertEqual(get_file_text.call_count, 2)
         self.assertEqual(ret, job_conf)

         job_conf = {'valid': False}
         get_config_value_or_default.return_value = 0
-        ret = c_helper.generate_job_cleanup_config(self.cluster)
+        ret = config_helper.generate_job_cleanup_config(self.cluster)
         self.assertEqual(get_config_value_or_default.call_count, 6)
         self.assertEqual(ret, job_conf)

     @mock.patch('sahara.plugins.utils.get_config_value_or_default')
     def test_get_spark_home(self, get_config_value_or_default):
         get_config_value_or_default.return_value = 1
-        self.assertEqual(c_helper.get_spark_home(self.cluster), 1)
+        self.assertEqual(config_helper.get_spark_home(self.cluster), 1)
         get_config_value_or_default.assert_called_once_with('Spark',
                                                             'Spark home',
                                                             self.cluster)

-    @mock.patch('sahara.utils.files.get_file_text')
+    @mock.patch('sahara.plugins.utils.get_file_text')
@mock.patch('sahara.plugins.utils.get_config_value_or_default')
|
@mock.patch('sahara.plugins.utils.get_config_value_or_default')
|
||||||
def test_generate_zk_basic_config(self, get_config_value_or_default,
|
def test_generate_zk_basic_config(self, get_config_value_or_default,
|
||||||
get_file_text):
|
get_file_text):
|
||||||
|
@ -191,6 +194,6 @@ class TestConfigHelper(base.SaharaTestCase):
|
||||||
get_config_value_or_default.return_value = 5
|
get_config_value_or_default.return_value = 5
|
||||||
get_file_text.return_value = key
|
get_file_text.return_value = key
|
||||||
|
|
||||||
ret = c_helper.generate_zk_basic_config(self.cluster)
|
ret = config_helper.generate_zk_basic_config(self.cluster)
|
||||||
self.assertEqual(get_config_value_or_default.call_count, 3)
|
self.assertEqual(get_config_value_or_default.call_count, 3)
|
||||||
self.assertEqual(ret, actual)
|
self.assertEqual(ret, actual)
|
|
@@ -13,8 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from sahara.plugins.vanilla.hadoop2 import config as c
-from sahara.tests.unit import base
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config as c
+from sahara_plugin_vanilla.tests.unit import base


 class VanillaTwoConfigTestCase(base.SaharaTestCase):
@@ -15,19 +15,22 @@

 import mock

+from sahara.plugins import base as pb
 from sahara.plugins import exceptions as ex
-from sahara.plugins.vanilla.hadoop2 import edp_engine
-from sahara.tests.unit import base as sahara_base
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import edp_engine
+from sahara_plugin_vanilla.tests.unit import base as sahara_base


 class EdpOozieEngineTest(sahara_base.SaharaTestCase):

-    engine_path = 'sahara.service.edp.oozie.engine.'
+    engine_path = 'sahara.plugins.edp.'

     def setUp(self):
         super(EdpOozieEngineTest, self).setUp()
         self.cluster = mock.Mock()
-        with mock.patch('sahara.service.edp.job_utils.get_plugin',
+        self.override_config("plugins", ["vanilla"])
+        pb.setup_plugins()
+        with mock.patch('sahara.plugins.edp.get_plugin',
                         return_value='test_plugins'):
             self.engine = edp_engine.EdpOozieEngine(self.cluster)

@@ -48,14 +51,14 @@ class EdpOozieEngineTest(sahara_base.SaharaTestCase):
         ret = self.engine.get_oozie_server_uri(cluster)
         self.assertEqual(ret, 'test_url/oozie/')

-    @mock.patch('sahara.plugins.vanilla.utils.get_oozie')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.utils.get_oozie')
     def test_get_oozie_server(self, get_oozie):
         get_oozie.return_value = 'bingo'
         ret = self.engine.get_oozie_server(self.cluster)
         get_oozie.assert_called_once_with(self.cluster)
         self.assertEqual(ret, 'bingo')

-    @mock.patch(engine_path + 'OozieJobEngine.validate_job_execution')
+    @mock.patch(engine_path + 'PluginsOozieJobEngine.validate_job_execution')
     @mock.patch('sahara.plugins.utils.get_instances_count')
     def test_validate_job_execution(self,
                                     get_instances_count,
@@ -72,7 +75,7 @@ class EdpOozieEngineTest(sahara_base.SaharaTestCase):
         validate_job_execution.assert_called_once_with(self.cluster,
                                                        job, data)

-    @mock.patch('sahara.service.edp.hdfs_helper.create_dir_hadoop2')
+    @mock.patch('sahara.plugins.edp.create_dir_hadoop2')
     def test_create_hdfs_dir(self, create_dir_hadoop2):
         self.engine.get_hdfs_user = mock.Mock(return_value='test_user')
         remote = mock.Mock()
@@ -15,8 +15,8 @@

 import mock

-from sahara.plugins.vanilla.hadoop2 import oozie_helper as o_helper
-from sahara.tests.unit import base
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import oozie_helper
+from sahara_plugin_vanilla.tests.unit import base


 class TestOozieHelper(base.SaharaTestCase):
@@ -46,10 +46,11 @@ class TestOozieHelper(base.SaharaTestCase):
             'oozie.service.HadoopAccessorService.hadoop.configurations':
             '*=/root'
         }
-        ret = o_helper.get_oozie_required_xml_configs(hadoop_conf_dir)
+        ret = oozie_helper.get_oozie_required_xml_configs(hadoop_conf_dir)
         self.assertEqual(ret, configs)

-    @mock.patch('sahara.plugins.vanilla.hadoop2.utils.get_oozie_password')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.hadoop2.'
+                'utils.get_oozie_password')
     def test_get_oozie_mysql_configs(self, get_oozie_password):
         get_oozie_password.return_value = '123'
         configs = {
@@ -61,6 +62,6 @@ class TestOozieHelper(base.SaharaTestCase):
             'oozie.service.JPAService.jdbc.password': '123'
         }
         cluster = mock.Mock()
-        ret = o_helper.get_oozie_mysql_configs(cluster)
+        ret = oozie_helper.get_oozie_mysql_configs(cluster)
         get_oozie_password.assert_called_once_with(cluster)
         self.assertEqual(ret, configs)
@@ -15,22 +15,20 @@

 import mock

-from sahara import conductor as cond
-from sahara import context
 from sahara.plugins import base as pb
-from sahara.tests.unit import base
-from sahara.utils import edp
-
-conductor = cond.API
+from sahara.plugins import conductor
+from sahara.plugins import context
+from sahara.plugins import edp
+from sahara_plugin_vanilla.tests.unit import base


 class VanillaPluginTest(base.SaharaWithDbTestCase):
     def setUp(self):
         super(VanillaPluginTest, self).setUp()
+        self.override_config("plugins", ["vanilla"])
         pb.setup_plugins()

-    @mock.patch('sahara.service.edp.hdfs_helper.create_dir_hadoop2')
+    @mock.patch('sahara.plugins.edp.create_dir_hadoop2')
     def test_edp_calls_hadoop2_create_dir(self, create_dir):
         for version in ['2.7.1']:
             cluster_dict = {
@@ -16,7 +16,7 @@
 import mock
 import testtools

-from sahara.plugins.vanilla.hadoop2 import recommendations_utils as ru
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import recommendations_utils

 CONFIGURATION_SCHEMA = {
     'cluster_configs': {
@@ -54,7 +54,7 @@ class TestVersionHandler(testtools.TestCase):
                 'HadoopAutoConfigsProvider')
     def test_recommend_configs(self, provider):
         f_cluster, f_configs = mock.Mock(), mock.Mock()
-        ru.recommend_configs(f_cluster, f_configs, False)
+        recommendations_utils.recommend_configs(f_cluster, f_configs, False)
         self.assertEqual([
             mock.call(CONFIGURATION_SCHEMA, f_configs, f_cluster, False)
         ], provider.call_args_list)
@@ -13,20 +13,30 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from functools import wraps
 import mock

-from sahara.i18n import _
+
+def mock_event_wrapper(*args, **kwargs):
+    def decorator(f):
+        @wraps(f)
+        def decorated_function(*args, **kwargs):
+            return f(*args, **kwargs)
+        return decorated_function
+    return decorator
+
+
+from sahara.plugins import edp
 from sahara.plugins import utils as pu
-from sahara.plugins.vanilla.hadoop2 import config_helper as c_helper
-from sahara.plugins.vanilla.hadoop2 import run_scripts as rs
-from sahara.tests.unit import base
-from sahara.utils import edp
-from sahara.utils import files
+mock.patch('sahara.plugins.utils.event_wrapper', mock_event_wrapper).start()
+from sahara_plugin_vanilla.i18n import _
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config_helper
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import run_scripts as rs
+from sahara_plugin_vanilla.tests.unit import base


 class RunScriptsTest(base.SaharaTestCase):

-    PLUGINS_PATH = 'sahara.plugins.vanilla.hadoop2.'
+    PLUGINS_PATH = 'sahara_plugin_vanilla.plugins.vanilla.hadoop2.'

     def setUp(self):
         super(RunScriptsTest, self).setUp()
@@ -36,10 +46,11 @@ class RunScriptsTest(base.SaharaTestCase):
         self.remote.__enter__ = self.remote
         self.remote.__exit__ = mock.Mock()
         self.instance.remote.return_value = self.remote
+        pu.event_wrapper = mock_event_wrapper

     @mock.patch(PLUGINS_PATH + 'run_scripts._start_processes')
-    @mock.patch('sahara.context.set_current_instance_id')
-    @mock.patch('sahara.utils.cluster_progress_ops.add_provisioning_step')
+    @mock.patch('sahara.plugins.context.set_current_instance_id')
+    @mock.patch('sahara.plugins.utils.add_provisioning_step')
     @mock.patch('sahara.plugins.utils.instances_with_services')
     def test_start_dn_nm_processes(self, instances_with_services,
                                    add_provisioning_step,
@@ -60,21 +71,21 @@ class RunScriptsTest(base.SaharaTestCase):
         set_current_instance_id.assert_called_once_with('123')
         _start_processes.assert_called_once_with(ins, ['datanode'])

-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
     def test_start_processes_datanode(self, check_cluster_exists):
         processes = ['datanode']
         rs._start_processes(self.instance, processes)
         self.r.execute_command.assert_called_once_with(
             'sudo su - -c "hadoop-daemon.sh start datanode" hadoop')

-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
     def test_start_processes_nodemanager(self, check_cluster_exists):
         processes = ['nodemanager']
         rs._start_processes(self.instance, processes)
         self.r.execute_command.assert_called_once_with(
             'sudo su - -c "yarn-daemon.sh start nodemanager" hadoop')

-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
     def test_start_processes_both(self, check_cluster_exists):
         processes = ['datanode', 'nodemanager']
         rs._start_processes(self.instance, processes)
@@ -95,8 +106,8 @@ class RunScriptsTest(base.SaharaTestCase):
         self.remote.execute_command.assert_called_once_with(
             'sudo su - -c "yarn-daemon.sh start %s" hadoop' % process)

-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
-    @mock.patch('sahara.utils.cluster_progress_ops.add_provisioning_step')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.add_provisioning_step')
     def test_start_historyserver(self, add_provisioning_step,
                                  check_cluster_exists):
         rs.start_historyserver(self.instance)
@@ -109,9 +120,9 @@ class RunScriptsTest(base.SaharaTestCase):
     @mock.patch(PLUGINS_PATH + 'run_scripts._start_mysql')
     @mock.patch(PLUGINS_PATH + 'config_helper.is_mysql_enabled')
     @mock.patch(PLUGINS_PATH + 'utils.get_oozie_password')
-    @mock.patch('sahara.context.set_current_instance_id')
-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
-    @mock.patch('sahara.utils.cluster_progress_ops.add_provisioning_step')
+    @mock.patch('sahara.plugins.context.set_current_instance_id')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.add_provisioning_step')
     def test_start_oozie_process(self, add_provisioning_step,
                                  check_cluster_exists,
                                  set_current_instance_id, get_oozie_password,
@@ -120,8 +131,9 @@ class RunScriptsTest(base.SaharaTestCase):
         self.instance.instance_id = '112233'
         pctx = mock.Mock()
         is_mysql_enabled.return_value = True
-        sql_script = files.get_file_text(
-            'plugins/vanilla/hadoop2/resources/create_oozie_db.sql')
+        sql_script = pu.get_file_text(
+            'plugins/vanilla/hadoop2/resources/create_oozie_db.sql',
+            'sahara_plugin_vanilla')
         get_oozie_password.return_value = '123'
         pwd_script = sql_script.replace('password', '123')
         rs.start_oozie_process(pctx, self.instance)
@@ -138,9 +150,9 @@ class RunScriptsTest(base.SaharaTestCase):
         _start_oozie.assert_called_once_with(self.r)

     @mock.patch(PLUGINS_PATH + 'config_helper.get_spark_home')
-    @mock.patch('sahara.context.set_current_instance_id')
-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
-    @mock.patch('sahara.utils.cluster_progress_ops.add_provisioning_step')
+    @mock.patch('sahara.plugins.context.set_current_instance_id')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.add_provisioning_step')
     def test_start_spark_history_server(self, add_provisioning_step,
                                         check_cluster_exists,
                                         set_current_instance_id,
@@ -158,9 +170,9 @@ class RunScriptsTest(base.SaharaTestCase):
         self.remote.execute_command.assert_called_once_with(
             'sudo su - -c "hdfs namenode -format" hadoop')

-    @mock.patch('sahara.plugins.vanilla.utils.get_namenode')
-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
-    @mock.patch('sahara.utils.cluster_progress_ops.add_provisioning_step')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.utils.get_namenode')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.add_provisioning_step')
     def test_refresh_hadoop_nodes(self, add_provisioning_step,
                                   check_cluster_exists, get_namenode):
         cluster = mock.Mock()
@@ -170,9 +182,10 @@ class RunScriptsTest(base.SaharaTestCase):
         self.remote.execute_command.assert_called_once_with(
             'sudo su - -c "hdfs dfsadmin -refreshNodes" hadoop')

-    @mock.patch('sahara.plugins.vanilla.utils.get_resourcemanager')
-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
-    @mock.patch('sahara.utils.cluster_progress_ops.add_provisioning_step')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.utils.'
+                'get_resourcemanager')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.add_provisioning_step')
     def test_refresh_yarn_nodes(self, add_provisioning_step,
                                 check_cluster_exists, get_resourcemanager):
         cluster = mock.Mock()
@@ -207,11 +220,11 @@ class RunScriptsTest(base.SaharaTestCase):
         self.r.execute_command.assert_called_once_with(
             'sudo su - -c "/opt/oozie/bin/oozied.sh start" hadoop')

-    @mock.patch('sahara.plugins.vanilla.utils.get_namenode')
-    @mock.patch('sahara.plugins.vanilla.utils.get_datanodes')
-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
-    @mock.patch('sahara.utils.cluster_progress_ops.add_provisioning_step')
-    @mock.patch('sahara.utils.poll_utils.plugin_option_poll')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.utils.get_namenode')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.utils.get_datanodes')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.add_provisioning_step')
+    @mock.patch('sahara.plugins.utils.plugin_option_poll')
     def test_await_datanodes(self, plugin_option_poll, add_provisioning_step,
                              check_cluster_exists, get_datanodes,
                              get_namenode):
@@ -226,7 +239,7 @@ class RunScriptsTest(base.SaharaTestCase):
         get_namenode.return_value = namenode
         mess = _('Waiting on 1 datanodes to start up')
         test_data = {'remote': r, 'count': 1}
-        timeout = c_helper.DATANODES_STARTUP_TIMEOUT
+        timeout = config_helper.DATANODES_STARTUP_TIMEOUT
         rs.await_datanodes(cluster)
         get_datanodes.assert_called_once_with(cluster)
         get_namenode.assert_called_once_with(cluster)
@@ -276,10 +289,10 @@ class RunScriptsTest(base.SaharaTestCase):
     @mock.patch(PLUGINS_PATH + 'run_scripts._start_mysql')
     @mock.patch(PLUGINS_PATH + 'run_scripts._hive_copy_shared_conf')
     @mock.patch(PLUGINS_PATH + 'run_scripts._hive_create_warehouse_dir')
-    @mock.patch('sahara.plugins.vanilla.utils.get_oozie')
-    @mock.patch('sahara.context.set_current_instance_id')
-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
-    @mock.patch('sahara.utils.cluster_progress_ops.add_provisioning_step')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.utils.get_oozie')
+    @mock.patch('sahara.plugins.context.set_current_instance_id')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.add_provisioning_step')
     def test_start_hiveserver_process(self, add_provisioning_step,
                                       check_cluster_exists,
                                       set_current_instance_id, get_oozie,
@@ -294,8 +307,9 @@ class RunScriptsTest(base.SaharaTestCase):
         self.instance.cluster.hadoop_version = '2.7.1'
         ng_cluster = self.instance.node_group.cluster
         get_oozie.return_value = None
-        sql_script = files.get_file_text(
-            'plugins/vanilla/v2_7_1/resources/create_hive_db.sql')
+        sql_script = pu.get_file_text(
+            'plugins/vanilla/v2_7_1/resources/create_hive_db.sql',
+            'sahara_plugin_vanilla')
         get_hive_password.return_value = '123'
         pwd_script = sql_script.replace('{{password}}', '123')
         rs.start_hiveserver_process(pctx, self.instance)
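Editor's note: the `mock_event_wrapper` decorator added at the top of this test module is a transparent pass-through, which is why the tests can patch `sahara.plugins.utils.event_wrapper` with it and ignore provisioning events. A minimal standalone sketch (the `step` function and its arguments are hypothetical, only for illustration) shows that a wrapped function behaves exactly as before:

from functools import wraps


def mock_event_wrapper(*args, **kwargs):
    # Same shape as the decorator added in the diff above: accept any
    # decorator arguments and return the wrapped function unchanged.
    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            return f(*args, **kwargs)
        return decorated_function
    return decorator


@mock_event_wrapper('fake step name', param=('cluster', 0))
def step(cluster):
    # Hypothetical stand-in for a run_scripts helper.
    return 'started %s' % cluster


assert step('c1') == 'started c1'  # the wrapper adds no behaviour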
@@ -15,16 +15,16 @@

 import mock

-from sahara.i18n import _
-from sahara.plugins.vanilla.hadoop2 import config_helper as c_helper
-from sahara.plugins.vanilla.hadoop2 import scaling
-from sahara.plugins.vanilla.hadoop2 import utils as pu
-from sahara.tests.unit import base
+from sahara_plugin_vanilla.i18n import _
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import config_helper
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import scaling
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import utils as pu
+from sahara_plugin_vanilla.tests.unit import base


 class ScalingTest(base.SaharaTestCase):

-    PLUGINS_PATH = 'sahara.plugins.vanilla.hadoop2.'
+    PLUGINS_PATH = 'sahara_plugin_vanilla.plugins.vanilla.hadoop2.'

     def setUp(self):
         super(ScalingTest, self).setUp()
@@ -37,8 +37,9 @@ class ScalingTest(base.SaharaTestCase):
                                  return_value=self.r)
         self.instance.remote.return_value.__exit__ = mock.Mock()

-    @mock.patch('sahara.swift.swift_helper.install_ssl_certs')
-    @mock.patch('sahara.plugins.vanilla.utils.get_resourcemanager')
+    @mock.patch('sahara.plugins.swift_helper.install_ssl_certs')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.utils.'
+                'get_resourcemanager')
     @mock.patch(PLUGINS_PATH + 'run_scripts.refresh_zk_servers')
     @mock.patch(PLUGINS_PATH + 'config.configure_zookeeper')
     @mock.patch(PLUGINS_PATH + 'run_scripts.start_dn_nm_processes')
@@ -81,10 +82,10 @@ class ScalingTest(base.SaharaTestCase):
         ret = scaling._get_instances_with_service(instances, service)
         self.assertEqual(ret, [ins_1])

-    @mock.patch('sahara.plugins.vanilla.utils.get_nodemanagers')
-    @mock.patch('sahara.plugins.vanilla.utils.get_datanodes')
-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
-    @mock.patch('sahara.utils.cluster_progress_ops.add_provisioning_step')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.utils.get_nodemanagers')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.utils.get_datanodes')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.add_provisioning_step')
     @mock.patch('sahara.plugins.utils.generate_fqdn_host_names')
     @mock.patch('sahara.plugins.utils.get_instances')
     def test_update_include_files(self, get_instances,
@@ -120,7 +121,8 @@ class ScalingTest(base.SaharaTestCase):
                                              host, DIR))]
         self.r.execute_command.assert_has_calls(command_calls, any_order=True)

-    @mock.patch('sahara.plugins.vanilla.utils.get_resourcemanager')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.utils.'
+                'get_resourcemanager')
     @mock.patch(PLUGINS_PATH + 'run_scripts.refresh_zk_servers')
     @mock.patch(PLUGINS_PATH + 'config.configure_zookeeper')
     @mock.patch(PLUGINS_PATH + 'config.configure_topology_data')
@@ -217,7 +219,7 @@ class ScalingTest(base.SaharaTestCase):
         ret = scaling.is_decommissioned(cluster, check_func, instances)
         self.assertEqual(ret, False)

-    @mock.patch('sahara.utils.poll_utils.plugin_option_poll')
+    @mock.patch('sahara.plugins.utils.plugin_option_poll')
     def test_check_decommission(self, plugin_option_poll):
         check_func = mock.Mock()
         option = mock.Mock()
@@ -233,12 +235,12 @@ class ScalingTest(base.SaharaTestCase):
                                                     sample_dict)

     @mock.patch(PLUGINS_PATH + 'scaling._check_decommission')
-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
-    @mock.patch('sahara.utils.cluster_progress_ops.add_provisioning_step')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.add_provisioning_step')
     def test_check_nodemanagers_decommission(self, add_provisioning_step,
                                              check_cluster_exists,
                                              _check_decommission):
-        timeout = c_helper.NODEMANAGERS_DECOMMISSIONING_TIMEOUT
+        timeout = config_helper.NODEMANAGERS_DECOMMISSIONING_TIMEOUT
         status = pu.get_nodemanagers_status
         scaling._check_nodemanagers_decommission(self.cluster, self.instances)
         _check_decommission.assert_called_once_with(self.cluster,
@@ -246,12 +248,12 @@ class ScalingTest(base.SaharaTestCase):
                                                     status, timeout)

     @mock.patch(PLUGINS_PATH + 'scaling._check_decommission')
-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
-    @mock.patch('sahara.utils.cluster_progress_ops.add_provisioning_step')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.add_provisioning_step')
     def test_check_datanodes_decommission(self, add_provisioning_step,
                                           check_cluster_exists,
                                           _check_decommission):
-        timeout = c_helper.DATANODES_DECOMMISSIONING_TIMEOUT
+        timeout = config_helper.DATANODES_DECOMMISSIONING_TIMEOUT
         status = pu.get_datanodes_status
         scaling._check_datanodes_decommission(self.cluster, self.instances)
         _check_decommission.assert_called_once_with(self.cluster,
@@ -15,13 +15,13 @@

 import mock

-from sahara.plugins.vanilla.hadoop2 import starting_scripts as s_scripts
-from sahara.tests.unit import base
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import starting_scripts
+from sahara_plugin_vanilla.tests.unit import base


 class StartingScriptsTest(base.SaharaTestCase):

-    plugins_path = 'sahara.plugins.vanilla.'
+    plugins_path = 'sahara_plugin_vanilla.plugins.vanilla.'

     def setUp(self):
         super(StartingScriptsTest, self).setUp()
@@ -32,11 +32,11 @@ class StartingScriptsTest(base.SaharaTestCase):
     def test_start_namenode(self, _start_namenode, get_namenode):
         namenode = mock.Mock()
         get_namenode.return_value = namenode
-        s_scripts.start_namenode(self.cluster)
+        starting_scripts.start_namenode(self.cluster)
         get_namenode.assert_called_once_with(self.cluster)
         _start_namenode.assert_called_once_with(namenode)

-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
     @mock.patch(plugins_path + 'hadoop2.run_scripts.start_hadoop_process')
     @mock.patch(plugins_path + 'hadoop2.run_scripts.format_namenode')
     def test__start_namenode(self, format_namenode,
@@ -44,7 +44,7 @@ class StartingScriptsTest(base.SaharaTestCase):
                              check_cluster_exists):
         check_cluster_exists.return_value = None
         nn = mock.Mock()
-        s_scripts._start_namenode(nn)
+        starting_scripts._start_namenode(nn)
         format_namenode.assert_called_once_with(nn)
         start_hadoop_process.assert_called_once_with(nn, 'namenode')

@@ -54,21 +54,21 @@ class StartingScriptsTest(base.SaharaTestCase):
     def test_start_secondarynamenode(self, get_secondarynamenode,
                                      _start_secondarynamenode):
         get_secondarynamenode.return_value = 0
-        s_scripts.start_secondarynamenode(self.cluster)
+        starting_scripts.start_secondarynamenode(self.cluster)
         get_secondarynamenode.assert_called_once_with(self.cluster)

         get_secondarynamenode.return_value = 1
-        s_scripts.start_secondarynamenode(self.cluster)
+        starting_scripts.start_secondarynamenode(self.cluster)
         _start_secondarynamenode.assert_called_once_with(1)
         self.assertEqual(get_secondarynamenode.call_count, 2)

-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
     @mock.patch(plugins_path + 'hadoop2.run_scripts.start_hadoop_process')
     def test__start_secondarynamenode(self, start_hadoop_process,
                                       check_cluster_exists):
         check_cluster_exists.return_value = None
         snn = mock.Mock()
-        s_scripts._start_secondarynamenode(snn)
+        starting_scripts._start_secondarynamenode(snn)
         start_hadoop_process.assert_called_once_with(snn,
                                                      'secondarynamenode')

@@ -78,21 +78,21 @@ class StartingScriptsTest(base.SaharaTestCase):
     def test_start_resourcemanager(self, get_resourcemanager,
                                    _start_resourcemanager):
         get_resourcemanager.return_value = 0
-        s_scripts.start_resourcemanager(self.cluster)
+        starting_scripts.start_resourcemanager(self.cluster)
         get_resourcemanager.assert_called_once_with(self.cluster)

         get_resourcemanager.return_value = 1
-        s_scripts.start_resourcemanager(self.cluster)
+        starting_scripts.start_resourcemanager(self.cluster)
         _start_resourcemanager.assert_called_once_with(1)
         self.assertEqual(get_resourcemanager.call_count, 2)

-    @mock.patch('sahara.utils.cluster.check_cluster_exists')
+    @mock.patch('sahara.plugins.utils.check_cluster_exists')
     @mock.patch(plugins_path + 'hadoop2.run_scripts.start_yarn_process')
     def test__start_resourcemanager(self, start_yarn_process,
                                     check_cluster_exists):
         check_cluster_exists.return_value = None
         snn = mock.Mock()
-        s_scripts._start_resourcemanager(snn)
+        starting_scripts._start_resourcemanager(snn)
         start_yarn_process.assert_called_once_with(snn,
                                                    'resourcemanager')

@@ -101,11 +101,11 @@ class StartingScriptsTest(base.SaharaTestCase):
     def test_start_historyserver(self, get_historyserver,
                                  start_historyserver):
         get_historyserver.return_value = 0
-        s_scripts.start_historyserver(self.cluster)
+        starting_scripts.start_historyserver(self.cluster)
         get_historyserver.assert_called_once_with(self.cluster)

         get_historyserver.return_value = 1
-        s_scripts.start_historyserver(self.cluster)
+        starting_scripts.start_historyserver(self.cluster)
         start_historyserver.assert_called_once_with(1)
         self.assertEqual(get_historyserver.call_count, 2)

@@ -114,11 +114,11 @@ class StartingScriptsTest(base.SaharaTestCase):
     def test_start_oozie(self, get_oozie, start_oozie_process):
         pctx = mock.Mock()
         get_oozie.return_value = 0
-        s_scripts.start_oozie(pctx, self.cluster)
+        starting_scripts.start_oozie(pctx, self.cluster)
         get_oozie.assert_called_once_with(self.cluster)

         get_oozie.return_value = 1
-        s_scripts.start_oozie(pctx, self.cluster)
+        starting_scripts.start_oozie(pctx, self.cluster)
         start_oozie_process.assert_called_once_with(pctx, 1)
         self.assertEqual(get_oozie.call_count, 2)

@@ -129,11 +129,11 @@ class StartingScriptsTest(base.SaharaTestCase):
                               start_hiveserver_process):
         pctx = mock.Mock()
         get_hiveserver.return_value = 0
-        s_scripts.start_hiveserver(pctx, self.cluster)
+        starting_scripts.start_hiveserver(pctx, self.cluster)
         get_hiveserver.assert_called_once_with(self.cluster)

         get_hiveserver.return_value = 1
-        s_scripts.start_hiveserver(pctx, self.cluster)
+        starting_scripts.start_hiveserver(pctx, self.cluster)
         start_hiveserver_process.assert_called_once_with(pctx, 1)
         self.assertEqual(get_hiveserver.call_count, 2)

@@ -143,10 +143,10 @@ class StartingScriptsTest(base.SaharaTestCase):
     def test_start_spark(self, get_spark_history_server,
                          start_spark_history_server):
         get_spark_history_server.return_value = 0
-        s_scripts.start_spark(self.cluster)
+        starting_scripts.start_spark(self.cluster)
         get_spark_history_server.assert_called_once_with(self.cluster)

         get_spark_history_server.return_value = 1
-        s_scripts.start_spark(self.cluster)
+        starting_scripts.start_spark(self.cluster)
         start_spark_history_server.assert_called_once_with(1)
         self.assertEqual(get_spark_history_server.call_count, 2)
@@ -15,16 +15,17 @@

 import mock

-from sahara.plugins.vanilla.hadoop2 import utils as u
-from sahara.tests.unit import base
-from sahara.utils import files
+from sahara.plugins import utils
+from sahara_plugin_vanilla.plugins.vanilla.hadoop2 import utils as u
+from sahara_plugin_vanilla.tests.unit import base


 class UtilsTestCase(base.SaharaTestCase):
-    @mock.patch('sahara.plugins.vanilla.utils.get_namenode')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.utils.get_namenode')
     def test_datanodes_status(self, nn):
-        report = files.get_file_text(
-            'tests/unit/plugins/vanilla/hadoop2/resources/dfs-report.txt')
+        report = utils.get_file_text(
+            'tests/unit/plugins/vanilla/hadoop2/resources/dfs-report.txt',
+            'sahara_plugin_vanilla')

         nn.return_value = self._get_instance(report)
         statuses = u.get_datanodes_status(None)
@@ -38,10 +39,12 @@ class UtilsTestCase(base.SaharaTestCase):

         self.assertEqual(expected, statuses)

-    @mock.patch('sahara.plugins.vanilla.utils.get_resourcemanager')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.utils.'
+                'get_resourcemanager')
     def test_nodemanagers_status(self, rm):
-        report = files.get_file_text(
-            'tests/unit/plugins/vanilla/hadoop2/resources/yarn-report.txt')
+        report = utils.get_file_text(
+            'tests/unit/plugins/vanilla/hadoop2/resources/yarn-report.txt',
+            'sahara_plugin_vanilla')

         rm.return_value = self._get_instance(report)
         statuses = u.get_nodemanagers_status(None)
@@ -65,11 +68,11 @@ class UtilsTestCase(base.SaharaTestCase):

         return inst

-    @mock.patch('sahara.conductor.API.cluster_get')
-    @mock.patch('sahara.service.castellan.utils.get_secret')
-    @mock.patch('sahara.service.castellan.utils.store_secret')
-    @mock.patch('sahara.plugins.vanilla.utils')
-    @mock.patch('sahara.conductor.API.cluster_update')
+    @mock.patch('sahara.plugins.conductor.cluster_get')
+    @mock.patch('sahara.plugins.castellan_utils.get_secret')
+    @mock.patch('sahara.plugins.castellan_utils.store_secret')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.utils')
+    @mock.patch('sahara.plugins.conductor.cluster_update')
     def test_oozie_password(self, cluster_update, vu,
                             store_secret, get_secret, conductor):
         cluster = mock.MagicMock()
@@ -91,7 +94,7 @@ class UtilsTestCase(base.SaharaTestCase):
         result = u.get_oozie_password(cluster)
         self.assertEqual('oozie_pass', result)

-    @mock.patch('sahara.service.castellan.utils.delete_secret')
+    @mock.patch('sahara.plugins.castellan_utils.delete_secret')
     def test_delete_oozie_password(self, delete_secret):
         cluster = mock.MagicMock()
         cluster.extra.to_dict = mock.MagicMock()
@@ -104,10 +107,10 @@ class UtilsTestCase(base.SaharaTestCase):
         u.delete_oozie_password(cluster)
         delete_secret.assert_called_once_with("31415926")

-    @mock.patch('sahara.conductor.API.cluster_get')
-    @mock.patch('sahara.service.castellan.utils.get_secret')
-    @mock.patch('sahara.service.castellan.utils.store_secret')
-    @mock.patch('sahara.conductor.API.cluster_update')
+    @mock.patch('sahara.plugins.conductor.cluster_get')
+    @mock.patch('sahara.plugins.castellan_utils.get_secret')
+    @mock.patch('sahara.plugins.castellan_utils.store_secret')
+    @mock.patch('sahara.plugins.conductor.cluster_update')
     def test_get_hive_password(self, cluster_update,
                                store_secret, get_secret, conductor):
         cluster = mock.MagicMock()
@@ -127,7 +130,7 @@ class UtilsTestCase(base.SaharaTestCase):
         result = u.get_hive_password(cluster)
         self.assertEqual('hive_pass', result)

-    @mock.patch('sahara.service.castellan.utils.delete_secret')
+    @mock.patch('sahara.plugins.castellan_utils.delete_secret')
     def test_delete_hive_password(self, delete_secret):
         cluster = mock.MagicMock()
@@ -16,9 +16,9 @@
 import testtools

 from sahara.plugins import exceptions as ex
-from sahara.plugins.vanilla import plugin as p
-from sahara.tests.unit import base
-from sahara.tests.unit import testutils as tu
+from sahara.plugins import testutils as tu
+from sahara_plugin_vanilla.plugins.vanilla import plugin as p
+from sahara_plugin_vanilla.tests.unit import base


 class ValidationTest(base.SaharaTestCase):
@@ -15,36 +15,40 @@

 import mock

-from sahara.plugins.vanilla import confighints_helper as ch_helper
-from sahara.tests.unit import base as sahara_base
+from sahara_plugin_vanilla.plugins.vanilla import confighints_helper
+from sahara_plugin_vanilla.tests.unit import base as sahara_base


 class ConfigHintsHelperTest(sahara_base.SaharaTestCase):
-    @mock.patch('sahara.utils.xmlutils.load_hadoop_xml_defaults',
+    @mock.patch('sahara.plugins.utils.load_hadoop_xml_defaults',
                 return_value=[])
     def test_get_possible_hive_config_from(self, load_hadoop_xml_defaults):
         expected_config = {
             'configs': [],
             'params': {}
         }
-        actual_config = ch_helper.get_possible_hive_config_from(
+        actual_config = confighints_helper.get_possible_hive_config_from(
             'sample-config.xml')
-        load_hadoop_xml_defaults.assert_called_once_with('sample-config.xml')
+        load_hadoop_xml_defaults.assert_called_once_with(
+            'sample-config.xml', 'sahara_plugin_vanilla')
         self.assertEqual(expected_config, actual_config)

-    @mock.patch('sahara.utils.xmlutils.load_hadoop_xml_defaults',
+    @mock.patch('sahara.plugins.utils.load_hadoop_xml_defaults',
+                return_value=[])
+    @mock.patch('sahara.plugins.edp.get_possible_mapreduce_configs',
                 return_value=[])
     def test_get_possible_mapreduce_config_from(
-            self, load_hadoop_xml_defaults):
+            self, get_possible_mapreduce_configs, load_hadoop_xml_defaults):
         expected_config = {
             'configs': [],
         }
-        actual_config = ch_helper.get_possible_mapreduce_config_from(
+        actual_config = confighints_helper.get_possible_mapreduce_config_from(
             'sample-config.xml')
-        load_hadoop_xml_defaults.assert_any_call('sample-config.xml')
+        load_hadoop_xml_defaults.assert_any_call('sample-config.xml',
+                                                 'sahara_plugin_vanilla')
         self.assertEqual(expected_config, actual_config)

-    @mock.patch('sahara.utils.xmlutils.load_hadoop_xml_defaults',
+    @mock.patch('sahara.plugins.utils.load_hadoop_xml_defaults',
                 return_value=[])
     def test_get_possible_pig_config_from(
             self, load_hadoop_xml_defaults):
@@ -53,7 +57,8 @@ class ConfigHintsHelperTest(sahara_base.SaharaTestCase):
             'args': [],
             'params': {}
         }
-        actual_config = ch_helper.get_possible_pig_config_from(
+        actual_config = confighints_helper.get_possible_pig_config_from(
             'sample-config.xml')
-        load_hadoop_xml_defaults.assert_called_once_with('sample-config.xml')
+        load_hadoop_xml_defaults.assert_called_once_with(
+            'sample-config.xml', 'sahara_plugin_vanilla')
         self.assertEqual(expected_config, actual_config)
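Editor's note: several hunks above switch from one-argument helpers such as `sahara.utils.files.get_file_text(path)` to plugin-aware calls like `sahara.plugins.utils.get_file_text(path, 'sahara_plugin_vanilla')` and `load_hadoop_xml_defaults(path, package)`; the extra argument names the package whose bundled resources should be read now that those files ship with the split-out plugin. A rough, hypothetical sketch of that kind of package-relative lookup (not the actual Sahara implementation) is:

import pkgutil


def get_file_text(file_name, package):
    # Hypothetical helper: read a text resource relative to the named
    # package, e.g. get_file_text('plugins/vanilla/hadoop2/resources/'
    # 'create_oozie_db.sql', 'sahara_plugin_vanilla').
    data = pkgutil.get_data(package, file_name)
    if data is None:
        raise IOError('Resource %s not found in %s' % (file_name, package))
    return data.decode('utf-8')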
@@ -13,10 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from sahara.plugins.vanilla import plugin as p
-from sahara.plugins.vanilla import utils as u
-from sahara.tests.unit import base
-from sahara.tests.unit import testutils as tu
+from sahara.plugins import testutils as tu
+from sahara_plugin_vanilla.plugins.vanilla import plugin as p
+from sahara_plugin_vanilla.plugins.vanilla import utils as u
+from sahara_plugin_vanilla.tests.unit import base


 class TestUtils(base.SaharaWithDbTestCase):
@@ -16,14 +16,14 @@
 import mock

 from sahara.plugins import provisioning as p
-from sahara.plugins.vanilla.v2_7_1 import config_helper as v_helper
-from sahara.tests.unit import base
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_1 import config_helper
+from sahara_plugin_vanilla.tests.unit import base


 class TestConfigHelper(base.SaharaTestCase):

-    plugin_path = 'sahara.plugins.vanilla.v2_7_1.'
-    plugin_hadoop_path = 'sahara.plugins.vanilla.hadoop2.'
+    plugin_path = 'sahara_plugin_vanilla.plugins.vanilla.v2_7_1.'
+    plugin_hadoop_path = 'sahara_plugin_vanilla.plugins.vanilla.hadoop2.'

     def setUp(self):
         super(TestConfigHelper, self).setUp()
@@ -45,29 +45,29 @@ class TestConfigHelper(base.SaharaTestCase):
         configs.extend(PLUGIN_GENERAL_CONFIGS)
         configs.extend(_get_spark_configs())
         configs.extend(_get_zk_configs())
-        init_configs = v_helper._init_all_configs()
+        init_configs = config_helper._init_all_configs()
         self.assertEqual(init_configs, configs)

     def test_get_spark_opt_default(self):
         opt_name = 'Executor extra classpath'
         _default_executor_classpath = ":".join(
             ['/opt/hadoop/share/hadoop/tools/lib/hadoop-openstack-2.7.1.jar'])
-        default = v_helper._get_spark_opt_default(opt_name)
+        default = config_helper._get_spark_opt_default(opt_name)
         self.assertEqual(default, _default_executor_classpath)

     def test_get_spark_configs(self):
-        spark_configs = v_helper._get_spark_configs()
+        spark_configs = config_helper._get_spark_configs()
         for i in spark_configs:
             self.assertIsInstance(i, p.Config)

     def test_get_plugin_configs(self):
-        self.assertEqual(v_helper.get_plugin_configs(),
-                         v_helper.PLUGIN_CONFIGS)
+        self.assertEqual(config_helper.get_plugin_configs(),
+                         config_helper.PLUGIN_CONFIGS)

     def test_get_xml_configs(self):
-        self.assertEqual(v_helper.get_xml_configs(),
-                         v_helper.PLUGIN_XML_CONFIGS)
+        self.assertEqual(config_helper.get_xml_configs(),
+                         config_helper.PLUGIN_XML_CONFIGS)

     def test_get_env_configs(self):
-        self.assertEqual(v_helper.get_env_configs(),
-                         v_helper.ENV_CONFS)
+        self.assertEqual(config_helper.get_env_configs(),
+                         config_helper.ENV_CONFS)
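Besides dropping the v_helper alias, these tests build their mock.patch targets from class-level path prefixes, so the package rename only touches those constants. A small illustrative sketch of that pattern (generic names, not the project's code; os.path.join is used only because the plugin package is not installed here):

import os.path
from unittest import mock

# Building patch targets from a single prefix keeps them in sync when a
# package moves; this prefix mirrors the renamed plugin package above.
plugin_path = 'sahara_plugin_vanilla.plugins.vanilla.v2_7_1.'
target = plugin_path + 'config_helper.get_plugin_configs'
assert target == ('sahara_plugin_vanilla.plugins.vanilla.v2_7_1.'
                  'config_helper.get_plugin_configs')

# mock.patch accepts any importable dotted name; os.path.join stands in for
# the real target in this sketch.
with mock.patch('os.path.join', return_value='patched'):
    assert os.path.join('a', 'b') == 'patched'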
@@ -15,14 +15,14 @@

 import mock

-from sahara.plugins.vanilla.v2_7_1 import edp_engine
-from sahara.tests.unit import base as sahara_base
-from sahara.utils import edp
+from sahara.plugins import edp
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_1 import edp_engine
+from sahara_plugin_vanilla.tests.unit import base as sahara_base


 class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
     @mock.patch(
-        'sahara.plugins.vanilla.confighints_helper.'
+        'sahara_plugin_vanilla.plugins.vanilla.confighints_helper.'
         'get_possible_hive_config_from',
         return_value={})
     def test_get_possible_job_config_hive(
@@ -34,7 +34,8 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
             'plugins/vanilla/v2_7_1/resources/hive-default.xml')
         self.assertEqual(expected_config, actual_config)

-    @mock.patch('sahara.plugins.vanilla.hadoop2.edp_engine.EdpOozieEngine')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.hadoop2.edp_engine.'
+                'EdpOozieEngine')
     def test_get_possible_job_config_java(self, BaseVanillaEdpOozieEngine):
         expected_config = {'job_config': {}}
         BaseVanillaEdpOozieEngine.get_possible_job_config.return_value = (
@@ -46,7 +47,7 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
         self.assertEqual(expected_config, actual_config)

     @mock.patch(
-        'sahara.plugins.vanilla.confighints_helper.'
+        'sahara_plugin_vanilla.plugins.vanilla.confighints_helper.'
         'get_possible_mapreduce_config_from',
         return_value={})
     def test_get_possible_job_config_mapreduce(
@@ -59,7 +60,7 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
         self.assertEqual(expected_config, actual_config)

     @mock.patch(
-        'sahara.plugins.vanilla.confighints_helper.'
+        'sahara_plugin_vanilla.plugins.vanilla.confighints_helper.'
         'get_possible_mapreduce_config_from',
         return_value={})
     def test_get_possible_job_config_mapreduce_streaming(
@@ -72,7 +73,7 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
         self.assertEqual(expected_config, actual_config)

     @mock.patch(
-        'sahara.plugins.vanilla.confighints_helper.'
+        'sahara_plugin_vanilla.plugins.vanilla.confighints_helper.'
         'get_possible_pig_config_from',
         return_value={})
     def test_get_possible_job_config_pig(
@@ -84,7 +85,8 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
             'plugins/vanilla/v2_7_1/resources/mapred-default.xml')
         self.assertEqual(expected_config, actual_config)

-    @mock.patch('sahara.plugins.vanilla.hadoop2.edp_engine.EdpOozieEngine')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.hadoop2.edp_engine.'
+                'EdpOozieEngine')
     def test_get_possible_job_config_shell(self, BaseVanillaEdpOozieEngine):
         expected_config = {'job_config': {}}
         BaseVanillaEdpOozieEngine.get_possible_job_config.return_value = (
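The longer package name pushes several patch targets past the line-length limit, which is why the decorators above split the dotted path across two adjacent string literals. Python joins adjacent literals at compile time, so the target is unchanged; a quick check:

target = ('sahara_plugin_vanilla.plugins.vanilla.hadoop2.edp_engine.'
          'EdpOozieEngine')
# Adjacent literals are concatenated by the parser, so this is one dotted name.
assert target == ('sahara_plugin_vanilla.plugins.vanilla.hadoop2.'
                  'edp_engine.EdpOozieEngine')
print(target)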
@@ -17,13 +17,16 @@ import mock
 import six
 import testtools

-from sahara.conductor import resource as r
+from sahara.plugins import base as pb
 from sahara.plugins import exceptions as ex
-from sahara.plugins.vanilla.v2_7_1.edp_engine import EdpOozieEngine
-from sahara.plugins.vanilla.v2_7_1.edp_engine import EdpSparkEngine
-from sahara.plugins.vanilla.v2_7_1 import versionhandler as v_h
-from sahara.tests.unit import base
-from sahara.tests.unit import testutils
+from sahara.plugins import resource as r
+from sahara.plugins import testutils
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_1.edp_engine import \
+    EdpOozieEngine
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_1.edp_engine import \
+    EdpSparkEngine
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_1 import versionhandler as v_h
+from sahara_plugin_vanilla.tests.unit import base


 class TestConfig(object):
@@ -34,13 +37,15 @@ class TestConfig(object):


 class VersionHandlerTest(base.SaharaTestCase):
-    plugin_path = 'sahara.plugins.vanilla.'
-    plugin_hadoop2_path = 'sahara.plugins.vanilla.hadoop2.'
+    plugin_path = 'sahara_plugin_vanilla.plugins.vanilla.'
+    plugin_hadoop2_path = 'sahara_plugin_vanilla.plugins.vanilla.hadoop2.'

     def setUp(self):
         super(VersionHandlerTest, self).setUp()
         self.cluster = mock.Mock()
         self.vh = v_h.VersionHandler()
+        self.override_config("plugins", ["vanilla"])
+        pb.setup_plugins()

     def test_get_plugin_configs(self):
         self.vh.pctx['all_confs'] = 'haha'
@@ -74,11 +79,11 @@ class VersionHandlerTest(base.SaharaTestCase):
         configure_cluster.assert_called_once_with(self.vh.pctx, self.cluster)

     @mock.patch(plugin_path + 'v2_7_1.versionhandler.run')
-    @mock.patch(plugin_path + 'v2_7_1.versionhandler.s_scripts')
-    @mock.patch('sahara.swift.swift_helper.install_ssl_certs')
+    @mock.patch(plugin_path + 'v2_7_1.versionhandler.starting_scripts')
+    @mock.patch('sahara.plugins.swift_helper.install_ssl_certs')
     @mock.patch(plugin_hadoop2_path + 'keypairs.provision_keypairs')
     @mock.patch('sahara.plugins.utils.get_instances')
-    @mock.patch('sahara.utils.cluster.get_instances')
+    @mock.patch('sahara.plugins.utils.cluster_get_instances')
     def test_start_cluster(self, c_get_instances, u_get_instances,
                            provision_keypairs, install_ssl_certs,
                            s_scripts, run):
@@ -113,7 +118,7 @@ class VersionHandlerTest(base.SaharaTestCase):
                                           cluster,
                                           instances)

-    @mock.patch('sahara.utils.general.get_by_id')
+    @mock.patch('sahara.plugins.utils.general.get_by_id')
     @mock.patch(plugin_hadoop2_path +
                 'validation.validate_additional_ng_scaling')
     @mock.patch(plugin_hadoop2_path +
@@ -141,7 +146,7 @@ class VersionHandlerTest(base.SaharaTestCase):
         with testtools.ExpectedException(ex.ClusterCannotBeScaled):
             self.vh.validate_scaling(cluster, existing, {})

-        get_by_id.return_value = r.NodeGroupResource(ng5)
+        get_by_id.return_value = r.create_node_group_resource(ng5)

         with testtools.ExpectedException(ex.ClusterCannotBeScaled):
             self.vh.validate_scaling(cluster, {}, additional)
@@ -158,8 +163,8 @@ class VersionHandlerTest(base.SaharaTestCase):
                                           self.cluster,
                                           instances)

-    @mock.patch("sahara.conductor.API.cluster_update")
-    @mock.patch("sahara.context.ctx")
+    @mock.patch("sahara.plugins.conductor.cluster_update")
+    @mock.patch("sahara.plugins.context.ctx")
     @mock.patch(plugin_path + 'utils.get_namenode')
     @mock.patch(plugin_path + 'utils.get_resourcemanager')
     @mock.patch(plugin_path + 'utils.get_historyserver')
@@ -202,7 +207,7 @@ class VersionHandlerTest(base.SaharaTestCase):
         cluster_update.assert_called_once_with(ctx(), self.cluster,
                                                {'info': info})

-    @mock.patch("sahara.service.edp.job_utils.get_plugin")
+    @mock.patch("sahara.plugins.edp.get_plugin")
    @mock.patch('sahara.plugins.utils.get_instance')
     @mock.patch('os.path.join')
     def test_get_edp_engine(self, join, get_instance, get_plugin):
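The two new setUp lines are the behavioural change in this file: with the vanilla plugin living outside sahara core, each test case has to enable it in configuration and load the plugin entry points before a VersionHandler is exercised. A rough sketch of that shape, using simplified stand-ins for the base test case and the plugin registry (FakeBase and FakeTestCase are not project classes):

from unittest import mock


class FakeBase(object):
    # Stand-in for the plugin registry (sahara.plugins.base in the real tests).
    setup_plugins = mock.Mock()


class FakeTestCase(object):
    # Stand-in for base.SaharaTestCase.override_config.
    def override_config(self, name, value):
        setattr(self, name, value)


class VersionHandlerTestSketch(FakeTestCase):
    def setUp(self):
        self.cluster = mock.Mock()
        # Enable the out-of-tree plugin, then load the plugin entry points.
        self.override_config("plugins", ["vanilla"])
        FakeBase.setup_plugins()


t = VersionHandlerTestSketch()
t.setUp()
assert t.plugins == ["vanilla"]
FakeBase.setup_plugins.assert_called_once_with()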
@@ -16,14 +16,14 @@
 import mock

 from sahara.plugins import provisioning as p
-from sahara.plugins.vanilla.v2_7_5 import config_helper as v_helper
-from sahara.tests.unit import base
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_5 import config_helper
+from sahara_plugin_vanilla.tests.unit import base


 class TestConfigHelper(base.SaharaTestCase):

-    plugin_path = 'sahara.plugins.vanilla.v2_7_5.'
-    plugin_hadoop_path = 'sahara.plugins.vanilla.hadoop2.'
+    plugin_path = 'sahara_plugin_vanilla.plugins.vanilla.v2_7_5.'
+    plugin_hadoop_path = 'sahara_plugin_vanilla.plugins.vanilla.hadoop2.'

     def setUp(self):
         super(TestConfigHelper, self).setUp()
@@ -45,29 +45,29 @@ class TestConfigHelper(base.SaharaTestCase):
         configs.extend(PLUGIN_GENERAL_CONFIGS)
         configs.extend(_get_spark_configs())
         configs.extend(_get_zk_configs())
-        init_configs = v_helper._init_all_configs()
+        init_configs = config_helper._init_all_configs()
         self.assertEqual(init_configs, configs)

     def test_get_spark_opt_default(self):
         opt_name = 'Executor extra classpath'
         _default_executor_classpath = ":".join(
             ['/opt/hadoop/share/hadoop/tools/lib/hadoop-openstack-2.7.5.jar'])
-        default = v_helper._get_spark_opt_default(opt_name)
+        default = config_helper._get_spark_opt_default(opt_name)
         self.assertEqual(default, _default_executor_classpath)

     def test_get_spark_configs(self):
-        spark_configs = v_helper._get_spark_configs()
+        spark_configs = config_helper._get_spark_configs()
         for i in spark_configs:
             self.assertIsInstance(i, p.Config)

     def test_get_plugin_configs(self):
-        self.assertEqual(v_helper.get_plugin_configs(),
-                         v_helper.PLUGIN_CONFIGS)
+        self.assertEqual(config_helper.get_plugin_configs(),
+                         config_helper.PLUGIN_CONFIGS)

     def test_get_xml_configs(self):
-        self.assertEqual(v_helper.get_xml_configs(),
-                         v_helper.PLUGIN_XML_CONFIGS)
+        self.assertEqual(config_helper.get_xml_configs(),
+                         config_helper.PLUGIN_XML_CONFIGS)

     def test_get_env_configs(self):
-        self.assertEqual(v_helper.get_env_configs(),
-                         v_helper.ENV_CONFS)
+        self.assertEqual(config_helper.get_env_configs(),
+                         config_helper.ENV_CONFS)
@@ -15,14 +15,14 @@

 import mock

-from sahara.plugins.vanilla.v2_7_5 import edp_engine
-from sahara.tests.unit import base as sahara_base
-from sahara.utils import edp
+from sahara.plugins import edp
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_5 import edp_engine
+from sahara_plugin_vanilla.tests.unit import base as sahara_base


 class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
     @mock.patch(
-        'sahara.plugins.vanilla.confighints_helper.'
+        'sahara_plugin_vanilla.plugins.vanilla.confighints_helper.'
         'get_possible_hive_config_from',
         return_value={})
     def test_get_possible_job_config_hive(
@@ -34,7 +34,8 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
             'plugins/vanilla/v2_7_5/resources/hive-default.xml')
         self.assertEqual(expected_config, actual_config)

-    @mock.patch('sahara.plugins.vanilla.hadoop2.edp_engine.EdpOozieEngine')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.hadoop2.edp_engine.'
+                'EdpOozieEngine')
     def test_get_possible_job_config_java(self, BaseVanillaEdpOozieEngine):
         expected_config = {'job_config': {}}
         BaseVanillaEdpOozieEngine.get_possible_job_config.return_value = (
@@ -46,7 +47,7 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
         self.assertEqual(expected_config, actual_config)

     @mock.patch(
-        'sahara.plugins.vanilla.confighints_helper.'
+        'sahara_plugin_vanilla.plugins.vanilla.confighints_helper.'
         'get_possible_mapreduce_config_from',
         return_value={})
     def test_get_possible_job_config_mapreduce(
@@ -59,7 +60,7 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
         self.assertEqual(expected_config, actual_config)

     @mock.patch(
-        'sahara.plugins.vanilla.confighints_helper.'
+        'sahara_plugin_vanilla.plugins.vanilla.confighints_helper.'
         'get_possible_mapreduce_config_from',
         return_value={})
     def test_get_possible_job_config_mapreduce_streaming(
@@ -72,7 +73,7 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
         self.assertEqual(expected_config, actual_config)

     @mock.patch(
-        'sahara.plugins.vanilla.confighints_helper.'
+        'sahara_plugin_vanilla.plugins.vanilla.confighints_helper.'
         'get_possible_pig_config_from',
         return_value={})
     def test_get_possible_job_config_pig(
@@ -84,7 +85,8 @@ class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
             'plugins/vanilla/v2_7_5/resources/mapred-default.xml')
         self.assertEqual(expected_config, actual_config)

-    @mock.patch('sahara.plugins.vanilla.hadoop2.edp_engine.EdpOozieEngine')
+    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.hadoop2.edp_engine.'
+                'EdpOozieEngine')
     def test_get_possible_job_config_shell(self, BaseVanillaEdpOozieEngine):
         expected_config = {'job_config': {}}
         BaseVanillaEdpOozieEngine.get_possible_job_config.return_value = (
@@ -17,13 +17,16 @@ import mock
 import six
 import testtools

-from sahara.conductor import resource as r
+from sahara.plugins import base as pb
 from sahara.plugins import exceptions as ex
-from sahara.plugins.vanilla.v2_7_5.edp_engine import EdpOozieEngine
-from sahara.plugins.vanilla.v2_7_5.edp_engine import EdpSparkEngine
-from sahara.plugins.vanilla.v2_7_5 import versionhandler as v_h
-from sahara.tests.unit import base
-from sahara.tests.unit import testutils
+from sahara.plugins import resource as r
+from sahara.plugins import testutils
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_5.edp_engine import \
+    EdpOozieEngine
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_5.edp_engine import \
+    EdpSparkEngine
+from sahara_plugin_vanilla.plugins.vanilla.v2_7_5 import versionhandler as v_h
+from sahara_plugin_vanilla.tests.unit import base


 class TestConfig(object):
@@ -34,13 +37,15 @@ class TestConfig(object):


 class VersionHandlerTest(base.SaharaTestCase):
-    plugin_path = 'sahara.plugins.vanilla.'
-    plugin_hadoop2_path = 'sahara.plugins.vanilla.hadoop2.'
+    plugin_path = 'sahara_plugin_vanilla.plugins.vanilla.'
+    plugin_hadoop2_path = 'sahara_plugin_vanilla.plugins.vanilla.hadoop2.'

     def setUp(self):
         super(VersionHandlerTest, self).setUp()
         self.cluster = mock.Mock()
         self.vh = v_h.VersionHandler()
+        self.override_config("plugins", ["vanilla"])
+        pb.setup_plugins()

     def test_get_plugin_configs(self):
         self.vh.pctx['all_confs'] = 'haha'
@@ -74,11 +79,11 @@ class VersionHandlerTest(base.SaharaTestCase):
         configure_cluster.assert_called_once_with(self.vh.pctx, self.cluster)

     @mock.patch(plugin_path + 'v2_7_5.versionhandler.run')
-    @mock.patch(plugin_path + 'v2_7_5.versionhandler.s_scripts')
-    @mock.patch('sahara.swift.swift_helper.install_ssl_certs')
+    @mock.patch(plugin_path + 'v2_7_5.versionhandler.starting_scripts')
+    @mock.patch('sahara.plugins.swift_helper.install_ssl_certs')
     @mock.patch(plugin_hadoop2_path + 'keypairs.provision_keypairs')
     @mock.patch('sahara.plugins.utils.get_instances')
-    @mock.patch('sahara.utils.cluster.get_instances')
+    @mock.patch('sahara.plugins.utils.cluster_get_instances')
     def test_start_cluster(self, c_get_instances, u_get_instances,
                            provision_keypairs, install_ssl_certs,
                            s_scripts, run):
@@ -113,7 +118,7 @@ class VersionHandlerTest(base.SaharaTestCase):
                                           cluster,
                                           instances)

-    @mock.patch('sahara.utils.general.get_by_id')
+    @mock.patch('sahara.plugins.utils.general.get_by_id')
     @mock.patch(plugin_hadoop2_path +
                 'validation.validate_additional_ng_scaling')
     @mock.patch(plugin_hadoop2_path +
@@ -141,7 +146,7 @@ class VersionHandlerTest(base.SaharaTestCase):
         with testtools.ExpectedException(ex.ClusterCannotBeScaled):
             self.vh.validate_scaling(cluster, existing, {})

-        get_by_id.return_value = r.NodeGroupResource(ng5)
+        get_by_id.return_value = r.create_node_group_resource(ng5)

         with testtools.ExpectedException(ex.ClusterCannotBeScaled):
             self.vh.validate_scaling(cluster, {}, additional)
@@ -158,8 +163,8 @@ class VersionHandlerTest(base.SaharaTestCase):
                                           self.cluster,
                                           instances)

-    @mock.patch("sahara.conductor.API.cluster_update")
-    @mock.patch("sahara.context.ctx")
+    @mock.patch("sahara.plugins.conductor.cluster_update")
+    @mock.patch("sahara.plugins.context.ctx")
     @mock.patch(plugin_path + 'utils.get_namenode')
     @mock.patch(plugin_path + 'utils.get_resourcemanager')
     @mock.patch(plugin_path + 'utils.get_historyserver')
@@ -202,7 +207,7 @@ class VersionHandlerTest(base.SaharaTestCase):
         cluster_update.assert_called_once_with(ctx(), self.cluster,
                                                {'info': info})

-    @mock.patch("sahara.service.edp.job_utils.get_plugin")
+    @mock.patch("sahara.plugins.edp.get_plugin")
     @mock.patch('sahara.plugins.utils.get_instance')
     @mock.patch('os.path.join')
     def test_get_edp_engine(self, join, get_instance, get_plugin):
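The v2_7_5 changes mirror the v2_7_1 ones. The only conductor-facing difference visible in these hunks is that node-group wrappers are now obtained through the plugin SPI factory (resource.create_node_group_resource) instead of instantiating sahara.conductor's NodeGroupResource directly. A small mock-based sketch of that test arrangement (everything beyond the names shown in the diff is a stand-in):

from unittest import mock

# r stands in for `from sahara.plugins import resource as r`.
r = mock.Mock()
get_by_id = mock.Mock()

ng5 = {'name': 'worker', 'count': 3}   # hypothetical node-group payload
get_by_id.return_value = r.create_node_group_resource(ng5)

r.create_node_group_resource.assert_called_once_with(ng5)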