Renamed all swift-dependent configs from savanna to sahara

* Changed 'service.savanna' to 'service.sahara' in the swift helper
  and 'conf-template.xml'
* Renamed the corresponding configs in the HDP plugin
* Updated integration and unit tests accordingly

Change-Id: I611d2200e3cb8071f97fca67a9528388b8b18798
Partially implements: blueprint savanna-renaming-service
Alexander Ignatov 2014-03-12 17:45:42 +04:00
parent f1d7dfa242
commit 0d52cc6b30
15 changed files with 73 additions and 73 deletions
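
The rename is mechanical: every 'fs.swift.service.savanna.*' config key
becomes 'fs.swift.service.sahara.*', and the '.savanna' suffix in
'swift://' and 'swift-internal://' URLs becomes '.sahara'. A minimal
sketch of the substitution (a hypothetical helper written for this
description, not part of the commit):

import re

# Hypothetical helper, illustration only. "savanna" appears in two
# places: as the Hadoop-Swift provider name in config keys
# (fs.swift.service.savanna.*) and as the service suffix in
# object-store URLs (swift://container.savanna/path).
KEY_RE = re.compile(r'fs\.swift\.service\.savanna\.')
URL_RE = re.compile(r'(swift(?:-internal)?://[^/\s]+)\.savanna\b')

def rename_swift_configs(text):
    """Rewrite savanna-based Swift config keys and URLs to sahara."""
    text = KEY_RE.sub('fs.swift.service.sahara.', text)
    return URL_RE.sub(r'\1.sahara', text)

# rename_swift_configs('fs.swift.service.savanna.http.port')
#   -> 'fs.swift.service.sahara.http.port'
# rename_swift_configs('swift://ex.savanna/i') -> 'swift://ex.sahara/i'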

View File

@@ -1,6 +1,6 @@
{
"description": "This is input",
"url": "swift://container.savanna/text",
"url": "swift://container.sahara/text",
"credentials": {
"password": "swordfish",
"user": "admin"

View File

@@ -234,7 +234,7 @@
"default_value": true,
"description": "savanna provider public attribute",
"is_optional": true,
"name": "fs.swift.service.savanna.public",
"name": "fs.swift.service.sahara.public",
"scope": "cluster"
},
{
@@ -243,7 +243,7 @@
"default_value": "8080",
"description": " ",
"is_optional": true,
"name": "fs.swift.service.savanna.http.port",
"name": "fs.swift.service.sahara.http.port",
"scope": "cluster"
},
{
@@ -252,7 +252,7 @@
"default_value": "443",
"description": " ",
"is_optional": true,
"name": "fs.swift.service.savanna.https.port",
"name": "fs.swift.service.sahara.https.port",
"scope": "cluster"
}
]

View File

@@ -438,11 +438,11 @@
{ "name" : "fs.swift.blocksize", "value" : "32768" },
{ "name" : "fs.swift.partsize", "value" : "4718592" },
{ "name" : "fs.swift.requestsize", "value" : "64" },
{ "name" : "fs.swift.service.savanna.public", "value" : "true" },
{ "name" : "fs.swift.service.savanna.http.port", "value" : "8080" },
{ "name" : "fs.swift.service.savanna.https.port", "value" : "443" },
{ "name" : "fs.swift.service.savanna.auth.url", "value" : "None" },
{ "name" : "fs.swift.service.savanna.tenant", "value" : "None"},
{ "name" : "fs.swift.service.sahara.public", "value" : "true" },
{ "name" : "fs.swift.service.sahara.http.port", "value" : "8080" },
{ "name" : "fs.swift.service.sahara.https.port", "value" : "443" },
{ "name" : "fs.swift.service.sahara.auth.url", "value" : "None" },
{ "name" : "fs.swift.service.sahara.tenant", "value" : "None"},
{ "name" : "hadoop.proxyuser.hive.groups", "value" : "users" },
{ "name" : "hadoop.proxyuser.hive.hosts", "value" : "%HIVE_HOST%" },
{ "name" : "hadoop.proxyuser.hcat.groups", "value" : "users" },

View File

@@ -28,8 +28,8 @@ from savanna.utils import xmlutils
conductor = c.API
swift_username = 'fs.swift.service.savanna.username'
swift_password = 'fs.swift.service.savanna.password'
swift_username = 'fs.swift.service.sahara.username'
swift_password = 'fs.swift.service.sahara.password'
class BaseFactory(object):
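
These two keys are the only per-job Swift settings; the remaining
fs.swift.service.sahara.* values are resolved at cluster-creation time.
A hedged sketch of how a data source's credentials land under the
renamed keys (the function and dict shapes are assumptions for
illustration, not this module's actual API):

# Illustrative only: map a data source's Swift credentials onto the
# renamed per-job keys. The dict shapes below are assumptions.
swift_username = 'fs.swift.service.sahara.username'
swift_password = 'fs.swift.service.sahara.password'

def add_swift_credentials(configs, data_source):
    """Merge a data source's Swift credentials into a job config dict."""
    creds = data_source.get('credentials', {})
    configs[swift_username] = creds.get('user')
    configs[swift_password] = creds.get('password')
    return configs

# add_swift_credentials({}, {'credentials': {'user': 'admin',
#                                            'password': 'swordfish'}})
# -> {'fs.swift.service.sahara.username': 'admin',
#     'fs.swift.service.sahara.password': 'swordfish'}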

View File

@@ -1,51 +1,51 @@
<configuration>
<!--Mandatory savanna-provider-specific configs (fs.swift.service.savanna prefix)-->
<!--Mandatory sahara-provider-specific configs (fs.swift.service.sahara prefix)-->
<!--username and password should be set for each job; -->
<!--tenant and auth.url are determined during cluster creation.-->
<!--During savanna cluster creation the default configs for ports and public-uri are set. -->
<!--During sahara cluster creation the default configs for ports and public-uri are set. -->
<!--User-defined configs nevertheless take the highest priority-->
<property>
<name>fs.swift.service.savanna.auth.url</name>
<name>fs.swift.service.sahara.auth.url</name>
</property>
<property>
<name>fs.swift.service.savanna.tenant</name>
<name>fs.swift.service.sahara.tenant</name>
</property>
<property>
<name>fs.swift.service.savanna.username</name>
<name>fs.swift.service.sahara.username</name>
</property>
<property>
<name>fs.swift.service.savanna.password</name>
<name>fs.swift.service.sahara.password</name>
</property>
<property>
<name>fs.swift.service.savanna.http.port</name>
<name>fs.swift.service.sahara.http.port</name>
<value>8080</value>
</property>
<property>
<name>fs.swift.service.savanna.https.port</name>
<name>fs.swift.service.sahara.https.port</name>
<value>443</value>
</property>
<property>
<name>fs.swift.service.savanna.public</name>
<name>fs.swift.service.sahara.public</name>
<value>true</value>
</property>
<property>
<name>fs.swift.service.savanna.auth.endpoint.prefix</name>
<name>fs.swift.service.sahara.auth.endpoint.prefix</name>
<value>/endpoints/AUTH_</value>
</property>
<!--Optional savanna-provider-specific configs (fs.swift.service.savanna prefix)-->
<!--Optional sahara-provider-specific configs (fs.swift.service.sahara prefix)-->
<property>
<name>fs.swift.service.savanna.region</name>
<name>fs.swift.service.sahara.region</name>
</property>
<property>
<name>fs.swift.service.savanna.apikey</name>
<name>fs.swift.service.sahara.apikey</name>
</property>
<!--General configs, without "savanna" prefix.-->
<!--General configs, without "sahara" prefix.-->
<!--Only fs.swift.impl is mandatory-->
<!--all other configs have default values, so users may override them.-->
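
To make the template's contract concrete, here is a stdlib-only sketch
(the project itself uses savanna.utils.xmlutils, whose API is not shown
here) that renders the sahara-prefixed defaults above into a Hadoop
configuration fragment:

import xml.etree.ElementTree as ET

# Defaults mirror the template above; auth.url and tenant are resolved
# at cluster creation, username and password are supplied per job.
DEFAULTS = {
    'fs.swift.service.sahara.http.port': '8080',
    'fs.swift.service.sahara.https.port': '443',
    'fs.swift.service.sahara.public': 'true',
    'fs.swift.service.sahara.auth.endpoint.prefix': '/endpoints/AUTH_',
}

def render_swift_configuration(overrides=None):
    """Build a <configuration> XML string from defaults plus overrides."""
    props = dict(DEFAULTS)
    props.update(overrides or {})
    root = ET.Element('configuration')
    for name, value in sorted(props.items()):
        prop = ET.SubElement(root, 'property')
        ET.SubElement(prop, 'name').text = name
        ET.SubElement(prop, 'value').text = value
    return ET.tostring(root, encoding='unicode')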

View File

@@ -24,8 +24,8 @@ from savanna.utils import xmlutils as x
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
HADOOP_SWIFT_AUTH_URL = 'fs.swift.service.savanna.auth.url'
HADOOP_SWIFT_TENANT = 'fs.swift.service.savanna.tenant'
HADOOP_SWIFT_AUTH_URL = 'fs.swift.service.sahara.auth.url'
HADOOP_SWIFT_TENANT = 'fs.swift.service.sahara.tenant'
def _retrieve_tenant():

View File

@@ -92,8 +92,8 @@ class EDPTest(base.ITestCase):
self.savanna.data_sources.delete(output_id)
def _add_swift_configs(self, configs):
swift_user = "fs.swift.service.savanna.username"
swift_passw = "fs.swift.service.savanna.password"
swift_user = "fs.swift.service.sahara.username"
swift_passw = "fs.swift.service.sahara.password"
if "configs" not in configs:
configs["configs"] = {}
@@ -131,8 +131,8 @@ class EDPTest(base.ITestCase):
lib_binary_list = []
job_binary_internal_list = []
swift_input_url = 'swift://%s.savanna/input' % container_name
swift_output_url = 'swift://%s.savanna/output' % container_name
swift_input_url = 'swift://%s.sahara/input' % container_name
swift_output_url = 'swift://%s.sahara/output' % container_name
# Java jobs don't use data sources. Input/output paths must
# be passed as args with corresponding username/password configs

View File

@@ -53,10 +53,10 @@ check_swift_availability() {
sudo -u $HADOOP_USER bash -c "hadoop dfs -copyFromLocal /tmp/test-file /swift-test/"
check_return_code_after_command_execution -clean_hdfs `echo "$?"`
sudo -u $HADOOP_USER bash -c "hadoop distcp -D fs.swift.service.savanna.username=$OS_USERNAME -D fs.swift.service.savanna.tenant=$OS_TENANT_NAME -D fs.swift.service.savanna.password=$OS_PASSWORD /swift-test/test-file swift://$SWIFT_CONTAINER_NAME.savanna/"
sudo -u $HADOOP_USER bash -c "hadoop distcp -D fs.swift.service.sahara.username=$OS_USERNAME -D fs.swift.service.sahara.tenant=$OS_TENANT_NAME -D fs.swift.service.sahara.password=$OS_PASSWORD /swift-test/test-file swift://$SWIFT_CONTAINER_NAME.sahara/"
check_return_code_after_command_execution -clean_hdfs `echo "$?"`
sudo -u $HADOOP_USER bash -c "hadoop distcp -D fs.swift.service.savanna.username=$OS_USERNAME -D fs.swift.service.savanna.tenant=$OS_TENANT_NAME -D fs.swift.service.savanna.password=$OS_PASSWORD swift://$SWIFT_CONTAINER_NAME.savanna/test-file /swift-test/swift-test-file"
sudo -u $HADOOP_USER bash -c "hadoop distcp -D fs.swift.service.sahara.username=$OS_USERNAME -D fs.swift.service.sahara.tenant=$OS_TENANT_NAME -D fs.swift.service.sahara.password=$OS_PASSWORD swift://$SWIFT_CONTAINER_NAME.sahara/test-file /swift-test/swift-test-file"
check_return_code_after_command_execution -clean_hdfs `echo "$?"`
sudo -u $HADOOP_USER bash -c "hadoop dfs -copyToLocal /swift-test/swift-test-file /tmp/swift-test-file"

View File

@@ -67,7 +67,7 @@ SAMPLE_JOB_BINARY_DICT = {
"name": "bob",
"tenant_id": "6b859fb8d1f44e8eafdfb91f21309b5f",
"updated_at": "null",
"url": "swift-internal://bob.savanna/job"
"url": "swift-internal://bob.sahara/job"
}
@@ -75,7 +75,7 @@ SAMPLE_DATA_SOURCE = {
'name': 'input',
'description': 'some input',
'type': 'swift',
'url': 'swift://tmckay.savanna',
'url': 'swift://tmckay.sahara',
'credentials': {
'username': 'me',
'password': 'password'

View File

@@ -599,7 +599,7 @@ class ClusterSpecTest(unittest2.TestCase):
self.assertEqual(
'true',
cluster_config.configurations['core-site']
['fs.swift.service.savanna.location-aware'])
['fs.swift.service.sahara.location-aware'])
self.assertEqual(
'org.apache.hadoop.net.ScriptBasedMapping',
cluster_config.configurations['core-site']

View File

@@ -644,7 +644,7 @@ class ClusterSpecTestForHDP2(unittest2.TestCase):
self.assertEqual(
'true',
cluster_config.configurations['core-site']
['fs.swift.service.savanna.location-aware'])
['fs.swift.service.sahara.location-aware'])
self.assertEqual(
'org.apache.hadoop.net.ScriptBasedMapping',
cluster_config.configurations['core-site']

View File

@@ -96,8 +96,8 @@ class TestJobManager(base.SavannaWithDbTestCase):
job, job_exec = _create_all_stack('Pig')
job_binary.return_value = {"name": "script.pig"}
input_data = _create_data_source('swift://ex.savanna/i')
output_data = _create_data_source('swift://ex.savanna/o')
input_data = _create_data_source('swift://ex.sahara/i')
output_data = _create_data_source('swift://ex.sahara/o')
creator = workflow_factory.get_creator(job)
@@ -105,17 +105,17 @@ class TestJobManager(base.SavannaWithDbTestCase):
input_data, output_data)
self.assertIn("""
<param>INPUT=swift://ex.savanna/i</param>
<param>OUTPUT=swift://ex.savanna/o</param>""", res)
<param>INPUT=swift://ex.sahara/i</param>
<param>OUTPUT=swift://ex.sahara/o</param>""", res)
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.savanna.password</name>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.savanna.username</name>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>""", res)
@@ -129,7 +129,7 @@ class TestJobManager(base.SavannaWithDbTestCase):
job, job_exec = _create_all_stack('Pig')
job_binary.return_value = {"name": "script.pig"}
input_data = _create_data_source('swift://ex.savanna/i')
input_data = _create_data_source('swift://ex.sahara/i')
output_data = _create_data_source('hdfs://user/hadoop/out')
creator = workflow_factory.get_creator(job)
@@ -139,17 +139,17 @@ class TestJobManager(base.SavannaWithDbTestCase):
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.savanna.password</name>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.savanna.username</name>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>""", res)
input_data = _create_data_source('hdfs://user/hadoop/in')
output_data = _create_data_source('swift://ex.savanna/o')
output_data = _create_data_source('swift://ex.sahara/o')
creator = workflow_factory.get_creator(job)
@@ -159,11 +159,11 @@ class TestJobManager(base.SavannaWithDbTestCase):
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.savanna.password</name>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.savanna.username</name>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>""", res)
@@ -196,8 +196,8 @@ class TestJobManager(base.SavannaWithDbTestCase):
job, job_exec = _create_all_stack(job_type, configs)
input_data = _create_data_source('swift://ex.savanna/i')
output_data = _create_data_source('swift://ex.savanna/o')
input_data = _create_data_source('swift://ex.sahara/i')
output_data = _create_data_source('swift://ex.sahara/o')
creator = workflow_factory.get_creator(job)
@@ -214,24 +214,24 @@ class TestJobManager(base.SavannaWithDbTestCase):
self.assertIn("""
<property>
<name>mapred.output.dir</name>
<value>swift://ex.savanna/o</value>
<value>swift://ex.sahara/o</value>
</property>""", res)
self.assertIn("""
<property>
<name>mapred.input.dir</name>
<value>swift://ex.savanna/i</value>
<value>swift://ex.sahara/i</value>
</property>""", res)
self.assertIn("""
<property>
<name>fs.swift.service.savanna.password</name>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>""", res)
self.assertIn("""
<property>
<name>fs.swift.service.savanna.username</name>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>""", res)
@@ -259,11 +259,11 @@ class TestJobManager(base.SavannaWithDbTestCase):
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.savanna.password</name>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.savanna.username</name>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>
@@ -278,8 +278,8 @@ class TestJobManager(base.SavannaWithDbTestCase):
job, job_exec = _create_all_stack('Hive')
job_binary.return_value = {"name": "script.q"}
input_data = _create_data_source('swift://ex.savanna/i')
output_data = _create_data_source('swift://ex.savanna/o')
input_data = _create_data_source('swift://ex.sahara/i')
output_data = _create_data_source('swift://ex.sahara/o')
creator = workflow_factory.get_creator(job)
@@ -290,23 +290,23 @@ class TestJobManager(base.SavannaWithDbTestCase):
<job-xml>/user/hadoop/conf/hive-site.xml</job-xml>
<configuration>
<property>
<name>fs.swift.service.savanna.password</name>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.savanna.username</name>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>
<script>script.q</script>
<param>INPUT=swift://ex.savanna/i</param>
<param>OUTPUT=swift://ex.savanna/o</param>""", res)
<param>INPUT=swift://ex.sahara/i</param>
<param>OUTPUT=swift://ex.sahara/o</param>""", res)
def _build_workflow_with_conf_common(self, job_type):
job, _ = _create_all_stack(job_type)
input_data = _create_data_source('swift://ex.savanna/i')
output_data = _create_data_source('swift://ex.savanna/o')
input_data = _create_data_source('swift://ex.sahara/i')
output_data = _create_data_source('swift://ex.sahara/o')
job_exec = _create_job_exec(job.id,
job_type, configs={"configs": {'c': 'f'}})
@@ -325,13 +325,13 @@ class TestJobManager(base.SavannaWithDbTestCase):
self.assertIn("""
<property>
<name>mapred.input.dir</name>
<value>swift://ex.savanna/i</value>
<value>swift://ex.sahara/i</value>
</property>""", res)
self.assertIn("""
<property>
<name>mapred.output.dir</name>
<value>swift://ex.savanna/o</value>
<value>swift://ex.sahara/o</value>
</property>""", res)
def test_build_workflow_for_job_mapreduce_with_conf(self):

View File

@@ -41,7 +41,7 @@ class TestJobBinaryValidation(u.ValidationTestCase):
self._assert_create_object_validation(
data={
"name": "j_o_w",
"url": su.SWIFT_INTERNAL_PREFIX+"o.savanna/k"
"url": su.SWIFT_INTERNAL_PREFIX+"o.sahara/k"
},
bad_req_i=(1, "BAD_JOB_BINARY",
"To work with JobBinary located in internal "
@@ -52,7 +52,7 @@ class TestJobBinaryValidation(u.ValidationTestCase):
self._assert_create_object_validation(
data={
"name": "j_o_w",
"url": su.OLD_SWIFT_INTERNAL_PREFIX+"o.savanna/k"
"url": su.OLD_SWIFT_INTERNAL_PREFIX+"o.sahara/k"
},
bad_req_i=(1, "BAD_JOB_BINARY",
"To work with JobBinary located in internal "

View File

@@ -20,7 +20,7 @@ from savanna.tests.unit import base
GENERAL_PREFIX = "fs.swift."
SERVICE_PREFIX = "service.savanna."
SERVICE_PREFIX = "service.sahara."
GENERAL = ["impl", "connect.timeout", "socket.timeout",
"connect.retry.count", "connect.throttle.delay",
@@ -43,7 +43,7 @@ class SwiftIntegrationTestCase(base.SavannaTestCase):
result = h.get_swift_configs()
self.assertEqual(7, len(result))
self.assertIn({'name': "fs.swift.service.savanna.tenant",
self.assertIn({'name': "fs.swift.service.sahara.tenant",
'value': 'test_tenant', 'description': ''}, result)
self.assertIn({'name': "fs.swift.service.savanna.http.port",
self.assertIn({'name': "fs.swift.service.sahara.http.port",
'value': '8080', 'description': ''}, result)

View File

@@ -17,7 +17,7 @@
<!--Location awareness for swift -->
<property>
<name>fs.swift.service.savanna.location-aware</name>
<name>fs.swift.service.sahara.location-aware</name>
<value>true</value>
</property>