Merge "Add Key Value Store service test in cdh plugin integration test"

Jenkins (2015-03-02 00:45:26 +00:00), committed by Gerrit Code Review
5 changed files with 123 additions and 2 deletions


@@ -319,7 +319,7 @@ CDH_CONFIG_OPTS = [
                    'then image for cluster creation will be chosen by '
                    'tag "sahara_i_tests".'),
     cfg.StrOpt('HADOOP_VERSION',
-               default='5',
+               default='5.3.0',
                help='Version of Hadoop.'),
     cfg.StrOpt('HADOOP_USER',
                default='hdfs',


@@ -14,9 +14,9 @@
 # limitations under the License.

 from oslo_utils import excutils
+import six

 from sahara.tests.integration.tests import base
-import six


 class CheckServicesTest(base.ITestCase):
@@ -69,3 +69,31 @@ class CheckServicesTest(base.ITestCase):
                print(six.text_type(e))
        finally:
            self.close_ssh_connection()

    @base.skip_test('SKIP_CHECK_SERVICES_TEST', message='Test for Services'
                    ' checking was skipped.')
    def check_key_value_store_availability(self, cluster_info):
        namenode_ip = cluster_info['node_info']['namenode_ip']
        self.open_ssh_connection(namenode_ip)
        try:
            self.transfer_helper_script_to_node('key_value_store_service'
                                                '_test.sh')
            self.transfer_helper_conf_file_to_node('key_value_store'
                                                   '_indexer.xml')
            self.execute_command('./script.sh create_table -ip %s' %
                                 namenode_ip)
            self.execute_command('./script.sh create_solr_collection -ip %s' %
                                 namenode_ip)
            self.execute_command('./script.sh add_indexer -ip %s' %
                                 namenode_ip)
            self.execute_command('./script.sh create_data -ip %s' %
                                 namenode_ip)
            self.execute_command('./script.sh check_solr -ip %s' %
                                 namenode_ip)
            self.execute_command('./script.sh remove_data -ip %s' %
                                 namenode_ip)
        except Exception as e:
            with excutils.save_and_reraise_exception():
                print(six.text_type(e))
        finally:
            self.close_ssh_connection()


@@ -313,6 +313,8 @@ class CDHGatingTest(check_services.CheckServicesTest,
        self.check_flume_availability(self.cluster_info)
        # check sqoop2
        self.check_sqoop2_availability(self.cluster_info)
        # check key value store
        self.check_key_value_store_availability(self.cluster_info)

    @b.errormsg("Failure while cluster scaling: ")
    def _check_scaling(self):


@@ -0,0 +1,6 @@
<?xml version="1.0"?>
<indexer table="test-keyvalue">
  <field name="firstname_s" value="info:firstname"/>
  <field name="lastname_s" value="info:lastname"/>
  <field name="age_i" value="info:age" type="int"/>
</indexer>


@@ -0,0 +1,85 @@
#!/bin/bash -x

set -e

log=/tmp/key-value-store-test-log.txt

case $1 in
    create_table)
        FUNC="create_table"
        ;;
    create_solr_collection)
        FUNC="create_solr_collection"
        ;;
    add_indexer)
        FUNC="add_hbase_indexer"
        ;;
    create_data)
        FUNC="create_data"
        ;;
    check_solr)
        FUNC="check_solr_query"
        ;;
    remove_data)
        FUNC="remove_data"
        ;;
esac

shift
if [ "$1" = "-ip" ]; then
    IP="$2"
else
    IP="127.0.0.1"
fi

create_table(){
    exec hbase shell << EOF
disable 'test-keyvalue'
drop 'test-keyvalue'
create 'test-keyvalue', { NAME => 'info', REPLICATION_SCOPE => 1 }
exit
EOF
}

create_solr_collection(){
    solrctl instancedir --generate $HOME/solr_keyvalue_configs
    sleep 3
    solrctl instancedir --create keyvalue_collection $HOME/solr_keyvalue_configs
    sleep 30
    solrctl collection --create keyvalue_collection -s 1 -c keyvalue_collection
    sleep 3
}

add_hbase_indexer(){
    hbase-indexer add-indexer -n myindexer -c key_value_store_indexer.xml -cp solr.zk=localhost:2181/solr -cp solr.collection=keyvalue_collection
    sleep 3
}

create_data(){
    exec hbase shell << EOF
put 'test-keyvalue', 'row1', 'info:firstname', 'John'
put 'test-keyvalue', 'row1', 'info:lastname', 'Smith'
exit
EOF
}

remove_data(){
    exec hbase shell << EOF
delete 'test-keyvalue', 'row1', 'info:firstname', 'John'
delete 'test-keyvalue', 'row1', 'info:lastname', 'Smith'
exit
EOF
}

check_solr_query(){
    sleep 3
    if [ `curl "http://$IP:8983/solr/keyvalue_collection_shard1_replica1/select?q=*:*&wt=json&indent=true" | grep "John" | wc -l` -ge 1 ]; then
        echo -e "Solr query succeeded. \n" >> $log
        exit 0
    else
        echo -e "Solr query failed. \n" >> $log
        exit 1
    fi
}

$FUNC
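
For reference, a minimal sketch of exercising this helper by hand, assuming it has been uploaded to the namenode as script.sh (the test above invokes it that way after transfer_helper_script_to_node) and that 192.0.2.10 is a placeholder for the namenode IP; the integration test runs the same sequence over SSH:

# Hypothetical manual run mirroring the call order used by
# check_key_value_store_availability(); the IP below is a placeholder.
./script.sh create_table -ip 192.0.2.10             # drop and recreate the 'test-keyvalue' HBase table
./script.sh create_solr_collection -ip 192.0.2.10   # generate Solr configs and create keyvalue_collection
./script.sh add_indexer -ip 192.0.2.10              # register the HBase indexer against the collection
./script.sh create_data -ip 192.0.2.10              # put a row that should be indexed into Solr
./script.sh check_solr -ip 192.0.2.10               # query Solr and grep for the inserted value
./script.sh remove_data -ip 192.0.2.10              # delete the test row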