Drop support for stable/juno and fedora images

Change-Id: I2e9a62ef15ccfcd425966cd930db89673d63f92f
Author: Denis Egorenko
Date: 2015-04-16 16:29:57 +03:00
Parent: 37b638d265
Commit: 6391bba243
12 changed files with 47 additions and 283 deletions

View File

@@ -23,7 +23,6 @@ else
USE_NEUTRON=false
fi
VANILLA24_IMAGE_PATH=/home/ubuntu/images/sahara-vanilla-2.4.1-ubuntu-14.04.qcow2
VANILLA26_IMAGE_PATH=/home/ubuntu/images/sahara-vanilla-2.6.0-ubuntu-14.04.qcow2
VANILLA_IMAGE_PATH=/home/ubuntu/images/sahara-vanilla-1.2.1-ubuntu-14.04.qcow2
HDP1_IMAGE_PATH=/home/ubuntu/images/centos_6-6_hdp-1.qcow2
@@ -71,7 +70,6 @@ nova flavor-create --is-public true m1.small 2 1024 20 1
# add images for tests
glance image-create --name ubuntu_vanilla_1_latest --file $VANILLA_IMAGE_PATH --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_1.2.1'='True' --property '_sahara_tag_1.1.2'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'='ubuntu'
glance image-create --name ubuntu_vanilla_2.4_latest --file $VANILLA24_IMAGE_PATH --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_2.4.1'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'='ubuntu'
glance image-create --name ubuntu_vanilla_2.6_latest --file $VANILLA26_IMAGE_PATH --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_2.6.0'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'='ubuntu'
glance image-create --name sahara_hdp_1_latest --file $HDP1_IMAGE_PATH --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_1.3.2'='True' --property '_sahara_tag_hdp'='True' --property '_sahara_username'='root'
glance image-create --name sahara_hdp_2_latest --file $HDP2_IMAGE_PATH --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_2.0.6'='True' --property '_sahara_tag_hdp'='True' --property '_sahara_username'='root'
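The _sahara_tag_* properties above are what Sahara's image registry keys on when matching images to plugin versions; a quick sanity check after registration (a minimal sketch using the same glance CLI, with the image name taken from the commands above):

# confirm that the _sahara_tag_* and _sahara_username properties landed on the image
glance image-show ubuntu_vanilla_2.6_latest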

View File

@@ -1,67 +0,0 @@
[COMMON]
OS_USERNAME = ci-user
OS_PASSWORD = nova
OS_TENANT_NAME = ci
OS_TENANT_ID = tenantid
OS_AUTH_URL = http://127.0.0.1:5000/v2.0
SAHARA_HOST = localhost
FLAVOR_ID = 20
CLUSTER_CREATION_TIMEOUT = 60
CLUSTER_NAME = name
FLOATING_IP_POOL = public
NEUTRON_ENABLED = false
INTERNAL_NEUTRON_NETWORK = private
JOB_LAUNCH_TIMEOUT = 15
HDFS_INITIALIZATION_TIMEOUT = 10
[VANILLA]
IMAGE_NAME = vanilla_image
SKIP_ALL_TESTS_FOR_PLUGIN = False
SKIP_CINDER_TEST = False
SKIP_CLUSTER_CONFIG_TEST = False
SKIP_EDP_TEST = False
SKIP_MAP_REDUCE_TEST = True
SKIP_SWIFT_TEST = True
SKIP_SCALING_TEST = False
SKIP_TRANSIENT_CLUSTER_TEST = True
ONLY_TRANSIENT_CLUSTER_TEST = False
[VANILLA_TWO]
IMAGE_NAME = vanilla_two_image
SKIP_ALL_TESTS_FOR_PLUGIN = False
SKIP_CINDER_TEST = False
SKIP_MAP_REDUCE_TEST = True
SKIP_SWIFT_TEST = True
SKIP_SCALING_TEST = False
SKIP_EDP_TEST = False
[HDP]
IMAGE_NAME = hdp_image
SKIP_ALL_TESTS_FOR_PLUGIN = False
SKIP_CINDER_TEST = False
SKIP_MAP_REDUCE_TEST = True
SKIP_SWIFT_TEST = True
SKIP_SCALING_TEST = False
SKIP_EDP_TEST = False
[HDP2]
IMAGE_NAME = hdp_two_image
SKIP_ALL_TESTS_FOR_PLUGIN = False
SKIP_SWIFT_TEST = True
SKIP_SCALING_TEST = False
SKIP_EDP_TEST = False
[CDH]
IMAGE_NAME = cdh_image
SKIP_ALL_TESTS_FOR_PLUGIN = False
SKIP_CINDER_TEST = False
SKIP_MAP_REDUCE_TEST = True
SKIP_SWIFT_TEST = True
SKIP_SCALING_TEST = False
SKIP_EDP_TEST = False
[SPARK]
IMAGE_NAME = spark_image
SKIP_ALL_TESTS_FOR_PLUGIN = False
SKIP_SCALING_TEST = False
SKIP_EDP_TEST = False
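The gate scripts in this change drive INI files like this sample (and sahara.conf) through an insert_config_value helper whose implementation is not part of this diff; a hypothetical sketch, assuming crudini is available on the slave:

# hypothetical helper (not from this repository): set SECTION/KEY = VALUE in an
# INI-style file such as itest.conf or sahara.conf
insert_config_value() {
    local file=$1 section=$2 key=$3 value=$4
    crudini --set "$file" "$section" "$key" "$value"
}
# example call, matching the call sites further down in this change
insert_config_value "$sahara_conf_path" DEFAULT plugins spark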

View File

@@ -3,7 +3,6 @@ credentials:
os_password: %OS_PASSWORD%
os_tenant: %OS_TENANT_NAME%
os_auth_url: http://%OPENSTACK_HOST%:5000/v2.0
sahara_url: http://localhost:8386/v1.1/%TENANT_ID%
network:
type: %NETWORK%
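The %...% tokens in this template are substituted at job time; the insert_scenario_value helper that does this is not shown in this diff, but a plausible minimal form is a sed replacement (file name assumed from the scenario-common.yaml referenced in the functions script below):

# sketch only: fill one placeholder in the rendered scenario credentials file
sed -i "s|%OS_USERNAME%|$OS_USERNAME|g" scenario-common.yaml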

View File

@@ -28,7 +28,6 @@ projects:
- name: openstack/sahara
check:
- gate-sahara-neutron-direct-vanilla_1-aio
- gate-sahara-neutron-heat-vanilla_2.4
- gate-sahara-neutron-heat-vanilla_2.6
- gate-sahara-neutron-direct-spark-aio
- gate-sahara-neutron-direct-transient
@@ -43,7 +42,6 @@ projects:
- name: openstack/python-saharaclient
check:
- gate-saharaclient-neutron-direct-vanilla_1-aio
- gate-saharaclient-neutron-heat-vanilla_2.4
- gate-saharaclient-neutron-heat-vanilla_2.6
- gate-saharaclient-neutron-direct-spark-aio
- gate-saharaclient-neutron-direct-transient
@@ -68,9 +66,6 @@ projects:
- dib-neutron-direct-vanilla_1-ubuntu-aio
- dib-neutron-direct-vanilla_1-fedora-aio
- dib-neutron-direct-vanilla_1-centos-aio
- dib-neutron-heat-vanilla_2.4-ubuntu
- dib-neutron-heat-vanilla_2.4-fedora
- dib-neutron-heat-vanilla_2.4-centos
- dib-neutron-heat-vanilla_2.6-ubuntu
- dib-neutron-heat-vanilla_2.6-fedora
- dib-neutron-heat-vanilla_2.6-centos
@@ -83,9 +78,6 @@ projects:
- dib-neutron-direct-vanilla_1-ubuntu-aio
- dib-neutron-direct-vanilla_1-fedora-aio
- dib-neutron-direct-vanilla_1-centos-aio
- dib-neutron-heat-vanilla_2.4-ubuntu
- dib-neutron-heat-vanilla_2.4-fedora
- dib-neutron-heat-vanilla_2.4-centos
- dib-neutron-heat-vanilla_2.6-ubuntu
- dib-neutron-heat-vanilla_2.6-fedora
- dib-neutron-heat-vanilla_2.6-centos
@@ -101,29 +93,15 @@ projects:
jobs:
- name: ^.*$
parameter-function: set_ci_tenant
branch: ^(master|stable/juno|proposed/kilo)$
- name: ^dib-neutron-heat-vanilla_2.4-.*$
branch: ^(stable/juno)$
- name: ^dib-neutron-heat-vanilla_2.6-.*$
skip-if:
- branch: ^(stable/juno)$
- name: ^gate-.*-vanilla_2.4$
branch: ^(stable/juno)$
parameter-function: set_params
branch: ^(master|stable/kilo)$
- name: ^gate-sahara.*$
skip-if:
- all-files-match-any:
- ^doc/.*$
- ^.*\.rst$
- name: ^gate-.*-vanilla_2.6$
skip-if:
- all-files-match-any:
- ^doc/.*$
- ^.*\.rst$
- branch: ^(stable/juno)$
- name: ^tempest-sahara.*$
skip-if:
- branch: ^stable/juno$
- all-files-match-any:
- ^doc/.*$
- ^.*\.rst$
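The practical effect of the tightened branch filters can be checked with plain bash regex matching (an illustration only; the actual matching is done by Zuul). The layout now uses branch: ^(master|stable/kilo)$ in place of filters that included stable/juno:

for branch in master stable/kilo stable/juno; do
    [[ "$branch" =~ ^(master|stable/kilo)$ ]] && echo "$branch: scheduled" || echo "$branch: skipped"
done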

View File

@@ -35,8 +35,6 @@ def single_use_node(item, job, params):
params['OFFLINE_NODE_WHEN_COMPLETE'] = '1'
def set_ci_tenant(item, job, params):
def set_params(item, job, params):
single_use_node(item, job, params)
params['NEUTRON_LAB_TENANT_ID'] = '-NEUTRON_LAB_TENANT_ID-'
params['NOVA_NET_LAB_TENANT_ID'] = '-NOVA_NET_LAB_TENANT_ID-'
params['CLUSTER_HASH'] = str(uuid.uuid4()).split('-')[0]

View File

@@ -77,7 +77,6 @@
- vanilla_1
os:
- ubuntu
- fedora
- centos
network:
- neutron
@@ -87,14 +86,10 @@
- 'dib-{network}-direct-{plugin}-{os}-aio':
network: nova
plugin: cdh
os:
- ubuntu
- centos
- 'dib-nova-direct-{plugin}-aio':
plugin:
- spark
- hdp_1
- 'dib-neutron-heat-{plugin}-{os}':
plugin:
- vanilla_2.4
- vanilla_2.6
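For reference, each {plugin}/{os} pair above expands into a concrete job name used in the Zuul layout. A small sketch of the vanilla_1 expansion once fedora is dropped from the os axis (assuming the first block belongs to the 'dib-{network}-direct-{plugin}-{os}-aio' template shown with it):

# expansion for network=neutron, plugin=vanilla_1 after this change
for os in ubuntu centos; do
    echo "dib-neutron-direct-vanilla_1-${os}-aio"
done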

View File

@@ -107,7 +107,6 @@
github-org: openstack
plugin-neutron:
- direct-vanilla_1-aio
- heat-vanilla_2.4
- heat-vanilla_2.6
- direct-spark-aio
- direct-transient

View File

@@ -10,9 +10,6 @@ CLUSTER_HASH=${CLUSTER_HASH:-$RANDOM}
cluster_name="$HOST-$ZUUL_CHANGE-$CLUSTER_HASH"
SAHARA_PATH="/tmp/sahara"
# default (deprecated) config file for integration tests
tests_config_file="$SAHARA_PATH/sahara/tests/integration/configs/itest.conf"
tests_config_file_template="$sahara_templates_configs_path/itest.conf.sample"
sahara_conf_path="$SAHARA_PATH/etc/sahara/sahara.conf"
engine=$(echo $JOB_NAME | awk -F '-' '{ print $3 }')
@@ -21,7 +18,6 @@ image_type=${2:-ubuntu}
# Image names
vanilla_image=$HOST-sahara-vanilla-${image_type}-${ZUUL_CHANGE}-hadoop_1
vanilla_two_four_image=$HOST-sahara-vanilla-${image_type}-${ZUUL_CHANGE}-hadoop_2.4
vanilla_two_six_image=$HOST-sahara-vanilla-${image_type}-${ZUUL_CHANGE}-hadoop_2.6
hdp_image=$HOST-sahara-hdp-centos-${ZUUL_CHANGE}-hadoop_1
hdp_two_image=$HOST-sahara-hdp-centos-${ZUUL_CHANGE}-hadoop_2
@@ -36,45 +32,20 @@ case $job_type in
if [ "${image_type}" == 'centos' ]; then
username='cloud-user'
else
username=${image_type}
if [ "$image_type" == "fedora" ]; then
# This fix is workaround for problem with downloading fedora
# from cloud.fedoraproject.org
# we use the same version of fedora that is in DIB
FEDORA_OPTS="DIB_CLOUD_IMAGES=http://172.18.168.44/images BASE_IMAGE_FILE=fedora.x86_64.qcow2"
fi
username='ubuntu'
fi
hadoop_version=$(echo $job_type | awk -F '_' '{print $2}')
case $hadoop_version in
1)
env ${image_type}_vanilla_hadoop_1_image_name=${vanilla_image} $FEDORA_OPTS SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p vanilla -i $image_type -v 1
env ${image_type}_vanilla_hadoop_1_image_name=${vanilla_image} SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p vanilla -i $image_type -v 1
check_error_code $? ${vanilla_image}.qcow2
upload_image "vanilla-1" "${username}" ${vanilla_image}
if [ "$ZUUL_BRANCH" == "stable/juno" ]; then
insert_config_value $tests_config_file_template VANILLA SKIP_CINDER_TEST True
insert_config_value $tests_config_file_template VANILLA SKIP_CLUSTER_CONFIG_TEST True
insert_config_value $tests_config_file_template VANILLA SKIP_SCALING_TEST True
insert_config_value $tests_config_file_template VANILLA IMAGE_NAME $vanilla_image
else
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-vanilla-1.2.1.yaml"
insert_scenario_value $tests_config_file vanilla_image
fi
plugin=vanilla1
;;
2.4)
env ${image_type}_vanilla_hadoop_2_4_image_name=${vanilla_two_four_image} $FEDORA_OPTS SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p vanilla -i $image_type -v 2.4
check_error_code $? ${vanilla_two_four_image}.qcow2
upload_image "vanilla-2.4" "${username}" ${vanilla_two_four_image}
DISTRIBUTE_MODE=True
insert_config_value $tests_config_file_template VANILLA_TWO SKIP_CINDER_TEST True
insert_config_value $tests_config_file_template VANILLA_TWO SKIP_CLUSTER_CONFIG_TEST True
insert_config_value $tests_config_file_template VANILLA_TWO SKIP_SCALING_TEST True
insert_config_value $tests_config_file_template VANILLA_TWO IMAGE_NAME $vanilla_two_four_image
plugin=vanilla2
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-vanilla-1.2.1.yaml"
insert_scenario_value $tests_config_file vanilla_image
;;
2.6)
env ${image_type}_vanilla_hadoop_2_6_image_name=${vanilla_two_six_image} $FEDORA_OPTS SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p vanilla -i $image_type -v 2.6
env ${image_type}_vanilla_hadoop_2_6_image_name=${vanilla_two_six_image} SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p vanilla -i $image_type -v 2.6
check_error_code $? ${vanilla_two_six_image}.qcow2
upload_image "vanilla-2.6" "${username}" ${vanilla_two_six_image}
DISTRIBUTE_MODE=True
@@ -88,16 +59,8 @@ case $job_type in
env ubuntu_spark_image_name=${spark_image} SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p spark
check_error_code $? ${spark_image}.qcow2
upload_image "spark" "ubuntu" ${spark_image}
if [ "$ZUUL_BRANCH" == "stable/juno" ]; then
insert_config_value $tests_config_file_template SPARK SKIP_CINDER_TEST True
insert_config_value $tests_config_file_template SPARK SKIP_CLUSTER_CONFIG_TEST True
insert_config_value $tests_config_file_template SPARK SKIP_SCALING_TEST True
insert_config_value $tests_config_file_template SPARK IMAGE_NAME $spark_image
else
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-spark.yaml"
insert_scenario_value $tests_config_file spark_image
fi
plugin=spark
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-spark.yaml"
insert_scenario_value $tests_config_file spark_image
insert_config_value $sahara_conf_path DEFAULT plugins spark
;;
@@ -105,16 +68,8 @@ case $job_type in
env centos_hdp_hadoop_1_image_name=${hdp_image} SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p hdp -v 1
check_error_code $? ${hdp_image}.qcow2
upload_image "hdp1" "root" ${hdp_image}
if [ "$ZUUL_BRANCH" == "stable/juno" ]; then
insert_config_value $tests_config_file_template HDP SKIP_CINDER_TEST True
insert_config_value $tests_config_file_template HDP SKIP_CLUSTER_CONFIG_TEST True
insert_config_value $tests_config_file_template HDP SKIP_SCALING_TEST True
insert_config_value $tests_config_file_template HDP IMAGE_NAME $hdp_image
else
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-hdp.yaml"
insert_scenario_value $tests_config_file hdp_image
fi
plugin=hdp1
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-hdp.yaml"
insert_scenario_value $tests_config_file hdp_image
;;
hdp_2)
@@ -122,16 +77,8 @@ case $job_type in
check_error_code $? ${hdp_two_image}.qcow2
upload_image "hdp2" "root" ${hdp_two_image}
DISTRIBUTE_MODE=True
if [ "$ZUUL_BRANCH" == "stable/juno" ]; then
insert_config_value $tests_config_file_template HDP2 SKIP_CINDER_TEST True
insert_config_value $tests_config_file_template HDP2 SKIP_CLUSTER_CONFIG_TEST True
insert_config_value $tests_config_file_template HDP2 SKIP_SCALING_TEST True
insert_config_value $tests_config_file_template HDP2 IMAGE_NAME $hdp_two_image
else
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-hdp-2.yaml"
insert_scenario_value $tests_config_file hdp_two_image
fi
plugin=hdp2
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-hdp-2.yaml"
insert_scenario_value $tests_config_file hdp_two_image
;;
cdh)
@@ -144,16 +91,8 @@ case $job_type in
check_error_code $? ${cdh_image}.qcow2
upload_image "cdh" ${username} ${cdh_image}
insert_config_value $sahara_conf_path DEFAULT plugins cdh
if [ "$ZUUL_BRANCH" == "stable/juno" ]; then
insert_config_value $tests_config_file_template CDH SKIP_CINDER_TEST True
insert_config_value $tests_config_file_template CDH SKIP_CLUSTER_CONFIG_TEST True
insert_config_value $tests_config_file_template CDH SKIP_SCALING_TEST True
insert_config_value $tests_config_file_template CDH IMAGE_NAME $cdh_image
else
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-cdh.yaml"
insert_scenario_value $tests_config_file cdh_image
fi
plugin=cdh
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-cdh.yaml"
insert_scenario_value $tests_config_file cdh_image
;;
esac
@@ -166,6 +105,6 @@ sudo pip install .
enable_pypi
write_sahara_main_conf "$sahara_conf_path" "$engine"
write_tests_conf "$tests_config_file" "$cluster_name"
start_sahara "$sahara_conf_path" && run_tests "$tests_config_file" "$plugin"
start_sahara "$sahara_conf_path" && run_tests "$tests_config_file"
print_python_env
cleanup_image "$job_type" "$image_type"
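For context, the fields used above come straight out of the Jenkins job name; a sketch of the parsing, assuming a name of the form produced by the dib templates in this change and that the plugin/image arguments arrive as the script's positional parameters:

# e.g. JOB_NAME=dib-neutron-heat-vanilla_2.6-ubuntu (assumed layout)
engine=$(echo $JOB_NAME | awk -F '-' '{ print $3 }')           # heat
job_type=vanilla_2.6                                           # assumed to arrive as $1
hadoop_version=$(echo $job_type | awk -F '_' '{ print $2 }')   # 2.6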

View File

@@ -108,31 +108,16 @@ write_tests_conf() {
local addr=$(ifconfig eth0| awk -F ' *|:' '/inet addr/{print $4}')
if [ "$USE_NEUTRON" == "true" ]; then
NETWORK="neutron"
TENANT_ID=$NEUTRON_LAB_TENANT_ID
else
NETWORK="nova-network"
TENANT_ID=$NOVA_NET_LAB_TENANT_ID
fi
if [ "$ZUUL_BRANCH" == "master" -o "$ZUUL_BRANCH" == "proposed/kilo" ]; then
local test_scenario_common=$(dirname $1)/scenario-common.yaml
insert_scenario_value $test_scenario_common OS_USERNAME
insert_scenario_value $test_scenario_common OS_PASSWORD
insert_scenario_value $test_scenario_common OS_TENANT_NAME
insert_scenario_value $test_scenario_common OPENSTACK_HOST
insert_scenario_value $test_scenario_common NETWORK
insert_scenario_value $test_scenario_common TENANT_ID
insert_scenario_value $test_conf cluster_name
else
cp $sahara_templates_configs_path/itest.conf.sample $test_conf
insert_config_value $test_conf COMMON OS_USERNAME $OS_USERNAME
insert_config_value $test_conf COMMON OS_PASSWORD $OS_PASSWORD
insert_config_value $test_conf COMMON OS_TENANT_NAME $OS_TENANT_NAME
insert_config_value $test_conf COMMON OS_TENANT_ID $TENANT_ID
insert_config_value $test_conf COMMON OS_AUTH_URL $OS_AUTH_URL
insert_config_value $test_conf COMMON NEUTRON_ENABLED $USE_NEUTRON
insert_config_value $test_conf COMMON SAHARA_HOST $addr
insert_config_value $test_conf COMMON CLUSTER_NAME $cluster_name
fi
local test_scenario_common=$(dirname $1)/scenario-common.yaml
insert_scenario_value $test_scenario_common OS_USERNAME
insert_scenario_value $test_scenario_common OS_PASSWORD
insert_scenario_value $test_scenario_common OS_TENANT_NAME
insert_scenario_value $test_scenario_common OPENSTACK_HOST
insert_scenario_value $test_scenario_common NETWORK
insert_scenario_value $test_conf cluster_name
echo "----------- tests config -----------"
cat $test_conf
@@ -141,19 +126,13 @@ write_tests_conf() {
run_tests() {
local config=$1
local plugin=$2
local concurrency=${3:-"1"}
local concurrency=${2:-"1"}
echo "Integration tests are started"
export PYTHONUNBUFFERED=1
if [ "$ZUUL_BRANCH" == "master" -o "$ZUUL_BRANCH" == "proposed/kilo" ]
then
local scenario_common=$(dirname $1)/scenario-common.yaml
# Temporary use additional log file, due to wrong status code from tox scenario tests
# tox -e scenario $scenario_common $config || failure "Integration tests are failed"
tox -e scenario $scenario_common $config | tee tox.log
STATUS=$(grep "\ -\ Failed" tox.log | awk '{print $3}')
if [ "$STATUS" != "0" ]; then failure "Integration tests have failed"; fi
else
tox -e integration -- $plugin --concurrency=$concurrency || failure "Integration tests have failed"
fi
local scenario_common=$(dirname $1)/scenario-common.yaml
# Temporary use additional log file, due to wrong status code from tox scenario tests
# tox -e scenario $scenario_common $config || failure "Integration tests are failed"
tox -e scenario $scenario_common $config | tee tox.log
STATUS=$(grep "\ -\ Failed" tox.log | awk '{print $3}')
if [ "$STATUS" != "0" ]; then failure "Integration tests have failed"; fi
}
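With the plugin argument gone, run_tests takes only the config path plus an optional concurrency, so the call sites in the gate scripts reduce to:

run_tests "$tests_config_file"       # default concurrency of 1
run_tests "$tests_config_file" 3     # the transient job passes concurrency=3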

View File

@@ -10,9 +10,6 @@ cluster_name="$HOST-$ZUUL_CHANGE-$CLUSTER_HASH"
SAHARA_PATH=${1:-$WORKSPACE}
sahara_conf_path=$SAHARA_PATH/etc/sahara/sahara.conf
# default (deprecated) config file for integration tests
tests_config_file="$SAHARA_PATH/sahara/tests/integration/configs/itest.conf"
tests_config_file_template="$sahara_templates_configs_path/itest.conf.sample"
job_type=$(echo $JOB_NAME | awk -F '-' '{ print $5 }')
engine_type=$(echo $JOB_NAME | awk -F '-' '{ print $4 }')
@@ -21,7 +18,6 @@ engine_type=$(echo $JOB_NAME | awk -F '-' '{ print $4 }')
hdp_image=sahara_hdp_1_latest
hdp_two_image=sahara_hdp_2_latest
vanilla_image=ubuntu_vanilla_1_latest
vanilla_two_four_image=ubuntu_vanilla_2.4_latest
vanilla_two_six_image=ubuntu_vanilla_2.6_latest
spark_image=sahara_spark_latest
cdh_centos_image=centos_cdh_latest
@@ -29,39 +25,17 @@ cdh_ubuntu_image=ubuntu_cdh_latest
case $job_type in
hdp_1)
plugin=hdp1
if [ "$ZUUL_BRANCH" == "stable/juno" ]; then
insert_config_value $tests_config_file_template HDP IMAGE_NAME $hdp_image
else
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-hdp.yaml"
insert_scenario_value $tests_config_file hdp_image
fi
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-hdp.yaml"
insert_scenario_value $tests_config_file hdp_image
;;
hdp_2)
DISTRIBUTE_MODE=True
plugin=hdp2
if [ "$ZUUL_BRANCH" == "stable/juno" ]; then
insert_config_value $tests_config_file_template HDP2 IMAGE_NAME $hdp_two_image
else
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-hdp-2.yaml"
insert_scenario_value $tests_config_file hdp_two_image
fi
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-hdp-2.yaml"
insert_scenario_value $tests_config_file hdp_two_image
;;
vanilla_1)
plugin=vanilla1
if [ "$ZUUL_BRANCH" == "stable/juno" ]; then
insert_config_value $tests_config_file_template VANILLA IMAGE_NAME $vanilla_image
else
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-vanilla-1.2.1.yaml"
insert_scenario_value $tests_config_file vanilla_image
fi
;;
vanilla_2.4)
DISTRIBUTE_MODE=True
plugin=vanilla2
insert_config_value $tests_config_file_template VANILLA_TWO IMAGE_NAME $vanilla_two_four_image
insert_config_value $tests_config_file_template VANILLA_TWO HADOOP_VERSION 2.4.1
insert_config_value $tests_config_file_template VANILLA_TWO HADOOP_EXAMPLES_JAR_PATH "/opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.4.1.jar"
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-vanilla-1.2.1.yaml"
insert_scenario_value $tests_config_file vanilla_image
;;
vanilla_2.6)
DISTRIBUTE_MODE=True
@@ -69,44 +43,25 @@ case $job_type in
insert_scenario_value $tests_config_file vanilla_two_six_image
;;
transient)
plugin=transient
concurrency=3
if [ "$ZUUL_BRANCH" == "stable/juno" ]; then
insert_config_value $tests_config_file_template VANILLA_TWO SKIP_TRANSIENT_CLUSTER_TEST False
insert_config_value $tests_config_file_template VANILLA_TWO ONLY_TRANSIENT_CLUSTER_TEST True
insert_config_value $tests_config_file_template VANILLA_TWO IMAGE_NAME $vanilla_two_four_image
insert_config_value $tests_config_file_template VANILLA_TWO HADOOP_VERSION 2.4.1
else
DISTRIBUTE_MODE=True
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-transient.yaml"
insert_scenario_value $tests_config_file vanilla_two_six_image
fi
DISTRIBUTE_MODE=True
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-transient.yaml"
insert_scenario_value $tests_config_file vanilla_two_six_image
;;
cdh*)
plugin=cdh
insert_config_value $sahara_conf_path DEFAULT plugins cdh
if [[ "$job_type" =~ centos ]]; then
cdh_image=$cdh_centos_image
else
cdh_image=$cdh_ubuntu_image
fi
if [ "$ZUUL_BRANCH" == "stable/juno" ]; then
insert_config_value $tests_config_file_template CDH IMAGE_NAME $cdh_image
insert_config_value $tests_config_file_template CDH SKIP_SCALING_TEST True
else
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-cdh.yaml"
insert_scenario_value $tests_config_file cdh_image
fi
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-cdh.yaml"
insert_scenario_value $tests_config_file cdh_image
;;
spark)
plugin=spark
insert_config_value $sahara_conf_path DEFAULT plugins spark
if [ "$ZUUL_BRANCH" == "stable/juno" ]; then
insert_config_value $tests_config_file_template SPARK IMAGE_NAME $spark_image
else
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-spark.yaml"
insert_scenario_value $tests_config_file spark_image
fi
tests_config_file="$sahara_templates_configs_path/scenario/sahara-scenario-spark.yaml"
insert_scenario_value $tests_config_file spark_image
;;
esac
echo "$plugin detected"
@@ -116,5 +71,5 @@ sudo pip install .
enable_pypi
write_sahara_main_conf "$sahara_conf_path" "$engine_type"
write_tests_conf "$tests_config_file" "$cluster_name"
start_sahara "$sahara_conf_path" && run_tests "$tests_config_file" "$plugin" "$concurrency"
start_sahara "$sahara_conf_path" && run_tests "$tests_config_file" "$concurrency"
print_python_env
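As in the dib script, the job_type/engine_type split follows directly from the gate job names kept in the Zuul layout; a short sketch:

# e.g. one of the remaining check jobs
JOB_NAME=gate-sahara-neutron-heat-vanilla_2.6
engine_type=$(echo $JOB_NAME | awk -F '-' '{ print $4 }')   # heat
job_type=$(echo $JOB_NAME | awk -F '-' '{ print $5 }')      # vanilla_2.6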

View File

@@ -3,14 +3,7 @@
git clone https://git.openstack.org/openstack/sahara /tmp/sahara
cd /tmp/sahara
if [ "$ZUUL_BRANCH" == "master" -o "$ZUUL_BRANCH" == "proposed/kilo" ]; then
tox -e scenario --notest
.tox/scenario/bin/pip install $WORKSPACE
else
git checkout "$ZUUL_BRANCH"
sudo pip install -U -r requirements.txt
tox -e integration --notest
.tox/integration/bin/pip install $WORKSPACE
fi
tox -e scenario --notest
.tox/scenario/bin/pip install $WORKSPACE
$WORKSPACE/sahara-ci-config/slave-scripts/gate-sahara.sh /tmp/sahara

View File

@@ -1,8 +1,6 @@
#!/bin/bash -e
source $JENKINS_HOME/credentials
sed "s%-NEUTRON_LAB_TENANT_ID-%$NEUTRON_LAB_TENANT_ID%g" -i $WORKSPACE/config/zuul/openstack_functions.py
sed "s%-NOVA_NET_LAB_TENANT_ID-%$NOVA_NET_LAB_TENANT_ID%g" -i $WORKSPACE/config/zuul/openstack_functions.py
sudo su - jenkins -c "cat $WORKSPACE/slave-scripts/credentials.conf > /etc/jenkins_jobs/credentials.conf"
sudo su - zuul -c "cat $WORKSPACE/config/zuul/zuul.conf > /etc/zuul/zuul.conf"
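The two sed lines rewrite the placeholder assignments shown in openstack_functions.py earlier in this change; for example (the tenant id value here is illustrative only):

# before:  params['NEUTRON_LAB_TENANT_ID'] = '-NEUTRON_LAB_TENANT_ID-'
# after:   params['NEUTRON_LAB_TENANT_ID'] = '0123abcd'
#          (with NEUTRON_LAB_TENANT_ID=0123abcd sourced from $JENKINS_HOME/credentials)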