From 5d7b16b5f36a88ad42037907178ddca3bc425490 Mon Sep 17 00:00:00 2001 From: Denis Egorenko Date: Thu, 9 Oct 2014 13:57:05 +0400 Subject: [PATCH] Fix problem with cloud-init on nova-network Add datasource modules, which are not included by default on Ubuntu 14.04. Change-Id: I645ea17c14960e12bf8649526273e2d7c619c25f Closes-bug: #1375645 --- diskimage-create/README.rst | 4 ++++ diskimage-create/diskimage-create.sh | 12 ++++++++++++ 2 files changed, 16 insertions(+) diff --git a/diskimage-create/README.rst b/diskimage-create/README.rst index 01bb83d8..a11bb488 100644 --- a/diskimage-create/README.rst +++ b/diskimage-create/README.rst @@ -58,6 +58,10 @@ Resizing disk space during firstboot on that images fails with errors (https://b For all another images parameter DIB_IMAGE_SIZE will be unset. +`DIB_CLOUD_INIT_DATASOURCES` contains a growing collection of data source modules and most are enabled by default. This causes cloud-init to query each data source +on first boot. This can cause delays or even boot problems depending on your environment. +You may define `DIB_CLOUD_INIT_DATASOURCES` as a comma-separated list of valid data sources to limit the data sources that will be queried for metadata on first boot. + For developers: diff --git a/diskimage-create/diskimage-create.sh b/diskimage-create/diskimage-create.sh index b8191dcc..6c2c8fbf 100755 --- a/diskimage-create/diskimage-create.sh +++ b/diskimage-create/diskimage-create.sh @@ -12,6 +12,9 @@ DEBUG_MODE="false" # The default tag to use for the DIB repo DEFAULT_DIB_REPO_BRANCH="0.1.29" +# Default list of datasource modules for Ubuntu. 
Workaround for bug #1375645 +export CLOUD_INIT_DATASOURCES=${DIB_CLOUD_INIT_DATASOURCES:-"NoCloud, ConfigDrive, OVF, MAAS, Ec2"} + usage() { echo echo "Usage: $(basename $0)" @@ -229,6 +232,8 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "vanilla" ]; then # Ubuntu cloud image if [ -z "$BASE_IMAGE_OS" -o "$BASE_IMAGE_OS" = "ubuntu" ]; then + export DIB_CLOUD_INIT_DATASOURCES=$CLOUD_INIT_DATASOURCES + if [ -z "$HADOOP_VERSION" -o "$HADOOP_VERSION" = "1" ]; then export DIB_HADOOP_VERSION=${DIB_HADOOP_VERSION_1:-"1.2.1"} export ubuntu_image_name=${ubuntu_vanilla_hadoop_1_image_name:-"ubuntu_sahara_vanilla_hadoop_1_latest"} @@ -248,6 +253,7 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "vanilla" ]; then disk-image-create $ubuntu_elements_sequence -o $ubuntu_image_name mv $ubuntu_image_name.qcow2 ../ fi + unset DIB_CLOUD_INIT_DATASOURCES fi # Fedora cloud image @@ -308,6 +314,8 @@ fi ########################## if [ -z "$PLUGIN" -o "$PLUGIN" = "spark" ]; then + export DIB_CLOUD_INIT_DATASOURCES=$CLOUD_INIT_DATASOURCES + # Ignoring image type and hadoop version options echo "For spark plugin options -i and -v are ignored" @@ -324,6 +332,7 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "spark" ]; then # Creating Ubuntu cloud image disk-image-create $ubuntu_elements_sequence -o $ubuntu_image_name mv $ubuntu_image_name.qcow2 ../ + unset DIB_CLOUD_INIT_DATASOURCES fi @@ -332,6 +341,8 @@ fi ########################## if [ -z "$PLUGIN" -o "$PLUGIN" = "storm" ]; then + export DIB_CLOUD_INIT_DATASOURCES=$CLOUD_INIT_DATASOURCES + # Ignoring image type and hadoop version options echo "For storm plugin options -i and -v are ignored" @@ -348,6 +359,7 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "storm" ]; then # Creating Ubuntu cloud image disk-image-create $ubuntu_elements_sequence -o $ubuntu_image_name mv $ubuntu_image_name.qcow2 ../ + unset DIB_CLOUD_INIT_DATASOURCES fi ######################### # Images for HDP plugin #