Add DataSourceMaaS, a Data Source for Ubuntu Machines as a Service (MAAS)

This commit is contained in:
Scott Moser
2012-03-08 15:02:55 -05:00
10 changed files with 600 additions and 88 deletions

View File

@@ -27,6 +27,7 @@
- DataSourceConfigDrive: support getting data from openstack config drive (LP: #857378)
- DataSourceNoCloud: support seed from external disk of ISO or vfat (LP: #857378)
- DataSourceNoCloud: support inserting /etc/network/interfaces
- DataSourceMaaS: add data source for Ubuntu Machines as a Service (MaaS) (LP: #942061)
0.6.2:
- fix bug where update was not done unless update was explicitly set.
It would not be run if 'upgrade' or packages were set to be installed

View File

@@ -24,7 +24,6 @@ from cloudinit import seeddir as base_seeddir
from cloudinit import log
import cloudinit.util as util
import socket
import urllib2
import time
import boto.utils as boto_utils
import os.path
@@ -134,8 +133,8 @@ class DataSourceEc2(DataSource.DataSource):
url2base[cur] = url
starttime = time.time()
url = wait_for_metadata_service(urls=urls, max_wait=max_wait,
timeout=timeout, status_cb=log.warn)
url = util.wait_for_url(urls=urls, max_wait=max_wait,
timeout=timeout, status_cb=log.warn)
if url:
log.debug("Using metadata source: '%s'" % url2base[url])
@@ -208,87 +207,6 @@ class DataSourceEc2(DataSource.DataSource):
return False
def wait_for_metadata_service(urls, max_wait=None, timeout=None,
                              status_cb=None):
    """Wait until one of ``urls`` returns non-empty data; return that url.

    urls: a list of urls to try
    max_wait: roughly the maximum time to wait before giving up.
        The max time is *actually* len(urls)*timeout as each url will
        be tried once and given the timeout provided.
    timeout: the timeout provided to urllib2.urlopen
    status_cb: call method with string message when a url is not available

    The idea of this routine is to wait for the EC2 metadata service to
    come up.  On both Eucalyptus and EC2 we have seen the case where
    the instance hit the MD before the MD service was up.  EC2 seems
    to have permanently fixed this, though.

    In openstack, the metadata service might be painfully slow, and
    unable to avoid hitting a timeout of even up to 10 seconds or more
    (LP: #894279) for a simple GET.

    Offset those needs with the need to not hang forever (and block boot)
    on a system where cloud-init is configured to look for EC2 Metadata
    service but is not going to find one.  It is possible that the
    instance data host (169.254.169.254) may be firewalled off entirely
    for a system, meaning that the connection will block forever unless
    a timeout is set.

    Returns the first responsive url, or False if max_wait elapsed.
    """
    starttime = time.time()

    def nullstatus_cb(msg):
        # default no-op status reporter
        return

    if status_cb is None:
        status_cb = nullstatus_cb

    def timeup(max_wait, starttime):
        # check None first: the old code evaluated 'max_wait <= 0' before
        # the None test, which relies on py2's None-vs-int ordering
        if max_wait is None or max_wait <= 0:
            return True
        return (time.time() - starttime) > max_wait

    loop_n = 0
    while True:
        # back off slowly: 1s sleeps for the first five passes, then 2s, ...
        sleeptime = int(loop_n / 5) + 1
        for url in urls:
            now = time.time()
            if loop_n != 0:
                if timeup(max_wait, starttime):
                    break
                if (timeout and max_wait is not None and
                        (now + timeout > (starttime + max_wait))):
                    # shorten timeout to not run way over max_wait
                    timeout = int((starttime + max_wait) - now)

            reason = ""
            try:
                req = urllib2.Request(url)
                resp = urllib2.urlopen(req, timeout=timeout)
                if resp.read() != "":
                    return url
                reason = "empty data [%s]" % resp.getcode()
            except urllib2.HTTPError as e:
                reason = "http error [%s]" % e.code
            except urllib2.URLError as e:
                reason = "url error [%s]" % e.reason
            except socket.timeout as e:
                reason = "socket timeout [%s]" % e
            except Exception as e:
                reason = "unexpected error [%s]" % e

            # BUG FIX: this was guarded by 'if log:', testing the module
            # logger instead of the caller-provided callback; status_cb is
            # always safe to call (defaults to a no-op above)
            status_cb("'%s' failed [%s/%ss]: %s" %
                      (url, int(time.time() - starttime), max_wait,
                       reason))

        if timeup(max_wait, starttime):
            break

        loop_n = loop_n + 1
        time.sleep(sleeptime)

    return False
# data sources exported by this module, each paired with the dependencies
# (mounted filesystem, working network) it requires before it can run
datasources = [
    (DataSourceEc2, (DataSource.DEP_FILESYSTEM, DataSource.DEP_NETWORK)),
]

342
cloudinit/DataSourceMaaS.py Normal file
View File

@@ -0,0 +1,342 @@
# vi: ts=4 expandtab
#
# Copyright (C) 2009-2010 Canonical Ltd.
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
#
# Author: Scott Moser <scott.moser@canonical.com>
# Author: Juerg Hafliger <juerg.haefliger@hp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import cloudinit.DataSource as DataSource
from cloudinit import seeddir as base_seeddir
from cloudinit import log
import cloudinit.util as util
import errno
import oauth.oauth as oauth
import os.path
import urllib2
import time
MD_VERSION = "2012-03-01"
class DataSourceMaaS(DataSource.DataSource):
    """
    DataSourceMaaS reads instance information from MaaS.

    Given a config metadata_url, and oauth tokens, it expects to find
    files under the root named:
      instance-id
      user-data
      hostname
    """
    # seed data may be pre-populated locally under this directory
    seeddir = base_seeddir + '/maas'
    # base of the source actually used (seeddir or metadata_url)
    baseurl = None

    def __str__(self):
        return("DataSourceMaaS[%s]" % self.baseurl)

    def get_data(self):
        """Populate self.userdata_raw / self.metadata from the local seed
        dir, or failing that, from the configured metadata_url.
        Return True if data was obtained."""
        mcfg = self.ds_cfg

        try:
            (userdata, metadata) = read_maas_seed_dir(self.seeddir)
            self.userdata_raw = userdata
            self.metadata = metadata
            self.baseurl = self.seeddir
            return True
        except MaasSeedDirNone:
            # no local seed present; fall through to the url-based source
            pass
        except MaasSeedDirMalformed as exc:
            log.warn("%s was malformed: %s\n" % (self.seeddir, exc))
            raise

        try:
            # if there is no metadata_url, then we're not configured
            url = mcfg.get('metadata_url', None)
            if url is None:
                return False

            if not self.wait_for_metadata_service(url):
                return False

            self.baseurl = url

            (userdata, metadata) = read_maas_seed_url(self.baseurl,
                self.md_headers)
            # BUG FIX: the values read from the url were previously
            # unpacked but never stored, leaving userdata_raw/metadata
            # unset on the url path
            self.userdata_raw = userdata
            self.metadata = metadata
            return True
        except Exception:
            util.logexc(log)
            return False

    def md_headers(self, url):
        """Return oauth Authorization headers for url, or {} when oauth
        credentials are not fully configured."""
        mcfg = self.ds_cfg

        # if we are missing token_key, token_secret or consumer_key
        # then just do non-authed requests
        for required in ('token_key', 'token_secret', 'consumer_key'):
            if required not in mcfg:
                return({})

        consumer_secret = mcfg.get('consumer_secret', "")

        return(oauth_headers(url=url, consumer_key=mcfg['consumer_key'],
            token_key=mcfg['token_key'], token_secret=mcfg['token_secret'],
            consumer_secret=consumer_secret))

    def wait_for_metadata_service(self, url):
        """Poll <url>/instance-id for up to ds_cfg 'max_wait' seconds.
        Return True if the metadata service responded."""
        mcfg = self.ds_cfg

        max_wait = 120
        try:
            max_wait = int(mcfg.get("max_wait", max_wait))
        except Exception:
            util.logexc(log)
            log.warn("Failed to get max wait. using %s" % max_wait)

        # max_wait of 0 means "do not wait at all"
        if max_wait == 0:
            return False

        timeout = 50
        try:
            timeout = int(mcfg.get("timeout", timeout))
        except Exception:
            util.logexc(log)
            log.warn("Failed to get timeout, using %s" % timeout)

        starttime = time.time()
        check_url = "%s/instance-id" % url
        url = util.wait_for_url(urls=[check_url], max_wait=max_wait,
            timeout=timeout, status_cb=log.warn,
            headers_cb=self.md_headers)

        if url:
            log.debug("Using metadata source: '%s'" % url)
        else:
            log.critical("giving up on md after %i seconds\n" %
                int(time.time() - starttime))

        return (bool(url))
def read_maas_seed_dir(seed_d):
    """
    Return user-data and metadata for a maas seed dir in seed_d.

    Expected format of seed_d is the following files:
      * instance-id
      * local-hostname
      * user-data

    Raises MaasSeedDirNone when seed_d is not a directory or contains no
    data files, MaasSeedDirMalformed when required files are missing
    (via check_seed_contents).
    """
    files = ('local-hostname', 'instance-id', 'user-data')
    md = {}

    if not os.path.isdir(seed_d):
        # BUG FIX: the "%s" previously had no format argument applied,
        # producing a literal "%s: not a directory" message
        raise MaasSeedDirNone("%s: not a directory" % seed_d)

    for fname in files:
        try:
            # the 'with' block closes the file; the old explicit
            # fp.close() inside it was redundant
            with open(os.path.join(seed_d, fname)) as fp:
                md[fname] = fp.read()
        except IOError as e:
            # a missing file is acceptable here; check_seed_contents
            # decides whether the collected set is complete enough
            if e.errno != errno.ENOENT:
                raise

    return check_seed_contents(md, seed_d)
def read_maas_seed_url(seed_url, header_cb=None, timeout=None,
                       version=MD_VERSION):
    """
    Read the maas datasource rooted at seed_url.

    header_cb: optional callable taking a url and returning a headers
        dict that will be given to urllib2.Request()
    timeout: per-request timeout handed to urllib2.urlopen
    version: api version path component inserted after seed_url

    Files fetched beneath <seed_url>/<version>:
      * meta-data/instance-id
      * meta-data/local-hostname
      * user-data

    Returns the (userdata, metadata) tuple from check_seed_contents.
    """
    base_url = "%s/%s" % (seed_url, version)
    md = {}
    for fname in ('meta-data/local-hostname', 'meta-data/instance-id',
                  'user-data'):
        url = "%s/%s" % (base_url, fname)
        headers = header_cb(url) if header_cb else {}
        try:
            resp = urllib2.urlopen(
                urllib2.Request(url, data=None, headers=headers),
                timeout=timeout)
            md[os.path.basename(fname)] = resp.read()
        except urllib2.HTTPError as e:
            # a 404 just means this (possibly optional) file is absent
            if e.code != 404:
                raise
    return check_seed_contents(md, seed_url)
def check_seed_contents(content, seed):
    """Validate that ``content`` is a dict usable as datasource output.

    content: dict mapping file name -> file contents, as read from seed
    seed: the seed location (dir or url), used only in error messages

    Return a (userdata, metadata) tuple, or raise:
      MaasSeedDirNone -- content is empty (no data files found)
      MaasSeedDirMalformed -- a required metadata file is missing
    """
    md_required = ('instance-id', 'local-hostname')

    if len(content) == 0:
        raise MaasSeedDirNone("%s: no data files found" % seed)

    missing = [k for k in md_required if k not in content]
    if missing:
        raise MaasSeedDirMalformed("%s: missing files %s" % (seed, missing))

    # user-data is optional and defaults to empty
    userdata = content.get('user-data', "")
    # everything except user-data becomes metadata
    # (.items() instead of py2-only .iteritems() for portability)
    md = dict((key, val) for (key, val) in content.items()
              if key != 'user-data')

    return (userdata, md)
def oauth_headers(url, consumer_key, token_key, token_secret, consumer_secret):
    """Build OAuth 1.0 Authorization headers for a request to url.

    Signs with the PLAINTEXT method and returns the headers dict produced
    by oauth.OAuthRequest.to_header(), suitable for urllib2.Request.
    """
    consumer = oauth.OAuthConsumer(consumer_key, consumer_secret)
    token = oauth.OAuthToken(token_key, token_secret)
    params = {
        'oauth_version': "1.0",
        'oauth_nonce': oauth.generate_nonce(),
        'oauth_timestamp': int(time.time()),
        'oauth_token': token.key,
        'oauth_consumer_key': consumer.key,
    }
    req = oauth.OAuthRequest(http_url=url, parameters=params)
    req.sign_request(oauth.OAuthSignatureMethod_PLAINTEXT(),
        consumer, token)
    return(req.to_header())
class MaasSeedDirNone(Exception):
    """Raised when no maas seed data is found at the given location."""
    pass


class MaasSeedDirMalformed(Exception):
    """Raised when seed data exists but required files are missing."""
    pass
# data sources exported by this module, each paired with the dependencies
# (mounted filesystem, working network) it requires before it can run
datasources = [
    (DataSourceMaaS, (DataSource.DEP_FILESYSTEM, DataSource.DEP_NETWORK)),
]
def get_datasource_list(depends):
    """Return the data sources in this module matching ``depends``."""
    return DataSource.list_from_depends(depends, datasources)
if __name__ == "__main__":
def main():
"""
Call with single argument of directory or http or https url.
If url is given additional arguments are allowed, which will be
interpreted as consumer_key, token_key, token_secret, consumer_secret
"""
import argparse
import pprint
parser = argparse.ArgumentParser(description='Interact with Maas DS')
parser.add_argument("--config", metavar="file",
help="specify DS config file", default=None)
parser.add_argument("--ckey", metavar="key",
help="the consumer key to auth with", default=None)
parser.add_argument("--tkey", metavar="key",
help="the token key to auth with", default=None)
parser.add_argument("--csec", metavar="secret",
help="the consumer secret (likely '')", default="")
parser.add_argument("--tsec", metavar="secret",
help="the token secret to auth with", default=None)
parser.add_argument("--apiver", metavar="version",
help="the apiver to use ("" can be used)", default=MD_VERSION)
subcmds = parser.add_subparsers(title="subcommands", dest="subcmd")
subcmds.add_parser('crawl', help="crawl the datasource")
subcmds.add_parser('get', help="do a single GET of provided url")
subcmds.add_parser('check-seed', help="read andn verify seed at url")
parser.add_argument("url", help="the data source to query")
args = parser.parse_args()
creds = {'consumer_key': args.ckey, 'token_key': args.tkey,
'token_secret': args.tsec, 'consumer_secret': args.csec}
if args.config:
import yaml
with open(args.config) as fp:
cfg = yaml.load(fp)
if 'datasource' in cfg:
cfg = cfg['datasource']['MaaS']
for key in creds.keys():
if key in cfg and creds[key] == None:
creds[key] = cfg[key]
def geturl(url, headers_cb):
req = urllib2.Request(url, data=None, headers=headers_cb(url))
return(urllib2.urlopen(req).read())
def printurl(url, headers_cb):
print "== %s ==\n%s\n" % (url, geturl(url, headers_cb))
def crawl(url, headers_cb=None):
if url.endswith("/"):
for line in geturl(url, headers_cb).splitlines():
if line.endswith("/"):
crawl("%s%s" % (url, line), headers_cb)
else:
printurl("%s%s" % (url, line), headers_cb)
else:
printurl(url, headers_cb)
def my_headers(url):
headers = {}
if creds.get('consumer_key', None) != None:
headers = oauth_headers(url, **creds)
return headers
if args.subcmd == "check-seed":
if args.url.startswith("http"):
(userdata, metadata) = read_maas_seed_url(args.url,
header_cb=my_headers, version=args.apiver)
else:
(userdata, metadata) = read_maas_seed_url(args.url)
print "=== userdata ==="
print userdata
print "=== metadata ==="
pprint.pprint(metadata)
elif args.subcmd == "get":
printurl(args.url, my_headers)
elif args.subcmd == "crawl":
if not args.url.endswith("/"):
args.url = "%s/" % args.url
crawl(args.url, my_headers)
main()

View File

@@ -29,7 +29,7 @@ cfg_env_name = "CLOUD_CFG"
cfg_builtin = """
log_cfgs: []
datasource_list: ["NoCloud", "ConfigDrive", "OVF", "Ec2"]
datasource_list: ["NoCloud", "ConfigDrive", "OVF", "MaaS", "Ec2" ]
def_log_file: /var/log/cloud-init.log
syslog_fix_perms: syslog:adm
"""

View File

@@ -753,3 +753,90 @@ def mount_callback_umount(device, callback, data=None):
_cleanup(umount, tmpd)
return(ret)
def wait_for_url(urls, max_wait=None, timeout=None,
                 status_cb=None, headers_cb=None):
    """Wait until one of ``urls`` returns non-empty data; return that url.

    urls: a list of urls to try
    max_wait: roughly the maximum time to wait before giving up.
        The max time is *actually* len(urls)*timeout as each url will
        be tried once and given the timeout provided.
    timeout: the timeout provided to urllib2.urlopen
    status_cb: call method with string message when a url is not available
    headers_cb: call method with single argument of url to get headers
        for request.

    The idea of this routine is to wait for the EC2 metadata service to
    come up.  On both Eucalyptus and EC2 we have seen the case where
    the instance hit the MD before the MD service was up.  EC2 seems
    to have permanently fixed this, though.

    In openstack, the metadata service might be painfully slow, and
    unable to avoid hitting a timeout of even up to 10 seconds or more
    (LP: #894279) for a simple GET.

    Offset those needs with the need to not hang forever (and block boot)
    on a system where cloud-init is configured to look for EC2 Metadata
    service but is not going to find one.  It is possible that the
    instance data host (169.254.169.254) may be firewalled off entirely
    for a system, meaning that the connection will block forever unless
    a timeout is set.

    Returns the first responsive url, or False if max_wait elapsed.
    """
    starttime = time.time()

    def nullstatus_cb(msg):
        # default no-op status reporter
        return

    if status_cb is None:
        status_cb = nullstatus_cb

    def timeup(max_wait, starttime):
        # check None first: the old code evaluated 'max_wait <= 0' before
        # the None test, which relies on py2's None-vs-int ordering
        if max_wait is None or max_wait <= 0:
            return True
        return (time.time() - starttime) > max_wait

    loop_n = 0
    while True:
        # back off slowly: 1s sleeps for the first five passes, then 2s, ...
        sleeptime = int(loop_n / 5) + 1
        for url in urls:
            now = time.time()
            if loop_n != 0:
                if timeup(max_wait, starttime):
                    break
                if (timeout and max_wait is not None and
                        (now + timeout > (starttime + max_wait))):
                    # shorten timeout to not run way over max_wait
                    timeout = int((starttime + max_wait) - now)

            reason = ""
            try:
                if headers_cb is not None:
                    headers = headers_cb(url)
                else:
                    headers = {}

                req = urllib2.Request(url, data=None, headers=headers)
                resp = urllib2.urlopen(req, timeout=timeout)
                if resp.read() != "":
                    return url
                reason = "empty data [%s]" % resp.getcode()
            except urllib2.HTTPError as e:
                reason = "http error [%s]" % e.code
            except urllib2.URLError as e:
                reason = "url error [%s]" % e.reason
            except socket.timeout as e:
                reason = "socket timeout [%s]" % e
            except Exception as e:
                reason = "unexpected error [%s]" % e

            status_cb("'%s' failed [%s/%ss]: %s" %
                      (url, int(time.time() - starttime), max_wait,
                       reason))

        if timeup(max_wait, starttime):
            break

        loop_n = loop_n + 1
        time.sleep(sleeptime)

    return False

View File

@@ -1,7 +1,7 @@
user: ubuntu
disable_root: 1
preserve_hostname: False
# datasource_list: [ "NoCloud", "ConfigDrive", "OVF", "Ec2" ]
# datasource_list: ["NoCloud", "ConfigDrive", "OVF", "MaaS", "Ec2" ]
cloud_init_modules:
- bootcmd

View File

@@ -13,3 +13,14 @@ datasource:
metadata_urls:
- http://169.254.169.254:80
- http://instance-data:8773
MaaS:
timeout : 50
max_wait : 120
# there are no default values for metadata_url or oauth credentials
# If no credentials are present, non-authed attempts will be made.
metadata_url: http://maas-host.localdomain/source
consumer_key: Xh234sdkljf
token_key: kjfhgb3n
token_secret: 24uysdfx1w4

View File

@@ -0,0 +1,151 @@
from tempfile import mkdtemp
from shutil import rmtree
import os
from StringIO import StringIO
from copy import copy
from cloudinit.DataSourceMaaS import (
MaasSeedDirNone,
MaasSeedDirMalformed,
read_maas_seed_dir,
read_maas_seed_url,
)
from mocker import MockerTestCase
class TestMaasDataSource(MockerTestCase):
    """Tests for the maas seed-dir and seed-url readers."""

    def setUp(self):
        super(TestMaasDataSource, self).setUp()
        # Make a temp directory for tests to use.
        self.tmp = mkdtemp(prefix="unittest_")

    def tearDown(self):
        super(TestMaasDataSource, self).tearDown()
        # Clean up temp directory
        rmtree(self.tmp)

    def test_seed_dir_valid(self):
        """Verify a valid seeddir is read as such"""

        data = {'instance-id': 'i-valid01',
            'local-hostname': 'valid01-hostname',
            'user-data': 'valid01-userdata'}

        my_d = os.path.join(self.tmp, "valid")
        populate_dir(my_d, data)

        (userdata, metadata) = read_maas_seed_dir(my_d)

        self.assertEqual(userdata, data['user-data'])
        for key in ('instance-id', 'local-hostname'):
            self.assertEqual(data[key], metadata[key])

        # verify that 'userdata' is not returned as part of the metadata
        self.assertFalse(('user-data' in metadata))

    def test_seed_dir_valid_extra(self):
        """Verify extra files do not affect seed_dir validity """

        data = {'instance-id': 'i-valid-extra',
            'local-hostname': 'valid-extra-hostname',
            'user-data': 'valid-extra-userdata', 'foo': 'bar'}

        my_d = os.path.join(self.tmp, "valid_extra")
        populate_dir(my_d, data)

        (userdata, metadata) = read_maas_seed_dir(my_d)

        self.assertEqual(userdata, data['user-data'])
        for key in ('instance-id', 'local-hostname'):
            self.assertEqual(data[key], metadata[key])

        # additional files should not just appear as keys in metadata atm
        self.assertFalse(('foo' in metadata))

    def test_seed_dir_invalid(self):
        """Verify that invalid seed_dir raises MaasSeedDirMalformed"""

        valid = {'instance-id': 'i-instanceid',
            'local-hostname': 'test-hostname', 'user-data': ''}

        my_based = os.path.join(self.tmp, "valid_extra")

        # missing 'local-hostname' file
        # (NOTE(review): the old comment here said 'userdata', but the
        # code below deletes 'local-hostname')
        my_d = "%s-01" % my_based
        invalid_data = copy(valid)
        del invalid_data['local-hostname']
        populate_dir(my_d, invalid_data)
        self.assertRaises(MaasSeedDirMalformed, read_maas_seed_dir, my_d)

        # missing 'instance-id'
        my_d = "%s-02" % my_based
        invalid_data = copy(valid)
        del invalid_data['instance-id']
        populate_dir(my_d, invalid_data)
        self.assertRaises(MaasSeedDirMalformed, read_maas_seed_dir, my_d)

    def test_seed_dir_none(self):
        """Verify that empty seed_dir raises MaasSeedDirNone"""
        my_d = os.path.join(self.tmp, "valid_empty")
        self.assertRaises(MaasSeedDirNone, read_maas_seed_dir, my_d)

    def test_seed_dir_missing(self):
        """Verify that missing seed_dir raises MaasSeedDirNone"""
        self.assertRaises(MaasSeedDirNone, read_maas_seed_dir,
            os.path.join(self.tmp, "nonexistantdirectory"))

    def test_seed_url_valid(self):
        """Verify that valid seed_url is read as such"""
        valid = {'meta-data/instance-id': 'i-instanceid',
            'meta-data/local-hostname': 'test-hostname',
            'user-data': 'foodata'}

        my_seed = "http://example.com/xmeta"
        my_ver = "1999-99-99"
        my_headers = {'header1': 'value1', 'header2': 'value2'}

        def my_headers_cb(url):
            return(my_headers)

        # replace urllib2 so no real network requests are made; each file
        # fetch returns the canned value from 'valid'
        mock_request = self.mocker.replace("urllib2.Request",
            passthrough=False)
        mock_urlopen = self.mocker.replace("urllib2.urlopen",
            passthrough=False)

        for (key, val) in valid.iteritems():
            mock_request("%s/%s/%s" % (my_seed, my_ver, key),
                data=None, headers=my_headers)
            self.mocker.nospec()
            self.mocker.result("fake-request-%s" % key)
            mock_urlopen("fake-request-%s" % key, timeout=None)
            self.mocker.result(StringIO(val))

        self.mocker.replay()

        (userdata, metadata) = read_maas_seed_url(my_seed,
            header_cb=my_headers_cb, version=my_ver)

        self.assertEqual("foodata", userdata)
        self.assertEqual(metadata['instance-id'],
            valid['meta-data/instance-id'])
        self.assertEqual(metadata['local-hostname'],
            valid['meta-data/local-hostname'])

    def test_seed_url_invalid(self):
        """Verify that invalid seed_url raises MaasSeedDirMalformed"""
        # TODO(review): not yet implemented
        pass

    def test_seed_url_missing(self):
        """Verify seed_url with no found entries raises MaasSeedDirNone"""
        # TODO(review): not yet implemented
        pass
def populate_dir(seed_dir, files):
    """Create seed_dir and write each (name -> content) item as a file.

    seed_dir must not already exist; its parent must.
    """
    os.mkdir(seed_dir)
    # .items() instead of py2-only .iteritems(); the 'with' block closes
    # the file, so the old explicit fp.close() was redundant
    for (name, content) in files.items():
        with open(os.path.join(seed_dir, name), "w") as fp:
            fp.write(content)
# vi: ts=4 expandtab

View File

@@ -28,7 +28,7 @@ class TestMergeDict(TestCase):
def test_merge_does_not_override(self):
    """Test that candidate doesn't override source."""
    source = {"key1": "value1", "key2": "value2"}
    # BUG FIX: the superseded assignment used "key2" twice in the dict
    # literal (a duplicate key, where the second silently overwrote the
    # first); only the corrected candidate is kept
    candidate = {"key1": "value2", "key2": "NEW VALUE"}
    result = mergedict(source, candidate)
    self.assertEqual(source, result)

View File

@@ -1,6 +1,8 @@
#!/bin/bash
def_files='cloud*.py cloudinit/*.py cloudinit/CloudConfig/*.py'
ci_files='cloud*.py cloudinit/*.py cloudinit/CloudConfig/*.py'
test_files=$(find tests -name "*.py")
def_files="$ci_files $test_files"
if [ $# -eq 0 ]; then
files=( )