2010-05-27 23:05:26 -07:00
|
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
2010-06-23 22:04:16 -07:00
|
|
|
|
|
|
|
# Copyright 2010 United States Government as represented by the
|
2010-06-23 23:15:06 -07:00
|
|
|
# Administrator of the National Aeronautics and Space Administration.
|
2011-02-23 12:05:49 -08:00
|
|
|
# Copyright 2011 Justin Santa Barbara
|
2010-06-23 22:04:16 -07:00
|
|
|
# All Rights Reserved.
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
|
|
# not use this file except in compliance with the License. You may obtain
|
|
|
|
# a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
2010-05-27 23:05:26 -07:00
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
2010-06-23 22:04:16 -07:00
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
|
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
|
|
# License for the specific language governing permissions and limitations
|
|
|
|
# under the License.
|
2010-05-27 23:05:26 -07:00
|
|
|
|
2011-04-20 12:08:22 -07:00
|
|
|
"""Utilities and helper functions."""
|
2010-05-27 23:05:26 -07:00
|
|
|
|
2011-10-10 17:58:56 -05:00
|
|
|
import contextlib
|
2010-08-16 14:16:21 +02:00
|
|
|
import datetime
|
2011-03-01 20:49:46 +01:00
|
|
|
import functools
|
2012-01-09 18:21:57 +11:00
|
|
|
import hashlib
|
2010-07-08 10:53:27 -07:00
|
|
|
import inspect
|
2010-07-07 12:24:24 -07:00
|
|
|
import os
|
2012-01-10 11:22:33 -06:00
|
|
|
import pyclbr
|
2010-05-27 23:05:26 -07:00
|
|
|
import random
|
2011-03-01 20:49:46 +01:00
|
|
|
import re
|
2012-02-28 05:54:48 +00:00
|
|
|
import shutil
|
2010-07-14 16:27:18 -05:00
|
|
|
import socket
|
2010-11-23 21:48:32 +00:00
|
|
|
import struct
|
2010-07-08 10:53:27 -07:00
|
|
|
import sys
|
2012-02-28 05:54:48 +00:00
|
|
|
import tempfile
|
2010-12-15 11:23:33 -08:00
|
|
|
import time
|
2010-10-25 19:21:09 +09:00
|
|
|
from xml.sax import saxutils
|
2010-05-27 23:05:26 -07:00
|
|
|
|
2013-05-14 10:49:15 -04:00
|
|
|
import eventlet
|
2011-12-29 10:55:42 -05:00
|
|
|
import netaddr
|
2011-04-20 12:08:22 -07:00
|
|
|
|
2013-02-15 22:30:16 +00:00
|
|
|
from oslo.config import cfg
|
|
|
|
|
2010-07-28 12:53:27 -07:00
|
|
|
from nova import exception
|
2012-05-01 17:31:26 -04:00
|
|
|
from nova.openstack.common import excutils
|
2012-04-27 14:14:18 -04:00
|
|
|
from nova.openstack.common import importutils
|
2013-05-11 21:25:16 +08:00
|
|
|
from nova.openstack.common import lockutils
|
2012-06-28 15:59:23 -05:00
|
|
|
from nova.openstack.common import log as logging
|
2013-05-03 10:47:39 +10:00
|
|
|
from nova.openstack.common import processutils
|
2013-01-30 11:17:33 -08:00
|
|
|
from nova.openstack.common.rpc import common as rpc_common
|
2012-06-06 10:32:49 +08:00
|
|
|
from nova.openstack.common import timeutils
|
2010-06-10 18:39:07 +01:00
|
|
|
|
2013-01-31 12:55:19 +00:00
|
|
|
# Dotted path of the decorator installed by monkey_patch() below.
notify_decorator = 'nova.openstack.common.notifier.api.notify_decorator'


# Options controlling the monkey_patch() helper in this module.
monkey_patch_opts = [
    cfg.BoolOpt('monkey_patch',
                default=False,
                help='Whether to log monkey patching'),
    cfg.ListOpt('monkey_patch_modules',
                default=[
                    'nova.api.ec2.cloud:%s' % (notify_decorator),
                    'nova.compute.api:%s' % (notify_decorator)
                ],
                help='List of modules/decorators to monkey patch'),
]

# General-purpose options consumed by helpers in this module
# (generate_password, last_completed_audit_period, execute/trycmd).
utils_opts = [
    cfg.IntOpt('password_length',
               default=12,
               help='Length of generated instance admin passwords'),
    cfg.StrOpt('instance_usage_audit_period',
               default='month',
               help='time period to generate instance usages for. '
                    'Time period must be hour, day, month or year'),
    cfg.StrOpt('rootwrap_config',
               default="/etc/nova/rootwrap.conf",
               help='Path to the rootwrap configuration file to use for '
                    'running commands as root'),
    cfg.StrOpt('tempdir',
               default=None,
               help='Explicitly specify the temporary working directory'),
]

# Register the options on the global config object at import time.
CONF = cfg.CONF
CONF.register_opts(monkey_patch_opts)
CONF.register_opts(utils_opts)

LOG = logging.getLogger(__name__)
|
|
|
|
|
2012-09-17 14:53:12 -07:00
|
|
|
# Used for looking up extensions of text
# to their 'multiplied' byte amount
BYTE_MULTIPLIERS = {
    '': 1,
    't': 1024 ** 4,
    'g': 1024 ** 3,
    'm': 1024 ** 2,
    'k': 1024,
}


# Seconds per time unit; used in limits.
TIME_UNITS = {
    'SECOND': 1,
    'MINUTE': 60,
    'HOUR': 3600,
    # NOTE: previously 84400, which is not the number of seconds in a
    # day; a day is 24 * 3600 = 86400 seconds.
    'DAY': 86400
}
|
|
|
|
|
2013-05-11 21:25:16 +08:00
|
|
|
# Decorator factory: @synchronized('name') serializes access using lock
# files prefixed with 'nova-' (see nova.openstack.common.lockutils).
synchronized = lockutils.synchronized_with_prefix('nova-')
|
|
|
|
|
2011-12-10 14:01:17 -08:00
|
|
|
|
2010-11-23 21:48:32 +00:00
|
|
|
def vpn_ping(address, port, timeout=0.05, session_id=None):
    """Sends a vpn negotiation packet and returns the server session.

    :param address: hostname or IP of the vpn server to probe
    :param port: UDP port the vpn listens on
    :param timeout: seconds to wait for a reply before giving up
    :param session_id: 64-bit client session identifier; a random one
                       is generated when not supplied

    Returns False on a failure. Basic packet structure is below.

    Client packet (14 bytes)::

         0 1        8 9  13
        +-+--------+-----+
        |x| cli_id |?????|
        +-+--------+-----+
        x = packet identifier 0x38
        cli_id = 64 bit identifier
        ? = unknown, probably flags/padding

    Server packet (26 bytes)::

         0 1        8 9  13 14    21 2225
        +-+--------+-----+--------+----+
        |x| srv_id |?????| cli_id |????|
        +-+--------+-----+--------+----+
        x = packet identifier 0x40
        cli_id = 64 bit identifier
        ? = unknown, probably flags/padding
        bit 9 was 1 and the rest were 0 in testing

    """
    if session_id is None:
        # Random 64-bit id so the server's echo can be matched to us.
        session_id = random.randint(0, 0xffffffffffffffff)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # One marker byte (0x38), 8-byte session id, 5 padding bytes.
    data = struct.pack('!BQxxxxx', 0x38, session_id)
    sock.sendto(data, (address, port))
    sock.settimeout(timeout)
    try:
        received = sock.recv(2048)
    except socket.timeout:
        # No reply within the timeout window: not a reachable vpn.
        return False
    finally:
        # Close the socket on both the success and timeout paths.
        sock.close()
    # Expected reply layout: 0x40 marker, server session, padding,
    # echoed client session, trailing padding (26 bytes total).
    fmt = '!BQxxxxxQxxxx'
    if len(received) != struct.calcsize(fmt):
        LOG.warn(_('Expected to receive %(exp)s bytes, but actually %(act)s') %
                 dict(exp=struct.calcsize(fmt), act=len(received)))
        return False
    (identifier, server_sess, client_sess) = struct.unpack(fmt, received)
    # Only trust replies carrying the server marker that echo our id.
    # NOTE(review): a well-sized but mismatched reply falls through and
    # returns None (falsy) rather than an explicit False.
    if identifier == 0x40 and client_sess == session_id:
        return server_sess
|
|
|
|
|
|
|
|
|
2011-03-09 00:30:05 -05:00
|
|
|
def execute(*cmd, **kwargs):
    """Convenience wrapper around oslo's execute() method.

    Accepts the same arguments as processutils.execute(). When called
    with run_as_root=True and no explicit root_helper, the nova-rootwrap
    helper built from CONF.rootwrap_config is supplied automatically.
    """
    # Inject the default root helper only when the caller asked to run
    # as root but did not provide its own helper command.
    # NOTE: "'x' not in d" replaces the unidiomatic "not 'x' in d".
    if 'run_as_root' in kwargs and 'root_helper' not in kwargs:
        kwargs['root_helper'] = 'sudo nova-rootwrap %s' % CONF.rootwrap_config
    return processutils.execute(*cmd, **kwargs)
|
2010-05-27 23:05:26 -07:00
|
|
|
|
2010-06-10 18:39:07 +01:00
|
|
|
|
Bug#898257 abstract out disk image access methods
Rather than providing two mutually exlusive image
access methods (loop and qemu-nbd), try each in turn.
This is to prepare for a follow up patch which will
add libguestfs as a method to try.
* nova/virt/mount.py: A new Mount class to abstract the
devce allocation, partition mapping and file sys mounting,
for each access type.
* nova/virt/disk/loop.py: A specialization of the base Mount class
to provide loop back mounting support.
* nova/virt/disk/nbd.py: A specialization of the base Mount class
to provide qemu-nbd mounting support.
* nova/virt/disk/api.py: A new file containing the nova.virt.disk
module interface.
(img_handlers): A new list of access methods to try,
with the order being honored.
(_DiskImage): An internal helper class that uses the plugin classes
above, to provide the operations available on a disk image file.
When mounting, iterate over each access method until one succeeds.
If a hint is provided about a CoW format image, the list of
methods to try will be reduced accordingly.
Note expected errors are no longer raised as exceptions during mounting.
Instead, on failure to mount an image, errors are collated and raised.
Interveining errors are logged in debug mode for successful mounts.
* nova/virt/libvirt/connection.py: Adjust the function parameter
names to be more general, rather than referencing specific
implementations like 'nbd' and 'tune2fs'.
Simplify the destroy_container() by storing and passing
back a reference to the _DiskImage object, which has the
necessary state to unmount.
* nova/utils.py (trycmd): A helper function to both deal with,
commands that issue ignorable warnings to stderr,
and commands that EXIT_SUCCESS while issuing errors to stderr.
nova/virt/xenapi/vm_utils.py: Adjust for the moved virt.disk package
Change-Id: If3a4b1c8f4e2f2e7300a21071340dcc839cb36d7
2011-11-30 17:00:17 +00:00
|
|
|
def trycmd(*args, **kwargs):
    """Convenience wrapper around oslo's trycmd() method.

    Accepts the same arguments as processutils.trycmd(). When called
    with run_as_root=True and no explicit root_helper, the nova-rootwrap
    helper built from CONF.rootwrap_config is supplied automatically.
    """
    # Inject the default root helper only when the caller asked to run
    # as root but did not provide its own helper command.
    # NOTE: "'x' not in d" replaces the unidiomatic "not 'x' in d".
    if 'run_as_root' in kwargs and 'root_helper' not in kwargs:
        kwargs['root_helper'] = 'sudo nova-rootwrap %s' % CONF.rootwrap_config
    return processutils.trycmd(*args, **kwargs)
|
2010-05-27 23:05:26 -07:00
|
|
|
|
2010-06-10 18:39:07 +01:00
|
|
|
|
2010-11-01 16:25:56 -07:00
|
|
|
def novadir():
    """Return the directory that contains the installed nova package."""
    import nova
    nova_file = os.path.abspath(nova.__file__)
    # Strip the package's __init__ path to leave the parent directory.
    return nova_file.split('nova/__init__.py')[0]
|
2010-09-17 19:28:10 -07:00
|
|
|
|
|
|
|
|
2010-05-27 23:05:26 -07:00
|
|
|
def debug(arg):
    """Log *arg* at debug level and return it unchanged.

    Handy for splicing into callback chains while troubleshooting.
    """
    LOG.debug(_('debug in callback: %s'), arg)
    return arg
|
|
|
|
|
2010-06-10 18:39:07 +01:00
|
|
|
|
2010-05-27 23:05:26 -07:00
|
|
|
def generate_uid(topic, size=8):
    """Return a pseudo-random id of the form '<topic>-<random chars>'.

    :param topic: prefix for the id (e.g. a resource type name)
    :param size: number of random characters in the suffix
    """
    characters = '01234567890abcdefghijklmnopqrstuvwxyz'
    # NOTE: range() replaces the Python-2-only xrange(); for this small,
    # fully-consumed sequence the behavior is identical.
    choices = [random.choice(characters) for _x in range(size)]
    return '%s-%s' % (topic, ''.join(choices))
|
2010-05-27 23:05:26 -07:00
|
|
|
|
2010-06-10 18:39:07 +01:00
|
|
|
|
2011-03-15 11:24:07 -07:00
|
|
|
# Default symbols to use for passwords. Avoids visually confusing characters.
# ~6 bits per symbol
DEFAULT_PASSWORD_SYMBOLS = ('23456789',  # Removed: 0,1
                            'ABCDEFGHJKLMNPQRSTUVWXYZ',  # Removed: I, O
                            'abcdefghijkmnopqrstuvwxyz')  # Removed: l


# Like DEFAULT_PASSWORD_SYMBOLS but without lowercase letters, for
# passwords that must be easier to read aloud or transcribe.
# ~5 bits per symbol
EASIER_PASSWORD_SYMBOLS = ('23456789',  # Removed: 0, 1
                           'ABCDEFGHJKLMNPQRSTUVWXYZ')  # Removed: I, O
|
2011-03-15 11:24:07 -07:00
|
|
|
|
|
|
|
|
2012-07-06 18:28:21 +00:00
|
|
|
def last_completed_audit_period(unit=None, before=None):
    """This method gives you the most recently *completed* audit period.

    arguments:
            unit: string, one of 'hour', 'day', 'month', 'year'.
                  Periods normally begin at the beginning (UTC) of the
                  period unit (So a 'day' period begins at midnight UTC,
                  a 'month' unit on the 1st, a 'year' on Jan, 1).
                  The unit string may be appended with an optional offset
                  like so: 'day@18'. This will begin the period at 18:00
                  UTC. 'month@15' starts a monthly period on the 15th,
                  and year@3 begins a yearly one on March 1st.
                  Defaults to CONF.instance_usage_audit_period.
            before: Give the audit period most recently completed before
                    <timestamp>. Defaults to now.

    returns: 2 tuple of datetimes (begin, end)
             The begin timestamp of this audit period is the same as the
             end of the previous.
    """
    if not unit:
        unit = CONF.instance_usage_audit_period

    # Parse an optional '@<offset>' suffix, e.g. 'day@18' -> offset 18.
    offset = 0
    if '@' in unit:
        unit, offset = unit.split("@", 1)
        offset = int(offset)

    if before is not None:
        rightnow = before
    else:
        rightnow = timeutils.utcnow()
    if unit not in ('month', 'day', 'year', 'hour'):
        raise ValueError('Time period must be hour, day, month or year')
    if unit == 'month':
        # A day-of-month offset is 1-based; 0 means "the 1st".
        if offset == 0:
            offset = 1
        end = datetime.datetime(day=offset,
                                month=rightnow.month,
                                year=rightnow.year)
        if end >= rightnow:
            # The current month's period hasn't finished yet; step the
            # end back one month, wrapping the year in January.
            year = rightnow.year
            if 1 >= rightnow.month:
                year -= 1
                month = 12 + (rightnow.month - 1)
            else:
                month = rightnow.month - 1
            end = datetime.datetime(day=offset,
                                    month=month,
                                    year=year)
        # begin is one month before end, again wrapping the year.
        year = end.year
        if 1 >= end.month:
            year -= 1
            month = 12 + (end.month - 1)
        else:
            month = end.month - 1
        begin = datetime.datetime(day=offset, month=month, year=year)

    elif unit == 'year':
        # A month offset is 1-based; 0 means January.
        if offset == 0:
            offset = 1
        end = datetime.datetime(day=1, month=offset, year=rightnow.year)
        if end >= rightnow:
            # This year's period hasn't completed; use the prior year's.
            end = datetime.datetime(day=1,
                                    month=offset,
                                    year=rightnow.year - 1)
            begin = datetime.datetime(day=1,
                                      month=offset,
                                      year=rightnow.year - 2)
        else:
            begin = datetime.datetime(day=1,
                                      month=offset,
                                      year=rightnow.year - 1)

    elif unit == 'day':
        # Daily periods run from hour 'offset' to the same hour next day.
        end = datetime.datetime(hour=offset,
                                day=rightnow.day,
                                month=rightnow.month,
                                year=rightnow.year)
        if end >= rightnow:
            end = end - datetime.timedelta(days=1)
        begin = end - datetime.timedelta(days=1)

    elif unit == 'hour':
        # Hourly periods run from minute 'offset' to the same minute of
        # the next hour.
        end = rightnow.replace(minute=offset, second=0, microsecond=0)
        if end >= rightnow:
            end = end - datetime.timedelta(hours=1)
        begin = end - datetime.timedelta(hours=1)

    return (begin, end)
|
|
|
|
|
|
|
|
|
2012-12-12 07:14:12 +00:00
|
|
|
def generate_password(length=None, symbolgroups=DEFAULT_PASSWORD_SYMBOLS):
    """Generate a random password from the supplied symbol groups.

    At least one symbol from each group will be included. Unpredictable
    results if length is less than the number of symbol groups.

    Believed to be reasonably secure (with a reasonable password length!)

    :param length: password length; defaults to CONF.password_length
    :param symbolgroups: tuple of strings, one per character class
    """
    if length is None:
        length = CONF.password_length

    # SystemRandom draws from the OS entropy pool, which is what we want
    # for generating secrets.
    r = random.SystemRandom()

    # NOTE(jerdfelt): Some password policies require at least one character
    # from each group of symbols, so start off with one random character
    # from each symbol group
    password = [r.choice(s) for s in symbolgroups]
    # If length < len(symbolgroups), the leading characters will only
    # be from the first length groups. Try our best to not be predictable
    # by shuffling and then truncating.
    r.shuffle(password)
    password = password[:length]
    length -= len(password)

    # then fill with random characters from all symbol groups
    symbols = ''.join(symbolgroups)
    # NOTE: range() replaces the Python-2-only xrange(); equivalent here.
    password.extend([r.choice(symbols) for _i in range(length)])

    # finally shuffle to ensure first x characters aren't from a
    # predictable group
    r.shuffle(password)

    return ''.join(password)
|
2011-02-17 22:09:26 +00:00
|
|
|
|
|
|
|
|
2010-05-27 23:05:26 -07:00
|
|
|
def last_octet(address):
    """Return the final dotted component of *address* as an integer."""
    octets = address.split('.')
    return int(octets[-1])
|
2010-05-27 23:05:26 -07:00
|
|
|
|
2010-06-10 18:39:07 +01:00
|
|
|
|
2013-06-25 08:54:53 -04:00
|
|
|
def get_my_ipv4_address():
    """Run ip route/addr commands to figure out the best ipv4 address.

    Finds the host's default route, locates the subnet that contains its
    gateway, and returns an address from the interface serving that
    route. Falls back to 127.0.0.1 on any failure.
    """
    LOCALHOST = '127.0.0.1'
    try:
        out = execute('ip', '-f', 'inet', '-o', 'route', 'show',
                      run_as_root=True)

        # Find the default route
        regex_default = ('default\s*via\s*'
                         '(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'
                         '\s*dev\s*(\w*)\s*')
        default_routes = re.findall(regex_default, out[0])
        if not default_routes:
            # No default route at all: nothing better than loopback.
            return LOCALHOST
        gateway, iface = default_routes[0]

        # Find the right subnet for the gateway/interface for
        # the default route
        route = ('(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\/(\d{1,2})'
                 '\s*dev\s*(\w*)\s*')
        for match in re.finditer(route, out[0]):
            subnet = netaddr.IPNetwork(match.group(1) + "/" + match.group(2))
            # Only accept the subnet on the default-route interface that
            # actually contains the gateway address.
            if (match.group(3) == iface and
                    netaddr.IPAddress(gateway) in subnet):
                try:
                    return _get_ipv4_address_for_interface(iface)
                except exception.NovaException:
                    # Keep scanning other matching routes on failure.
                    pass
    except Exception as ex:
        LOG.error(_("Couldn't get IPv4 : %(ex)s") % {'ex': ex})
    return LOCALHOST
|
|
|
|
|
|
|
|
|
|
|
|
def _get_ipv4_address_for_interface(iface):
    """Run ip addr show for an interface and grab its ipv4 addresses.

    :param iface: interface name, e.g. 'eth0'
    :returns: the first non-loopback IPv4 address on the interface
    :raises: exception.NovaException when the command fails or no
             usable address is found
    """
    try:
        out = execute('ip', '-f', 'inet', '-o', 'addr', 'show', iface,
                      run_as_root=True)
        # Matches the dotted-quad following 'inet' in each output line.
        regexp_address = re.compile('inet\s*'
                                    '(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})')
        # Skip the loopback address if it happens to be configured here.
        address = [m.group(1) for m in regexp_address.finditer(out[0])
                   if m.group(1) != '127.0.0.1']
        if address:
            return address[0]
        else:
            msg = _('IPv4 address is not found.: %s') % out[0]
            raise exception.NovaException(msg)
    except Exception as ex:
        # Wrap every failure mode (command error, parse failure, the
        # not-found case above) in a uniform NovaException.
        msg = _("Couldn't get IPv4 of %(interface)s"
                " : %(ex)s") % {'interface': iface, 'ex': ex}
        LOG.error(msg)
        raise exception.NovaException(msg)
|
|
|
|
|
|
|
|
|
2011-09-30 00:39:46 +00:00
|
|
|
def get_my_linklocal(interface):
    """Return the IPv6 link-local address configured on *interface*.

    Runs 'ip -f inet6 -o addr show <interface>' and extracts the first
    scope-link inet6 address.

    :raises: exception.NovaException when the command fails or no
             link-local address is present.
    """
    try:
        if_str = execute('ip', '-f', 'inet6', '-o', 'addr', 'show', interface)
        condition = '\s+inet6\s+([0-9a-f:]+)/\d+\s+scope\s+link'
        links = [re.search(condition, x) for x in if_str[0].split('\n')]
        address = [w.group(1) for w in links if w is not None]
        # NOTE: the original indexed address[0] before checking that the
        # list was non-empty, raising IndexError (masked by the broad
        # except below) when no link-local address existed.
        if address:
            return address[0]
        else:
            msg = _('Link Local address is not found.:%s') % if_str
            raise exception.NovaException(msg)
    except Exception as ex:
        # NOTE: '% locals()' was replaced with an explicit mapping; the
        # rendered message is identical but no longer depends silently
        # on local variable names.
        msg = _("Couldn't get Link Local IP of %(interface)s"
                " :%(ex)s") % {'interface': interface, 'ex': ex}
        raise exception.NovaException(msg)
|
2010-12-24 20:38:49 +09:00
|
|
|
|
|
|
|
|
2010-11-11 19:52:36 -06:00
|
|
|
def parse_mailmap(mailmap='.mailmap'):
    """Parse a git .mailmap file into {alias_email: canonical_email}.

    Lines starting with '#' are ignored; every other line containing a
    single space is split into 'canonical alias'. Both emails are
    lower-cased. Returns an empty dict when the file does not exist.
    """
    mapping = {}
    if os.path.exists(mailmap):
        # 'with' guarantees the handle is closed; the original
        # implementation opened the file and never closed it.
        with open(mailmap, 'r') as fp:
            for line in fp:
                line = line.strip()
                if not line.startswith('#') and ' ' in line:
                    canonical_email, alias = line.split(' ')
                    mapping[alias.lower()] = canonical_email.lower()
    return mapping
|
|
|
|
|
2010-11-23 21:58:46 +01:00
|
|
|
|
2010-11-11 19:52:36 -06:00
|
|
|
def str_dict_replace(s, mapping):
    """Apply every key -> value substring substitution in *mapping* to *s*.

    Substitutions are applied in the dict's iteration order, so
    overlapping keys may interact.

    :param s: the string to rewrite
    :param mapping: dict of {old_substring: new_substring}
    :returns: the rewritten string
    """
    # NOTE: iteritems() is Python-2-only; items() behaves the same here.
    for s1, s2 in mapping.items():
        s = s.replace(s1, s2)
    return s
|
|
|
|
|
2010-08-19 12:28:45 +02:00
|
|
|
|
|
|
|
class LazyPluggable(object):
    """A pluggable backend loaded lazily based on some value.

    The backend module is selected by reading a config option (the
    "pivot") the first time an attribute is accessed, then imported
    once and cached; attribute access is delegated to it thereafter.
    """

    def __init__(self, pivot, config_group=None, **backends):
        # Map of backend name -> module path string, or a
        # (module_path, fromlist) tuple passed to __import__.
        self.__backends = backends
        # Name of the config option whose value selects the backend.
        self.__pivot = pivot
        # Cached imported module; None until first attribute access.
        self.__backend = None
        # Optional config group the pivot option lives under.
        self.__config_group = config_group

    def __get_backend(self):
        # Resolve, import and cache the backend on first use.
        if not self.__backend:
            if self.__config_group is None:
                backend_name = CONF[self.__pivot]
            else:
                backend_name = CONF[self.__config_group][self.__pivot]
            if backend_name not in self.__backends:
                msg = _('Invalid backend: %s') % backend_name
                raise exception.NovaException(msg)

            backend = self.__backends[backend_name]
            if isinstance(backend, tuple):
                name = backend[0]
                fromlist = backend[1]
            else:
                name = backend
                fromlist = backend

            # fromlist makes __import__ return the leaf submodule rather
            # than the top-level package.
            self.__backend = __import__(name, None, None, fromlist)
        return self.__backend

    def __getattr__(self, key):
        # Delegate every unknown attribute to the lazily loaded module.
        backend = self.__get_backend()
        return getattr(backend, key)
|
|
|
|
|
2010-10-21 11:49:51 -07:00
|
|
|
|
2010-10-25 19:21:09 +09:00
|
|
|
def xhtml_escape(value):
    """Escapes a string so it is valid within XML or XHTML.

    In addition to saxutils' default '&', '<' and '>' escaping, double
    and single quotes are escaped so the result is safe inside attribute
    values.
    """
    # NOTE: the entity strings here must be the literal '&quot;' and
    # '&apos;' escapes; the previous text had them decoded back to raw
    # quote characters, which made the mapping a broken no-op.
    return saxutils.escape(value, {'"': '&quot;', "'": '&apos;'})
|
2010-10-25 19:21:09 +09:00
|
|
|
|
|
|
|
|
|
|
|
def utf8(value):
    """Try to turn a string into utf-8 if possible.

    Code is directly from the utf8 function in
    http://github.com/facebook/tornado/blob/master/tornado/escape.py

    """
    # Byte strings pass straight through; anything else must be a
    # unicode object, which gets encoded.
    if not isinstance(value, unicode):
        assert isinstance(value, str)
        return value
    return value.encode('utf-8')
|
2010-12-22 17:53:42 -08:00
|
|
|
|
|
|
|
|
2011-02-23 14:07:08 -08:00
|
|
|
def get_from_path(items, path):
    """Returns a list of items matching the specified path.

    Takes an XPath-like expression e.g. prop1/prop2/prop3, and for each
    item in items, looks up items[prop1][prop2][prop3]. Like XPath, if
    any of the intermediate results are lists it will treat each list
    item individually. A 'None' in items or any child expressions will
    be ignored, this function will not throw because of None (anywhere)
    in items. The returned list will contain no None values.
    """
    if path is None:
        raise exception.NovaException('Invalid mini_xpath')

    (first_token, sep, remainder) = path.partition('/')

    if first_token == '':
        raise exception.NovaException('Invalid mini_xpath')

    matches = []

    if items is None:
        return matches

    if not isinstance(items, list):
        # Treat a bare object as a one-element list.
        items = [items]

    for element in items:
        if element is None:
            continue
        getter = getattr(element, 'get', None)
        if getter is None:
            # Not dict-like; nothing to look up here.
            continue
        value = getter(first_token)
        if value is None:
            continue
        if isinstance(value, list):
            # Flatten intermediate lists into the result.
            matches.extend(value)
        else:
            matches.append(value)

    if not sep:
        # Last path component; we are done.
        return matches
    return get_from_path(matches, remainder)
|
2011-03-22 20:26:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
def flatten_dict(dict_, flattened=None):
    """Recursively flatten a nested dictionary.

    Nested dict values are merged into a single flat dict; on key
    collision the value encountered last wins.

    :param dict_: the (possibly nested) dict to flatten
    :param flattened: optional dict to accumulate results into
    :returns: the flat dict
    """
    flattened = flattened or {}
    for key, value in dict_.items():
        # NOTE: the original tested hasattr(value, 'iteritems'), a
        # Python-2-only duck check; isinstance(value, dict) is the
        # portable equivalent for the dict values this handles.
        if isinstance(value, dict):
            flatten_dict(value, flattened)
        else:
            flattened[key] = value
    return flattened
|
|
|
|
|
|
|
|
|
|
|
|
def partition_dict(dict_, keys):
    """Return two dicts, one with `keys` the other with everything else.

    :param dict_: source dict (not modified)
    :param keys: keys that go into the first returned dict
    :returns: (intersection, difference) tuple of dicts
    """
    intersection = {}
    difference = {}
    # NOTE: items() replaces the Python-2-only iteritems(); identical
    # behavior here.
    for key, value in dict_.items():
        if key in keys:
            intersection[key] = value
        else:
            difference[key] = value
    return intersection, difference
|
|
|
|
|
|
|
|
|
|
|
|
def map_dict_keys(dict_, key_map):
    """Return a dict in which the dictionaries keys are mapped to new keys.

    Keys absent from *key_map* are carried over unchanged.

    :param dict_: source dict (not modified)
    :param key_map: dict of {old_key: new_key}
    """
    mapped = {}
    # NOTE: items() replaces the Python-2-only iteritems(); identical
    # behavior here.
    for key, value in dict_.items():
        mapped_key = key_map[key] if key in key_map else key
        mapped[mapped_key] = value
    return mapped
|
2011-03-23 05:50:53 +00:00
|
|
|
|
|
|
|
|
|
|
|
def subset_dict(dict_, keys):
    """Return a dict that only contains a subset of keys."""
    matched, _unmatched = partition_dict(dict_, keys)
    return matched
|
2011-03-24 21:13:55 +00:00
|
|
|
|
|
|
|
|
2012-07-25 15:31:11 -05:00
|
|
|
def diff_dict(orig, new):
    """
    Return a dict describing how to change orig to new. The keys
    correspond to values that have changed; the value will be a list
    of one or two elements. The first element of the list will be
    either '+' or '-', indicating whether the key was updated or
    deleted; if the key was updated, the list will contain a second
    element, giving the updated value.
    """
    changes = {}
    # Keys present in orig but gone from new were deleted.
    for removed_key in set(orig.keys()) - set(new.keys()):
        changes[removed_key] = ['-']
    # Keys that are new, or whose value differs, were added/updated.
    for key, value in new.items():
        if key not in orig or orig[key] != value:
            changes[key] = ['+', value]
    return changes
|
|
|
|
|
|
|
|
|
2011-03-22 21:42:17 -07:00
|
|
|
def check_isinstance(obj, cls):
    """Checks that obj is of type cls, and lets PyLint infer types."""
    # Guard-clause form: raise on mismatch, otherwise hand back the
    # object so callers can chain the call.
    if not isinstance(obj, cls):
        raise Exception(_('Expected object of type: %s') % (str(cls)))
    return obj
|
2011-04-22 21:35:54 +09:00
|
|
|
|
|
|
|
|
|
|
|
def parse_server_string(server_str):
    """
    Parses the given server_string and returns a list of host and port.
    If it's not a combination of host part and port, the port element
    is a null string. If the input is invalid expression, return a null
    list.
    """
    try:
        # A bare IPv6 address carries no port information.
        if netaddr.valid_ipv6(server_str):
            return (server_str, '')

        # Bracketed IPv6 with a port, e.g. '[::1]:8775'.
        if "]:" in server_str:
            (host, port) = server_str.replace('[', '', 1).split(']:')
            return (host, port)

        # No separator at all: plain host, empty port.
        if ':' not in server_str:
            return (server_str, '')

        # Must be 'host:port'; more than one ':' raises and falls into
        # the error path below.
        (host, port) = server_str.split(':')
        return (host, port)

    except Exception:
        LOG.error(_('Invalid server_string: %s'), server_str)
        return ('', '')
|
2011-06-15 20:11:34 +00:00
|
|
|
|
|
|
|
|
2013-01-16 14:36:00 -05:00
|
|
|
def is_int_like(val):
    """Check if a value looks like an int.

    True only when converting to int and back yields the same string
    representation (so '03' and '3.5' are rejected).
    """
    try:
        return str(val) == str(int(val))
    except Exception:
        return False
|
|
|
|
|
|
|
|
|
2011-07-11 13:34:39 -07:00
|
|
|
def is_valid_ipv4(address):
    """Verify that address represents a valid IPv4 address."""
    try:
        result = netaddr.valid_ipv4(address)
    except Exception:
        # netaddr raises on non-string / malformed input; treat as invalid.
        return False
    return result
|
2011-08-23 08:46:55 -07:00
|
|
|
|
|
|
|
|
2013-02-11 14:38:34 -05:00
|
|
|
def is_valid_ipv6(address):
    """Verify that address represents a valid IPv6 address."""
    try:
        result = netaddr.valid_ipv6(address)
    except Exception:
        # netaddr raises on non-string / malformed input; treat as invalid.
        return False
    return result
|
2013-02-11 14:38:34 -05:00
|
|
|
|
|
|
|
|
2013-02-18 15:50:36 -05:00
|
|
|
def is_valid_ipv6_cidr(address):
    """Verify that address represents a valid IPv6 CIDR network."""
    try:
        # Parsing (and rendering) raises for anything that is not a
        # well-formed IPv6 network.
        str(netaddr.IPNetwork(address, version=6).cidr)
    except Exception:
        return False
    return True
|
|
|
|
|
|
|
|
|
2013-02-11 14:38:34 -05:00
|
|
|
def get_shortened_ipv6(address):
    """Return the canonical (shortened) form of an IPv6 address."""
    return str(netaddr.IPAddress(address, version=6).ipv6())
|
|
|
|
|
|
|
|
|
2013-02-18 15:50:36 -05:00
|
|
|
def get_shortened_ipv6_cidr(address):
    """Return the canonical CIDR form of an IPv6 network."""
    return str(netaddr.IPNetwork(address, version=6).cidr)
|
|
|
|
|
|
|
|
|
2011-09-30 15:10:33 +01:00
|
|
|
def is_valid_cidr(address):
    """Check if address is valid

    The provided address can be a IPv6 or a IPv4
    CIDR address.
    """
    try:
        # Validate the correct CIDR Address
        netaddr.IPNetwork(address)
    except netaddr.core.AddrFormatError:
        return False
    except UnboundLocalError:
        # NOTE(MotoKen): work around bug in netaddr 0.7.5 (see detail in
        # https://github.com/drkjam/netaddr/issues/2)
        return False

    # netaddr accepts a bare address with no '/prefix'; require an
    # explicit, non-empty prefix part.
    parts = address.split('/')
    if len(parts) <= 1 or parts[1] == '':
        return False

    return True
|
|
|
|
|
|
|
|
|
2013-02-15 10:49:12 -08:00
|
|
|
def get_ip_version(network):
    """Returns the IP version of a network (IPv4 or IPv6).

    Raises AddrFormatError if invalid network.
    """
    # Parse once instead of re-parsing the network for each comparison.
    version = netaddr.IPNetwork(network).version
    if version == 6:
        return "IPv6"
    elif version == 4:
        return "IPv4"
|
|
|
|
|
|
|
|
|
2011-08-19 12:30:55 -07:00
|
|
|
def monkey_patch():
    """If the Flags.monkey_patch set as True,
    this function patches a decorator
    for all functions in specified modules.
    You can set decorators for each modules
    using CONF.monkey_patch_modules.
    The format is "Module path:Decorator function".
    Example:
      'nova.api.ec2.cloud:nova.openstack.common.notifier.api.notify_decorator'

    Parameters of the decorator is as follows.
    (See nova.openstack.common.notifier.api.notify_decorator)

    name - name of the function
    function - object of the function
    """
    # If CONF.monkey_patch is not True, this function do nothing.
    if not CONF.monkey_patch:
        return
    # Get list of modules and decorators
    for module_and_decorator in CONF.monkey_patch_modules:
        module, decorator_name = module_and_decorator.split(':')
        # import decorator function
        decorator = importutils.import_class(decorator_name)
        # Make sure the target module is loaded so sys.modules has it.
        __import__(module)
        # Retrieve module information using pyclbr
        module_data = pyclbr.readmodule_ex(module)
        for key in module_data.keys():
            # set the decorator for the class methods
            if isinstance(module_data[key], pyclbr.Class):
                clz = importutils.import_class("%s.%s" % (module, key))
                # Wrap every bound/unbound method on the class in place.
                for method, func in inspect.getmembers(clz, inspect.ismethod):
                    setattr(clz, method,
                            decorator("%s.%s.%s" % (module, key, method), func))
            # set the decorator for the function
            if isinstance(module_data[key], pyclbr.Function):
                func = importutils.import_class("%s.%s" % (module, key))
                # Rebind the module-level function to its wrapped version.
                setattr(sys.modules[module], key,
                        decorator("%s.%s" % (module, key), func))
|
2011-08-31 17:06:15 -07:00
|
|
|
|
|
|
|
|
2011-09-02 11:00:33 -07:00
|
|
|
def convert_to_list_dict(lst, label):
    """Wrap each element of *lst* in a single-key dict keyed by *label*.

    A non-list value is treated as a one-element list; any falsy value
    (None, empty list, 0, '') yields None.
    """
    if not lst:
        return None
    items = lst if isinstance(lst, list) else [lst]
    return [{label: item} for item in items]
|
2011-09-14 23:11:03 +00:00
|
|
|
|
|
|
|
|
|
|
|
def timefunc(func):
    """Decorator that logs the wall-clock duration of each call to *func*."""
    @functools.wraps(func)
    def inner(*args, **kwargs):
        began = time.time()
        try:
            return func(*args, **kwargs)
        finally:
            # Log even when the wrapped call raises.
            elapsed = time.time() - began
            LOG.debug(_("timefunc: '%(name)s' took %(total_time).2f secs") %
                      dict(name=func.__name__, total_time=elapsed))
    return inner
|
2011-09-21 15:59:40 -05:00
|
|
|
|
|
|
|
|
2011-10-10 17:58:56 -05:00
|
|
|
def make_dev_path(dev, partition=None, base='/dev'):
    """Build the filesystem path for a device, optionally with a partition.

    >>> make_dev_path('xvdc')
    /dev/xvdc

    >>> make_dev_path('xvdc', 1)
    /dev/xvdc1
    """
    # A falsy partition (None or 0) adds no suffix.
    suffix = str(partition) if partition else ''
    return os.path.join(base, dev) + suffix
|
2011-08-05 16:35:56 +01:00
|
|
|
|
|
|
|
|
|
|
|
def total_seconds(td):
    """Return timedelta *td* as a float number of seconds.

    Falls back to manual arithmetic on Python 2.6, where timedelta has
    no total_seconds() method.
    """
    try:
        return td.total_seconds()
    except AttributeError:
        micros = (td.days * 86400 + td.seconds) * 10 ** 6 + td.microseconds
        return micros / 10.0 ** 6
|
2011-11-09 18:16:24 -05:00
|
|
|
|
|
|
|
|
|
|
|
def sanitize_hostname(hostname):
    """Return a hostname which conforms to RFC-952 and RFC-1123 specs."""
    # Narrow unicode input to latin-1 bytes first (Python 2 `unicode`).
    if isinstance(hostname, unicode):
        hostname = hostname.encode('latin-1', 'ignore')

    # Spaces/underscores become dashes, anything outside [\w.-] is
    # dropped, then lowercase and trim leading/trailing separators.
    cleaned = re.sub('[ _]', '-', hostname)
    cleaned = re.sub('[^\w.-]+', '', cleaned)
    return cleaned.lower().strip('.-')
|
2011-12-02 14:18:38 +01:00
|
|
|
|
|
|
|
|
2012-01-04 18:40:46 -08:00
|
|
|
def read_cached_file(filename, cache_info, reload_func=None):
    """Return the contents of *filename*, re-reading only when it changed.

    :param cache_info: dictionary to hold opaque cache.
    :param reload_func: optional function to be called with data when
                        file is reloaded due to a modification.
    :returns: data from file
    """
    mtime = os.path.getmtime(filename)
    stale = not cache_info or mtime != cache_info.get('mtime')
    if stale:
        LOG.debug(_("Reloading cached file %s") % filename)
        with open(filename) as fh:
            cache_info['data'] = fh.read()
        cache_info['mtime'] = mtime
        if reload_func:
            reload_func(cache_info['data'])
    return cache_info['data']
|
2011-12-21 22:40:23 +00:00
|
|
|
|
|
|
|
|
2012-01-09 18:21:57 +11:00
|
|
|
def hash_file(file_like_object):
    """Return the SHA-1 hex digest of everything left in *file_like_object*."""
    digest = hashlib.sha1()
    while True:
        # Read in 32 KiB chunks so large files never sit fully in memory.
        chunk = file_like_object.read(32768)
        if chunk == b'':
            return digest.hexdigest()
        digest.update(chunk)
|
|
|
|
|
|
|
|
|
2011-12-21 22:40:23 +00:00
|
|
|
@contextlib.contextmanager
def temporary_mutation(obj, **kwargs):
    """Temporarily set the attr on a particular object to a given value then
    revert when finished.

    One use of this is to temporarily set the read_deleted flag on a context
    object:

        with temporary_mutation(context, read_deleted="yes"):
            do_something_that_needed_deleted_objects()
    """
    # NOTE(review): dict detection relies on the Python 2-only has_key
    # method; on Python 3 a plain dict would take the attribute path --
    # confirm before porting.
    def is_dict_like(thing):
        return hasattr(thing, 'has_key')

    def get(thing, attr, default):
        # Read a key or attribute, falling back to default when absent.
        if is_dict_like(thing):
            return thing.get(attr, default)
        else:
            return getattr(thing, attr, default)

    def set_value(thing, attr, val):
        # Write a key or attribute depending on the container flavor.
        if is_dict_like(thing):
            thing[attr] = val
        else:
            setattr(thing, attr, val)

    def delete(thing, attr):
        # Remove a key or attribute (used for values this CM introduced).
        if is_dict_like(thing):
            del thing[attr]
        else:
            delattr(thing, attr)

    # Sentinel distinguishing "attribute was absent" from "attribute was
    # None" so the finally block knows whether to delete or restore.
    NOT_PRESENT = object()

    old_values = {}
    for attr, new_value in kwargs.items():
        old_values[attr] = get(obj, attr, NOT_PRESENT)
        set_value(obj, attr, new_value)

    try:
        yield
    finally:
        # Restore (or remove) every mutated attribute, even on error.
        for attr, old_value in old_values.items():
            if old_value is NOT_PRESENT:
                delete(obj, attr)
            else:
                set_value(obj, attr, old_value)
|
2012-01-10 11:22:33 -06:00
|
|
|
|
|
|
|
|
2012-01-20 17:25:08 -08:00
|
|
|
def generate_mac_address():
    """Generate an Ethernet MAC address."""
    # NOTE(vish): We would prefer to use 0xfe here to ensure that linux
    #             bridge mac addresses don't change, but it appears to
    #             conflict with libvirt, so we use the next highest octet
    #             that has the unicast and locally administered bits set
    #             properly: 0xfa.
    #             Discussion: https://bugs.launchpad.net/nova/+bug/921838
    octets = [0xfa, 0x16, 0x3e]
    octets.extend(random.randint(0x00, 0xff) for _i in range(3))
    return ':'.join("%02x" % octet for octet in octets)
|
2012-02-03 15:29:00 -08:00
|
|
|
|
|
|
|
|
|
|
|
def read_file_as_root(file_path):
    """Read *file_path* via a root-privileged ``cat`` and return its contents.

    :raises exception.FileNotFound: if the privileged read fails
    """
    try:
        contents, _stderr = execute('cat', file_path, run_as_root=True)
    except processutils.ProcessExecutionError:
        raise exception.FileNotFound(file_path=file_path)
    return contents
|
2012-02-27 21:01:49 +00:00
|
|
|
|
|
|
|
|
|
|
|
@contextlib.contextmanager
def temporary_chown(path, owner_uid=None):
    """Temporarily chown a path.

    :params owner_uid: UID of temporary owner (defaults to current user)
    """
    uid = os.getuid() if owner_uid is None else owner_uid
    original_owner = os.stat(path).st_uid
    # Skip the chown round-trip entirely when ownership already matches.
    needs_chown = original_owner != uid

    if needs_chown:
        execute('chown', uid, path, run_as_root=True)
    try:
        yield
    finally:
        if needs_chown:
            execute('chown', original_owner, path, run_as_root=True)
|
2012-02-28 05:54:48 +00:00
|
|
|
|
|
|
|
|
|
|
|
@contextlib.contextmanager
def tempdir(**kwargs):
    """Create a temporary directory, yield its path, remove it on exit.

    Keyword arguments pass straight through to tempfile.mkdtemp; ``dir``
    defaults to CONF.tempdir.  Removal failures are logged, not raised.
    """
    mkdtemp_args = dict(kwargs)
    if 'dir' not in mkdtemp_args:
        mkdtemp_args['dir'] = CONF.tempdir
    tmpdir = tempfile.mkdtemp(**mkdtemp_args)
    try:
        yield tmpdir
    finally:
        try:
            shutil.rmtree(tmpdir)
        except OSError as e:
            LOG.error(_('Could not remove tmpdir: %s'), str(e))
|
2012-02-28 10:55:38 -05:00
|
|
|
|
|
|
|
|
2012-07-10 08:16:27 +01:00
|
|
|
def walk_class_hierarchy(clazz, encountered=None):
    """Yield every (transitive) subclass of *clazz*, most derived first.

    :param encountered: internal accumulator preventing a class reachable
                        through several bases from being yielded twice.
    """
    seen = encountered or []
    for child in clazz.__subclasses__():
        if child in seen:
            continue
        seen.append(child)
        # Descend before yielding so leaves come out ahead of parents.
        for descendant in walk_class_hierarchy(child, seen):
            yield descendant
        yield child
|
|
|
|
|
|
|
|
|
2012-02-29 23:38:56 +00:00
|
|
|
class UndoManager(object):
    """Collect undo callbacks so that a failed multi-step operation can be
    rolled back in reverse order.
    """
    def __init__(self):
        # Callbacks are appended in execution order and run in reverse.
        self.undo_stack = []

    def undo_with(self, undo_func):
        """Register *undo_func* to be run if a rollback happens."""
        self.undo_stack.append(undo_func)

    def _rollback(self):
        # Undo the most recent action first; the stack itself is kept.
        for callback in reversed(self.undo_stack):
            callback()

    def rollback_and_reraise(self, msg=None, **kwargs):
        """Rollback a series of actions then re-raise the exception.

        .. note:: (sirp) This should only be called within an
                  exception handler.
        """
        # save_and_reraise_exception re-raises the active exception once
        # this block (logging + rollback) completes.
        with excutils.save_and_reraise_exception():
            if msg:
                LOG.exception(msg, **kwargs)

            self._rollback()
|
2012-08-26 21:26:50 +10:00
|
|
|
|
|
|
|
|
2012-10-23 16:00:31 +01:00
|
|
|
def mkfs(fs, path, label=None):
    """Format a file or block device

    :param fs: Filesystem type (examples include 'swap', 'ext3', 'ext4'
               'btrfs', etc.)
    :param path: Path to file or block device to format
    :param label: Volume label to use
    """
    cmd = ['mkswap'] if fs == 'swap' else ['mkfs', '-t', fs]
    # add -F to force no interactive execute on non-block device.
    if fs in ('ext3', 'ext4'):
        cmd.append('-F')
    if label:
        # msdos-family tools take -n for the label; everything else -L.
        cmd.append('-n' if fs in ('msdos', 'vfat') else '-L')
        cmd.append(label)
    cmd.append(path)
    execute(*cmd)
|
2012-11-21 21:43:12 +11:00
|
|
|
|
|
|
|
|
|
|
|
def last_bytes(file_like_object, num):
    """Return num bytes from the end of the file, and remaining byte count.

    :param file_like_object: The file to read
    :param num: The number of bytes to return
    :returns: tuple of (data, remaining)
    """
    try:
        file_like_object.seek(-num, os.SEEK_END)
    except IOError as e:
        # errno 22 (EINVAL): the file is shorter than *num*, so rewind
        # and return everything.  Anything else is a genuine failure.
        if e.errno != 22:
            raise
        file_like_object.seek(0, os.SEEK_SET)

    remaining = file_like_object.tell()
    return (file_like_object.read(), remaining)
|
2013-01-23 17:34:13 -05:00
|
|
|
|
|
|
|
|
|
|
|
def metadata_to_dict(metadata):
    """Convert a list of {'key': k, 'value': v} items into a plain dict.

    Items whose 'deleted' field is truthy are skipped; a later duplicate
    key overwrites an earlier one.
    """
    return dict((item['key'], item['value'])
                for item in metadata if not item.get('deleted'))
|
2013-01-15 17:24:49 -05:00
|
|
|
|
|
|
|
|
2013-03-05 13:57:10 -05:00
|
|
|
def dict_to_metadata(metadata):
    """Convert a plain dict into the list-of-dicts metadata item format.

    :param metadata: mapping of key -> value
    :returns: list of {'key': k, 'value': v} dicts, one per mapping item
    """
    # items() (rather than Python 2's iteritems()) behaves identically
    # here -- the whole result is built eagerly -- and stays portable.
    return [dict(key=key, value=value) for key, value in metadata.items()]
|
|
|
|
|
|
|
|
|
2013-06-19 12:37:57 +03:00
|
|
|
def instance_meta(instance):
    """Return instance metadata as a dict, converting the list form if needed."""
    metadata = instance['metadata']
    if isinstance(metadata, dict):
        return metadata
    return metadata_to_dict(metadata)
|
|
|
|
|
|
|
|
|
2013-05-10 15:34:00 -07:00
|
|
|
def instance_sys_meta(instance):
    """Return instance system_metadata as a dict, converting if needed."""
    sys_meta = instance['system_metadata']
    if isinstance(sys_meta, dict):
        return sys_meta
    return metadata_to_dict(sys_meta)
|
|
|
|
|
|
|
|
|
2013-01-15 17:24:49 -05:00
|
|
|
def get_wrapped_function(function):
    """Get the method at the bottom of a stack of decorators."""
    # func_closure is the Python 2 spelling (__closure__ on Python 3);
    # an undecorated function has no closure and is returned as-is.
    if not hasattr(function, 'func_closure') or not function.func_closure:
        return function

    def _get_wrapped_function(function):
        # Return the innermost wrapped callable, or None when this level
        # has no closure cells to descend into.
        if not hasattr(function, 'func_closure') or not function.func_closure:
            return None

        for closure in function.func_closure:
            func = closure.cell_contents

            # Prefer the deepest callable found by recursing first; only
            # fall back to this cell's contents when it is itself callable.
            deeper_func = _get_wrapped_function(func)
            if deeper_func:
                return deeper_func
            elif hasattr(closure.cell_contents, '__call__'):
                return closure.cell_contents

    return _get_wrapped_function(function)
|
|
|
|
|
|
|
|
|
2013-01-30 11:17:33 -08:00
|
|
|
class ExceptionHelper(object):
    """Class to wrap another and translate the ClientExceptions raised by its
    function calls to the actual ones.
    """

    def __init__(self, target):
        # The wrapped object whose attribute lookups are proxied.
        self._target = target

    def __getattr__(self, name):
        # Only invoked for names not found on this wrapper itself, so
        # every lookup forwards to the wrapped target.
        func = getattr(self._target, name)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except rpc_common.ClientException as e:
                # NOTE(review): this raises a *tuple*; the Python 2 form
                # to re-raise with a traceback is `raise exc, None, tb`
                # (no parentheses).  Looks like a bug -- confirm intent.
                raise (e._exc_info[1], None, e._exc_info[2])
        return wrapper
|
2013-02-12 01:37:27 +00:00
|
|
|
|
|
|
|
|
|
|
|
def check_string_length(value, name, min_length=0, max_length=None):
    """Validate that *value* is a string of acceptable length.

    :param value: the value of the string
    :param name: the name of the string
    :param min_length: the min_length of the string
    :param max_length: the max_length of the string
    :raises exception.InvalidInput: if the value is not a string, or its
        length falls outside the [min_length, max_length] bounds
    """
    if not isinstance(value, basestring):
        msg = _("%s is not a string or unicode") % name
        raise exception.InvalidInput(message=msg)

    length = len(value)
    if length < min_length:
        msg = _("%(name)s has less than %(min_length)s "
                "characters.") % {'name': name, 'min_length': min_length}
        raise exception.InvalidInput(message=msg)

    # A max_length of None (or 0) disables the upper bound check.
    if max_length and length > max_length:
        msg = _("%(name)s has more than %(max_length)s "
                "characters.") % {'name': name, 'max_length': max_length}
        raise exception.InvalidInput(message=msg)
|
2013-05-14 10:49:15 -04:00
|
|
|
|
|
|
|
|
|
|
|
def spawn_n(func, *args, **kwargs):
    """Passthrough method for eventlet.spawn_n.

    This utility exists so that it can be stubbed for testing without
    interfering with the service spawns.
    """
    # Fire-and-forget: spawn_n does not return the greenthread.
    eventlet.spawn_n(func, *args, **kwargs)
|
2013-06-12 14:59:29 +02:00
|
|
|
|
|
|
|
|
|
|
|
def is_none_string(val):
    """Return True when *val* is a string spelling of None (any case)."""
    return isinstance(val, basestring) and val.lower() == 'none'
|