Rename 'options' variables to 'conf'

In the common-config patch, I'm going to be using 'conf' as the name of
the variable holding configuration values rather than 'options'.

This patch does the renaming across the codebase without making any
functional changes.

Change-Id: I3a86fc01fc76825d6d1e86af882eb8245828ce5f
Author: Mark McLoughlin 2011-11-28 14:37:58 +00:00
Parent: ba44d1c384
Commit: 002d711fb7
43 changed files with 314 additions and 318 deletions
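The rename is purely mechanical: every 'options' parameter, attribute and
local variable becomes 'conf', with no change in behaviour. A minimal sketch
of the before/after shape (the class and option names below are illustrative
only, not taken verbatim from Glance):

# Pre-patch shape: the configuration mapping travels as 'options'.
class ControllerBefore(object):
    def __init__(self, options):
        self.options = options

    def default_store(self):
        return self.options.get('default_store', 'file')

# Post-patch shape: identical behaviour, the variable is simply renamed.
class ControllerAfter(object):
    def __init__(self, conf):
        self.conf = conf

    def default_store(self):
        return self.conf.get('default_store', 'file')

# Both shapes behave identically for the same configuration dict.
conf = {'default_store': 'swift'}
assert ControllerBefore(conf).default_store() == ControllerAfter(conf).default_store()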

View File

@ -37,9 +37,9 @@ class Controller(controller.BaseController):
A controller for managing cached images.
"""
def __init__(self, options):
self.options = options
self.cache = image_cache.ImageCache(self.options)
def __init__(self, conf):
self.conf = conf
self.cache = image_cache.ImageCache(self.conf)
def get_cached_images(self, req):
"""
@ -110,8 +110,8 @@ class CachedImageSerializer(wsgi.JSONResponseSerializer):
pass
def create_resource(options):
def create_resource(conf):
"""Cached Images resource factory method"""
deserializer = CachedImageDeserializer()
serializer = CachedImageSerializer()
return wsgi.Resource(Controller(options), deserializer, serializer)
return wsgi.Resource(Controller(conf), deserializer, serializer)

View File

@ -43,9 +43,9 @@ get_images_re = re.compile(r'^(/v\d+)*/images/(.+)$')
class CacheFilter(wsgi.Middleware):
def __init__(self, app, options):
self.options = options
self.cache = image_cache.ImageCache(options)
def __init__(self, app, conf):
self.conf = conf
self.cache = image_cache.ImageCache(conf)
self.serializer = images.ImageSerializer()
logger.info(_("Initialized image cache middleware"))
super(CacheFilter, self).__init__(app)

View File

@ -28,10 +28,10 @@ logger = logging.getLogger(__name__)
class CacheManageFilter(wsgi.Middleware):
def __init__(self, app, options):
def __init__(self, app, conf):
map = app.map
resource = cached_images.create_resource(options)
resource = cached_images.create_resource(conf)
map.connect("/cached_images",
controller=resource,

View File

@ -35,10 +35,10 @@ logger = logging.getLogger('glance.api.middleware.version_negotiation')
class VersionNegotiationFilter(wsgi.Middleware):
def __init__(self, app, options):
self.versions_app = versions.Controller(options)
def __init__(self, app, conf):
self.versions_app = versions.Controller(conf)
self.version_uri_regex = re.compile(r"^v(\d+)\.?(\d+)?")
self.options = options
self.conf = conf
super(VersionNegotiationFilter, self).__init__(app)
def process_request(self, req):

View File

@ -76,11 +76,11 @@ class Controller(controller.BaseController):
DELETE /images/<ID> -- Delete the image with id <ID>
"""
def __init__(self, options):
self.options = options
glance.store.create_stores(options)
self.notifier = notifier.Notifier(options)
registry.configure_registry_client(options)
def __init__(self, conf):
self.conf = conf
glance.store.create_stores(conf)
self.notifier = notifier.Notifier(conf)
registry.configure_registry_client(conf)
def index(self, req):
"""
@ -290,7 +290,7 @@ class Controller(controller.BaseController):
raise HTTPBadRequest(explanation=msg)
store_name = req.headers.get('x-image-meta-store',
self.options['default_store'])
self.conf['default_store'])
store = self.get_store_or_400(req, store_name)
@ -557,7 +557,7 @@ class Controller(controller.BaseController):
# See https://bugs.launchpad.net/glance/+bug/747799
try:
if image['location']:
schedule_delete_from_backend(image['location'], self.options,
schedule_delete_from_backend(image['location'], self.conf,
req.context, id)
registry.delete_image_metadata(req.context, id)
except exception.NotFound, e:
@ -706,8 +706,8 @@ class ImageSerializer(wsgi.JSONResponseSerializer):
return response
def create_resource(options):
def create_resource(conf):
"""Images resource factory method"""
deserializer = ImageDeserializer()
serializer = ImageSerializer()
return wsgi.Resource(Controller(options), deserializer, serializer)
return wsgi.Resource(Controller(conf), deserializer, serializer)

View File

@ -14,8 +14,8 @@ logger = logging.getLogger('glance.api.v1.members')
class Controller(object):
def __init__(self, options):
self.options = options
def __init__(self, conf):
self.conf = conf
def index(self, req, image_id):
"""
@ -158,8 +158,8 @@ class Controller(object):
return dict(shared_images=members)
def create_resource(options):
def create_resource(conf):
"""Image members resource factory method"""
deserializer = wsgi.JSONRequestDeserializer()
serializer = wsgi.JSONResponseSerializer()
return wsgi.Resource(Controller(options), deserializer, serializer)
return wsgi.Resource(Controller(conf), deserializer, serializer)

View File

@ -30,11 +30,11 @@ class API(wsgi.Router):
"""WSGI router for Glance v1 API requests."""
def __init__(self, options):
self.options = options
def __init__(self, conf):
self.conf = conf
mapper = routes.Mapper()
images_resource = images.create_resource(options)
images_resource = images.create_resource(conf)
mapper.resource("image", "images", controller=images_resource,
collection={'detail': 'GET'})
@ -42,7 +42,7 @@ class API(wsgi.Router):
mapper.connect("/images/{id}", controller=images_resource,
action="meta", conditions=dict(method=["HEAD"]))
members_resource = members.create_resource(options)
members_resource = members.create_resource(conf)
mapper.resource("member", "members", controller=members_resource,
parent_resource=dict(member_name='image',

View File

@ -31,8 +31,8 @@ class Controller(object):
A controller that produces information on the Glance API versions.
"""
def __init__(self, options):
self.options = options
def __init__(self, conf):
self.conf = conf
@webob.dec.wsgify
def __call__(self, req):
@ -63,5 +63,5 @@ class Controller(object):
return response
def get_href(self):
return "http://%s:%s/v1/" % (self.options['bind_host'],
self.options['bind_port'])
return "http://%s:%s/v1/" % (self.conf['bind_host'],
self.conf['bind_port'])

View File

@ -53,8 +53,8 @@ class RequestContext(object):
class ContextMiddleware(wsgi.Middleware):
def __init__(self, app, options):
self.options = options
def __init__(self, app, conf):
self.conf = conf
super(ContextMiddleware, self).__init__(app)
def make_context(self, *args, **kwargs):
@ -64,11 +64,11 @@ class ContextMiddleware(wsgi.Middleware):
# Determine the context class to use
ctxcls = RequestContext
if 'context_class' in self.options:
ctxcls = utils.import_class(self.options['context_class'])
if 'context_class' in self.conf:
ctxcls = utils.import_class(self.conf['context_class'])
# Determine whether to use tenant or owner
owner_is_tenant = config.get_option(self.options, 'owner_is_tenant',
owner_is_tenant = config.get_option(self.conf, 'owner_is_tenant',
type='bool', default=True)
kwargs.setdefault('owner_is_tenant', owner_is_tenant)

View File

@ -29,7 +29,7 @@ from glance.common import exception
class NoopStrategy(object):
"""A notifier that does nothing when called."""
def __init__(self, options):
def __init__(self, conf):
pass
def warn(self, msg):
@ -45,7 +45,7 @@ class NoopStrategy(object):
class LoggingStrategy(object):
"""A notifier that calls logging when called."""
def __init__(self, options):
def __init__(self, conf):
self.logger = logging.getLogger('glance.notifier.logging_notifier')
def warn(self, msg):
@ -61,9 +61,9 @@ class LoggingStrategy(object):
class RabbitStrategy(object):
"""A notifier that puts a message on a queue when called."""
def __init__(self, options):
def __init__(self, conf):
"""Initialize the rabbit notification strategy."""
self._options = options
self._conf = conf
host = self._get_option('rabbit_host', 'str', 'localhost')
port = self._get_option('rabbit_port', 'int', 5672)
use_ssl = self._get_option('rabbit_use_ssl', 'bool', False)
@ -84,7 +84,7 @@ class RabbitStrategy(object):
def _get_option(self, name, datatype, default):
"""Retrieve a configuration option."""
return config.get_option(self._options,
return config.get_option(self._conf,
name,
type=datatype,
default=default)
@ -115,11 +115,11 @@ class Notifier(object):
"default": NoopStrategy,
}
def __init__(self, options, strategy=None):
strategy = config.get_option(options, "notifier_strategy",
def __init__(self, conf, strategy=None):
strategy = config.get_option(conf, "notifier_strategy",
type="str", default="default")
try:
self.strategy = self.STRATEGIES[strategy](options)
self.strategy = self.STRATEGIES[strategy](conf)
except KeyError:
raise exception.InvalidNotifierStrategy(strategy=strategy)

View File

@ -34,15 +34,15 @@ class ImageCache(object):
"""Provides an LRU cache for image data."""
def __init__(self, options):
self.options = options
def __init__(self, conf):
self.conf = conf
self.init_driver()
def init_driver(self):
"""
Create the driver for the cache
"""
driver_name = self.options.get('image_cache_driver', 'sqlite')
driver_name = self.conf.get('image_cache_driver', 'sqlite')
driver_module = (__name__ + '.drivers.' + driver_name + '.Driver')
try:
self.driver_class = utils.import_class(driver_module)
@ -64,7 +64,7 @@ class ImageCache(object):
fall back to using the SQLite driver which has no odd dependencies
"""
try:
self.driver = self.driver_class(self.options)
self.driver = self.driver_class(self.conf)
self.driver.configure()
except exception.BadDriverConfiguration, config_err:
driver_module = self.driver_class.__module__
@ -74,7 +74,7 @@ class ImageCache(object):
logger.info(_("Defaulting to SQLite driver."))
default_module = __name__ + '.drivers.sqlite.Driver'
self.driver_class = utils.import_class(default_module)
self.driver = self.driver_class(self.options)
self.driver = self.driver_class(self.conf)
self.driver.configure()
def is_cached(self, image_id):
@ -150,8 +150,8 @@ class ImageCache(object):
size. Returns a tuple containing the total number of cached
files removed and the total size of all pruned image files.
"""
max_size = int(self.options.get('image_cache_max_size',
DEFAULT_MAX_CACHE_SIZE))
max_size = int(self.conf.get('image_cache_max_size',
DEFAULT_MAX_CACHE_SIZE))
current_size = self.driver.get_cache_size()
if max_size > current_size:
logger.debug(_("Image cache has free space, skipping prune..."))

View File

@ -27,9 +27,9 @@ logger = logging.getLogger(__name__)
class Cleaner(object):
def __init__(self, options):
self.options = options
self.cache = ImageCache(options)
def __init__(self, conf):
self.conf = conf
self.cache = ImageCache(conf)
def run(self):
self.cache.clean()

View File

@ -33,15 +33,15 @@ logger = logging.getLogger(__name__)
class Driver(object):
def __init__(self, options):
def __init__(self, conf):
"""
Initialize the attribute driver with a set of options.
:param options: Dictionary of configuration file options
:param conf: Dictionary of configuration options
:raises `exception.BadDriverConfiguration` if configuration of the
driver fails for any reason.
"""
self.options = options or {}
self.conf = conf or {}
def configure(self):
"""
@ -62,7 +62,7 @@ class Driver(object):
try:
key = 'image_cache_dir'
self.base_dir = self.options[key]
self.base_dir = self.conf[key]
except KeyError:
msg = _('Failed to read %s from config') % key
logger.error(msg)

View File

@ -95,7 +95,7 @@ class Driver(base.Driver):
self.initialize_db()
def initialize_db(self):
db = self.options.get('image_cache_sqlite_db', DEFAULT_SQLITE_DB)
db = self.conf.get('image_cache_sqlite_db', DEFAULT_SQLITE_DB)
self.db_path = os.path.join(self.base_dir, db)
try:
conn = sqlite3.connect(self.db_path, check_same_thread=False,
@ -252,8 +252,8 @@ class Driver(base.Driver):
"""
self.delete_invalid_files()
incomplete_stall_time = int(self.options.get('image_cache_stall_time',
DEFAULT_STALL_TIME))
incomplete_stall_time = int(self.conf.get('image_cache_stall_time',
DEFAULT_STALL_TIME))
now = time.time()
older_than = now - incomplete_stall_time
self.delete_stalled_files(older_than)

View File

@ -424,8 +424,8 @@ class Driver(base.Driver):
"""
self.reap_invalid()
incomplete_stall_time = int(self.options.get('image_cache_stall_time',
DEFAULT_STALL_TIME))
incomplete_stall_time = int(self.conf.get('image_cache_stall_time',
DEFAULT_STALL_TIME))
self.reap_stalled(incomplete_stall_time)

View File

@ -42,14 +42,14 @@ logger = logging.getLogger(__name__)
class Prefetcher(object):
def __init__(self, options):
self.options = options
glance.store.create_stores(options)
self.cache = ImageCache(options)
registry.configure_registry_client(options)
def __init__(self, conf):
self.conf = conf
glance.store.create_stores(conf)
self.cache = ImageCache(conf)
registry.configure_registry_client(conf)
def fetch_image_into_cache(self, image_id):
auth_tok = self.options.get('admin_token')
auth_tok = self.conf.get('admin_token')
ctx = context.RequestContext(is_admin=True, show_deleted=True,
auth_tok=auth_tok)
try:

View File

@ -27,9 +27,9 @@ logger = logging.getLogger(__name__)
class Pruner(object):
def __init__(self, options):
self.options = options
self.cache = ImageCache(options)
def __init__(self, conf):
self.conf = conf
self.cache = ImageCache(conf)
def run(self):
self.cache.prune()

View File

@ -35,13 +35,13 @@ logger = logging.getLogger(__name__)
class Queuer(object):
def __init__(self, options):
self.options = options
self.cache = ImageCache(options)
registry.configure_registry_client(options)
def __init__(self, conf):
self.conf = conf
self.cache = ImageCache(conf)
registry.configure_registry_client(conf)
def queue_image(self, image_id):
auth_tok = self.options.get('admin_token')
auth_tok = self.conf.get('admin_token')
ctx = context.RequestContext(is_admin=True, show_deleted=True,
auth_tok=auth_tok)
try:

View File

@ -34,16 +34,16 @@ _CLIENT_KWARGS = {}
_METADATA_ENCRYPTION_KEY = None
def configure_registry_client(options):
def configure_registry_client(conf):
"""
Sets up a registry client for use in registry lookups
:param options: Configuration options coming from controller
:param conf: Configuration options coming from controller
"""
global _CLIENT_KWARGS, _CLIENT_HOST, _CLIENT_PORT, _METADATA_ENCRYPTION_KEY
try:
host = options['registry_host']
port = int(options['registry_port'])
host = conf['registry_host']
port = int(conf['registry_port'])
except (TypeError, ValueError):
msg = _("Configuration option was not valid")
logger.error(msg)
@ -53,12 +53,12 @@ def configure_registry_client(options):
logger.error(msg)
raise exception.BadRegistryConnectionConfiguration(msg)
use_ssl = config.get_option(options, 'registry_client_protocol',
use_ssl = config.get_option(conf, 'registry_client_protocol',
default='http').lower() == 'https'
key_file = options.get('registry_client_key_file')
cert_file = options.get('registry_client_cert_file')
ca_file = options.get('registry_client_ca_file')
_METADATA_ENCRYPTION_KEY = options.get('metadata_encryption_key')
key_file = conf.get('registry_client_key_file')
cert_file = conf.get('registry_client_cert_file')
ca_file = conf.get('registry_client_ca_file')
_METADATA_ENCRYPTION_KEY = conf.get('metadata_encryption_key')
_CLIENT_HOST = host
_CLIENT_PORT = port
_CLIENT_KWARGS = {'use_ssl': use_ssl,

View File

@ -25,15 +25,15 @@ from glance.common import wsgi
class API(wsgi.Router):
"""WSGI entry point for all Registry requests."""
def __init__(self, options):
def __init__(self, conf):
mapper = routes.Mapper()
images_resource = images.create_resource(options)
images_resource = images.create_resource(conf)
mapper.resource("image", "images", controller=images_resource,
collection={'detail': 'GET'})
mapper.connect("/", controller=images_resource, action="index")
members_resource = members.create_resource(options)
members_resource = members.create_resource(conf)
mapper.resource("member", "members", controller=members_resource,
parent_resource=dict(member_name='image',
collection_name='images'))

View File

@ -49,9 +49,9 @@ SUPPORTED_PARAMS = ('limit', 'marker', 'sort_key', 'sort_dir')
class Controller(object):
def __init__(self, options):
self.options = options
db_api.configure_db(options)
def __init__(self, conf):
self.conf = conf
db_api.configure_db(conf)
def _get_images(self, context, **params):
"""
@ -181,7 +181,7 @@ class Controller(object):
def _get_limit(self, req):
"""Parse a limit query param into something usable."""
try:
default = self.options['limit_param_default']
default = self.conf['limit_param_default']
except KeyError:
# if no value is configured, provide a sane default
default = 25
@ -198,7 +198,7 @@ class Controller(object):
raise exc.HTTPBadRequest(_("limit param must be positive"))
try:
api_limit_max = int(self.options['api_limit_max'])
api_limit_max = int(self.conf['api_limit_max'])
except (KeyError, ValueError):
api_limit_max = 1000
msg = _("Failed to read api_limit_max from config. "
@ -417,8 +417,8 @@ def make_image_dict(image):
return image_dict
def create_resource(options):
def create_resource(conf):
"""Images resource factory method."""
deserializer = wsgi.JSONRequestDeserializer()
serializer = wsgi.JSONResponseSerializer()
return wsgi.Resource(Controller(options), deserializer, serializer)
return wsgi.Resource(Controller(conf), deserializer, serializer)

View File

@ -29,9 +29,9 @@ logger = logging.getLogger('glance.registry.api.v1.members')
class Controller(object):
def __init__(self, options):
self.options = options
db_api.configure_db(options)
def __init__(self, conf):
self.conf = conf
db_api.configure_db(conf)
def index(self, req, image_id):
"""
@ -296,8 +296,8 @@ def make_member_list(members, **attr_map):
if not memb.deleted]
def create_resource(options):
def create_resource(conf):
"""Image members resource factory method."""
deserializer = wsgi.JSONRequestDeserializer()
serializer = wsgi.JSONResponseSerializer()
return wsgi.Resource(Controller(options), deserializer, serializer)
return wsgi.Resource(Controller(conf), deserializer, serializer)

View File

@ -20,7 +20,7 @@
import optparse
def add_options(parser):
def add_conf(parser):
"""
Adds any configuration options that the db layer might have.

View File

@ -58,22 +58,22 @@ STATUSES = ['active', 'saving', 'queued', 'killed', 'pending_delete',
'deleted']
def configure_db(options):
def configure_db(conf):
"""
Establish the database, create an engine if needed, and
register the models.
:param options: Mapping of configuration options
:param conf: Mapping of configuration options
"""
global _ENGINE, sa_logger, logger
if not _ENGINE:
debug = config.get_option(
options, 'debug', type='bool', default=False)
conf, 'debug', type='bool', default=False)
verbose = config.get_option(
options, 'verbose', type='bool', default=False)
conf, 'verbose', type='bool', default=False)
timeout = config.get_option(
options, 'sql_idle_timeout', type='int', default=3600)
sql_connection = config.get_option(options, 'sql_connection')
conf, 'sql_idle_timeout', type='int', default=3600)
sql_connection = config.get_option(conf, 'sql_connection')
try:
_ENGINE = create_engine(sql_connection, pool_recycle=timeout)
except Exception, err:

View File

@ -32,15 +32,15 @@ from glance.common import exception
logger = logging.getLogger('glance.registry.db.migration')
def db_version(options):
def db_version(conf):
"""
Return the database's current migration number
:param options: options dict
:param conf: conf dict
:retval version number
"""
repo_path = get_migrate_repo_path()
sql_connection = options['sql_connection']
sql_connection = conf['sql_connection']
try:
return versioning_api.db_version(sql_connection, repo_path)
except versioning_exceptions.DatabaseNotControlledError, e:
@ -49,78 +49,78 @@ def db_version(options):
raise exception.DatabaseMigrationError(msg)
def upgrade(options, version=None):
def upgrade(conf, version=None):
"""
Upgrade the database's current migration level
:param options: options dict
:param conf: conf dict
:param version: version to upgrade (defaults to latest)
:retval version number
"""
db_version(options) # Ensure db is under migration control
db_version(conf) # Ensure db is under migration control
repo_path = get_migrate_repo_path()
sql_connection = options['sql_connection']
sql_connection = conf['sql_connection']
version_str = version or 'latest'
logger.info(_("Upgrading %(sql_connection)s to version %(version_str)s") %
locals())
return versioning_api.upgrade(sql_connection, repo_path, version)
def downgrade(options, version):
def downgrade(conf, version):
"""
Downgrade the database's current migration level
:param options: options dict
:param conf: conf dict
:param version: version to downgrade to
:retval version number
"""
db_version(options) # Ensure db is under migration control
db_version(conf) # Ensure db is under migration control
repo_path = get_migrate_repo_path()
sql_connection = options['sql_connection']
sql_connection = conf['sql_connection']
logger.info(_("Downgrading %(sql_connection)s to version %(version)s") %
locals())
return versioning_api.downgrade(sql_connection, repo_path, version)
def version_control(options):
def version_control(conf):
"""
Place a database under migration control
:param options: options dict
:param conf: conf dict
"""
sql_connection = options['sql_connection']
sql_connection = conf['sql_connection']
try:
_version_control(options)
_version_control(conf)
except versioning_exceptions.DatabaseAlreadyControlledError, e:
msg = (_("database '%(sql_connection)s' is already under migration "
"control") % locals())
raise exception.DatabaseMigrationError(msg)
def _version_control(options):
def _version_control(conf):
"""
Place a database under migration control
:param options: options dict
:param conf: conf dict
"""
repo_path = get_migrate_repo_path()
sql_connection = options['sql_connection']
sql_connection = conf['sql_connection']
return versioning_api.version_control(sql_connection, repo_path)
def db_sync(options, version=None):
def db_sync(conf, version=None):
"""
Place a database under migration control and perform an upgrade
:param options: options dict
:param conf: conf dict
:retval version number
"""
try:
_version_control(options)
_version_control(conf)
except versioning_exceptions.DatabaseAlreadyControlledError, e:
pass
upgrade(options, version=version)
upgrade(conf, version=version)
def get_migrate_repo_path():

View File

@ -87,7 +87,7 @@ def register_store(store_module, schemes):
location.register_scheme_map(scheme_map)
def create_stores(options):
def create_stores(conf):
"""
Construct the store objects with supplied configuration options
"""
@ -98,7 +98,7 @@ def create_stores(options):
raise BackendException('Unable to create store. Could not find '
'a class named Store in module %s.'
% store_module)
STORES[store_module] = store_class(options)
STORES[store_module] = store_class(conf)
def get_store_from_scheme(scheme):
@ -154,11 +154,11 @@ def get_store_from_location(uri):
return loc.store_name
def schedule_delete_from_backend(uri, options, context, image_id, **kwargs):
def schedule_delete_from_backend(uri, conf, context, image_id, **kwargs):
"""
Given a uri and a time, schedule the deletion of an image.
"""
use_delay = config.get_option(options, 'delayed_delete', type='bool',
use_delay = config.get_option(conf, 'delayed_delete', type='bool',
default=False)
if not use_delay:
registry.update_image_metadata(context, image_id,
@ -169,8 +169,8 @@ def schedule_delete_from_backend(uri, options, context, image_id, **kwargs):
msg = _("Failed to delete image from store (%(uri)s).") % locals()
logger.error(msg)
datadir = config.get_option(options, 'scrubber_datadir')
scrub_time = config.get_option(options, 'scrub_time', type='int',
datadir = config.get_option(conf, 'scrubber_datadir')
scrub_time = config.get_option(conf, 'scrub_time', type='int',
default=0)
delete_time = time.time() + scrub_time
file_path = os.path.join(datadir, str(image_id))

View File

@ -28,13 +28,13 @@ class Store(object):
CHUNKSIZE = (16 * 1024 * 1024) # 16M
def __init__(self, options=None):
def __init__(self, conf=None):
"""
Initialize the Store
:param options: Optional dictionary of configuration options
:param conf: Optional dictionary of configuration options
"""
self.options = options or {}
self.conf = conf or {}
self.configure()

View File

@ -115,7 +115,7 @@ class Store(glance.store.base.Store):
reason=reason)
def _option_get(self, param):
result = self.options.get(param)
result = self.conf.get(param)
if not result:
reason = _("Could not find %s in configuration options.") % param
logger.error(reason)

View File

@ -109,16 +109,16 @@ class Store(glance.store.base.Store):
"""
try:
self.chunk_size = int(
self.options.get(
self.conf.get(
'rbd_store_chunk_size',
DEFAULT_CHUNKSIZE)) * 1024 * 1024
# these must not be unicode since they will be passed to a
# non-unicode-aware C library
self.pool = str(self.options.get('rbd_store_pool',
self.pool = str(self.conf.get('rbd_store_pool',
DEFAULT_POOL))
self.user = str(self.options.get('rbd_store_user',
self.user = str(self.conf.get('rbd_store_user',
DEFAULT_USER))
self.conf_file = str(self.options.get('rbd_store_ceph_conf',
self.conf_file = str(self.conf.get('rbd_store_ceph_conf',
DEFAULT_CONFFILE))
except Exception, e:
reason = _("Error in store configuration: %s") % e

View File

@ -210,14 +210,14 @@ class Store(glance.store.base.Store):
else: # Defaults http
self.full_s3_host = 'http://' + self.s3_host
if self.options.get('s3_store_object_buffer_dir'):
self.s3_store_object_buffer_dir = self.options.get(
if self.conf.get('s3_store_object_buffer_dir'):
self.s3_store_object_buffer_dir = self.conf.get(
's3_store_object_buffer_dir')
else:
self.s3_store_object_buffer_dir = None
def _option_get(self, param):
result = self.options.get(param)
result = self.conf.get(param)
if not result:
reason = _("Could not find %(param)s in configuration "
"options.") % locals()
@ -297,7 +297,7 @@ class Store(glance.store.base.Store):
host=loc.s3serviceurl,
is_secure=(loc.scheme == 's3+https'))
create_bucket_if_missing(self.bucket, s3_conn, self.options)
create_bucket_if_missing(self.bucket, s3_conn, self.conf)
bucket_obj = get_bucket(s3_conn, self.bucket)
obj_name = str(image_id)
@ -403,21 +403,21 @@ def get_bucket(conn, bucket_id):
return bucket
def create_bucket_if_missing(bucket, s3_conn, options):
def create_bucket_if_missing(bucket, s3_conn, conf):
"""
Creates a missing bucket in S3 if the
``s3_store_create_bucket_on_put`` option is set.
:param bucket: Name of bucket to create
:param s3_conn: Connection to S3
:param options: Option mapping
:param conf: Option mapping
"""
from boto.exception import S3ResponseError
try:
s3_conn.get_bucket(bucket)
except S3ResponseError, e:
if e.status == httplib.NOT_FOUND:
add_bucket = config.get_option(options,
add_bucket = config.get_option(conf,
's3_store_create_bucket_on_put',
type='bool', default=False)
if add_bucket:

View File

@ -65,23 +65,23 @@ class Daemon(object):
class Scrubber(object):
CLEANUP_FILE = ".cleanup"
def __init__(self, options):
logger.info(_("Initializing scrubber with options: %s") % options)
self.options = options
self.datadir = config.get_option(options, 'scrubber_datadir')
self.cleanup = config.get_option(options, 'cleanup_scrubber',
def __init__(self, conf):
logger.info(_("Initializing scrubber with conf: %s") % conf)
self.conf = conf
self.datadir = config.get_option(conf, 'scrubber_datadir')
self.cleanup = config.get_option(conf, 'cleanup_scrubber',
type='bool', default=False)
host = config.get_option(options, 'registry_host')
port = config.get_option(options, 'registry_port', type='int')
host = config.get_option(conf, 'registry_host')
port = config.get_option(conf, 'registry_port', type='int')
self.registry = client.RegistryClient(host, port)
utils.safe_mkdirs(self.datadir)
if self.cleanup:
self.cleanup_time = config.get_option(options,
self.cleanup_time = config.get_option(conf,
'cleanup_scrubber_time',
type='int', default=86400)
store.create_stores(options)
store.create_stores(conf)
def run(self, pool, event=None):
now = time.time()

View File

@ -187,7 +187,7 @@ class Store(glance.store.base.Store):
def configure(self):
self.snet = config.get_option(
self.options, 'swift_enable_snet', type='bool', default=False)
self.conf, 'swift_enable_snet', type='bool', default=False)
def configure_add(self):
"""
@ -199,30 +199,30 @@ class Store(glance.store.base.Store):
self.auth_address = self._option_get('swift_store_auth_address')
self.user = self._option_get('swift_store_user')
self.key = self._option_get('swift_store_key')
self.container = self.options.get('swift_store_container',
self.container = self.conf.get('swift_store_container',
DEFAULT_CONTAINER)
try:
if self.options.get('swift_store_large_object_size'):
if self.conf.get('swift_store_large_object_size'):
self.large_object_size = int(
self.options.get('swift_store_large_object_size')
self.conf.get('swift_store_large_object_size')
) * (1024 * 1024) # Size specified in MB in conf files
else:
self.large_object_size = DEFAULT_LARGE_OBJECT_SIZE
if self.options.get('swift_store_large_object_chunk_size'):
if self.conf.get('swift_store_large_object_chunk_size'):
self.large_object_chunk_size = int(
self.options.get('swift_store_large_object_chunk_size')
self.conf.get('swift_store_large_object_chunk_size')
) * (1024 * 1024) # Size specified in MB in conf files
else:
self.large_object_chunk_size = DEFAULT_LARGE_OBJECT_CHUNK_SIZE
if self.options.get('swift_store_object_buffer_dir'):
if self.conf.get('swift_store_object_buffer_dir'):
self.swift_store_object_buffer_dir = (
self.options.get('swift_store_object_buffer_dir'))
self.conf.get('swift_store_object_buffer_dir'))
else:
self.swift_store_object_buffer_dir = None
except Exception, e:
reason = _("Error in configuration options: %s") % e
reason = _("Error in configuration conf: %s") % e
logger.error(reason)
raise exception.BadStoreConfiguration(store_name="swift",
reason=reason)
@ -283,7 +283,7 @@ class Store(glance.store.base.Store):
authurl=auth_url, user=user, key=key, snet=snet)
def _option_get(self, param):
result = self.options.get(param)
result = self.conf.get(param)
if not result:
reason = (_("Could not find %(param)s in configuration "
"options.") % locals())
@ -330,7 +330,7 @@ class Store(glance.store.base.Store):
swift_conn = self._make_swift_connection(
auth_url=self.full_auth_address, user=self.user, key=self.key)
create_container_if_missing(self.container, swift_conn, self.options)
create_container_if_missing(self.container, swift_conn, self.conf)
obj_name = str(image_id)
location = StoreLocation({'scheme': self.scheme,
@ -482,20 +482,20 @@ class Store(glance.store.base.Store):
raise
def create_container_if_missing(container, swift_conn, options):
def create_container_if_missing(container, swift_conn, conf):
"""
Creates a missing container in Swift if the
``swift_store_create_container_on_put`` option is set.
:param container: Name of container to create
:param swift_conn: Connection to Swift
:param options: Option mapping
:param conf: Option mapping
"""
try:
swift_conn.head_container(container)
except swift_client.ClientException, e:
if e.http_status == httplib.NOT_FOUND:
add_container = config.get_option(options,
add_container = config.get_option(conf,
'swift_store_create_container_on_put',
type='bool', default=False)
if add_container:

View File

@ -97,9 +97,9 @@ def stub_out_registry_and_store_server(stubs):
sql_connection = os.environ.get('GLANCE_SQL_CONNECTION',
"sqlite://")
context_class = 'glance.registry.context.RequestContext'
options = {'sql_connection': sql_connection, 'verbose': VERBOSE,
'debug': DEBUG, 'context_class': context_class}
api = context.ContextMiddleware(rserver.API(options), options)
conf = {'sql_connection': sql_connection, 'verbose': VERBOSE,
'debug': DEBUG, 'context_class': context_class}
api = context.ContextMiddleware(rserver.API(conf), conf)
res = self.req.get_response(api)
# httplib.Response has a read() method...fake it out
@ -145,17 +145,17 @@ def stub_out_registry_and_store_server(stubs):
self.req.body = body
def getresponse(self):
options = {'verbose': VERBOSE,
'debug': DEBUG,
'bind_host': '0.0.0.0',
'bind_port': '9999999',
'registry_host': '0.0.0.0',
'registry_port': '9191',
'default_store': 'file',
'filesystem_store_datadir': FAKE_FILESYSTEM_ROOTDIR}
conf = {'verbose': VERBOSE,
'debug': DEBUG,
'bind_host': '0.0.0.0',
'bind_port': '9999999',
'registry_host': '0.0.0.0',
'registry_port': '9191',
'default_store': 'file',
'filesystem_store_datadir': FAKE_FILESYSTEM_ROOTDIR}
api = version_negotiation.VersionNegotiationFilter(
context.ContextMiddleware(router.API(options), options),
options)
context.ContextMiddleware(router.API(conf), conf),
conf)
res = self.req.get_response(api)
# httplib.Response has a read() method...fake it out
@ -218,9 +218,9 @@ def stub_out_registry_server(stubs, **kwargs):
def getresponse(self):
sql_connection = kwargs.get('sql_connection', "sqlite:///")
context_class = 'glance.registry.context.RequestContext'
options = {'sql_connection': sql_connection, 'verbose': VERBOSE,
'debug': DEBUG, 'context_class': context_class}
api = context.ContextMiddleware(rserver.API(options), options)
conf = {'sql_connection': sql_connection, 'verbose': VERBOSE,
'debug': DEBUG, 'context_class': context_class}
api = context.ContextMiddleware(rserver.API(conf), conf)
res = self.req.get_response(api)
# httplib.Response has a read() method...fake it out

View File

@ -42,14 +42,14 @@ UUID1 = _gen_uuid()
UUID2 = _gen_uuid()
OPTIONS = {'sql_connection': 'sqlite://',
'verbose': False,
'debug': False,
'registry_host': '0.0.0.0',
'registry_port': '9191',
'default_store': 'file',
'filesystem_store_datadir': stubs.FAKE_FILESYSTEM_ROOTDIR,
'context_class': 'glance.registry.context.RequestContext'}
CONF = {'sql_connection': 'sqlite://',
'verbose': False,
'debug': False,
'registry_host': '0.0.0.0',
'registry_port': '9191',
'default_store': 'file',
'filesystem_store_datadir': stubs.FAKE_FILESYSTEM_ROOTDIR,
'context_class': 'glance.registry.context.RequestContext'}
class TestRegistryDb(unittest.TestCase):
@ -64,15 +64,14 @@ class TestRegistryDb(unittest.TestCase):
API controller results in a) an Exception being thrown and b)
a message being logged to the registry log file...
"""
bad_options = {'verbose': True,
'debug': True,
'sql_connection': 'baddriver:///'}
bad_conf = {'verbose': True,
'debug': True,
'sql_connection': 'baddriver:///'}
# We set this to None to trigger a reconfigure, otherwise
# other modules may have already correctly configured the DB
orig_engine = db_api._ENGINE
db_api._ENGINE = None
self.assertRaises(ImportError, db_api.configure_db,
bad_options)
self.assertRaises(ImportError, db_api.configure_db, bad_conf)
exc_raised = False
self.log_written = False
@ -82,7 +81,7 @@ class TestRegistryDb(unittest.TestCase):
self.stubs.Set(db_api.logger, 'error', fake_log_error)
try:
api_obj = rserver.API(bad_options)
api_obj = rserver.API(bad_conf)
except ImportError:
exc_raised = True
finally:
@ -102,7 +101,7 @@ class TestRegistryAPI(unittest.TestCase):
self.stubs = stubout.StubOutForTesting()
stubs.stub_out_registry_and_store_server(self.stubs)
stubs.stub_out_filesystem_backend()
self.api = context.ContextMiddleware(rserver.API(OPTIONS), OPTIONS)
self.api = context.ContextMiddleware(rserver.API(CONF), CONF)
self.FIXTURES = [
{'id': UUID1,
'name': 'fake image #1',
@ -137,7 +136,7 @@ class TestRegistryAPI(unittest.TestCase):
'location': "file:///tmp/glance-tests/2",
'properties': {}}]
self.context = rcontext.RequestContext(is_admin=True)
db_api.configure_db(OPTIONS)
db_api.configure_db(CONF)
self.destroy_fixtures()
self.create_fixtures()
@ -1936,7 +1935,7 @@ class TestGlanceAPI(unittest.TestCase):
stubs.stub_out_registry_and_store_server(self.stubs)
stubs.stub_out_filesystem_backend()
sql_connection = os.environ.get('GLANCE_SQL_CONNECTION', "sqlite://")
self.api = context.ContextMiddleware(router.API(OPTIONS), OPTIONS)
self.api = context.ContextMiddleware(router.API(CONF), CONF)
self.FIXTURES = [
{'id': UUID1,
'name': 'fake image #1',
@ -1967,7 +1966,7 @@ class TestGlanceAPI(unittest.TestCase):
'location': "file:///tmp/glance-tests/2",
'properties': {}}]
self.context = rcontext.RequestContext(is_admin=True)
db_api.configure_db(OPTIONS)
db_api.configure_db(CONF)
self.destroy_fixtures()
self.create_fixtures()

View File

@ -35,7 +35,7 @@ from glance.registry import client as rclient
from glance.registry import context as rcontext
from glance.tests import stubs
OPTIONS = {'sql_connection': 'sqlite://'}
CONF = {'sql_connection': 'sqlite://'}
_gen_uuid = utils.generate_uuid
@ -138,7 +138,7 @@ class TestRegistryClient(unittest.TestCase):
"""Establish a clean test environment"""
self.stubs = stubout.StubOutForTesting()
stubs.stub_out_registry_and_store_server(self.stubs)
db_api.configure_db(OPTIONS)
db_api.configure_db(CONF)
self.context = rcontext.RequestContext(is_admin=True)
self.FIXTURES = [
{'id': UUID1,
@ -1138,7 +1138,7 @@ class TestClient(unittest.TestCase):
self.stubs = stubout.StubOutForTesting()
stubs.stub_out_registry_and_store_server(self.stubs)
stubs.stub_out_filesystem_backend()
db_api.configure_db(OPTIONS)
db_api.configure_db(CONF)
self.client = client.Client("0.0.0.0")
self.FIXTURES = [
{'id': UUID1,

View File

@ -48,21 +48,21 @@ class TestOptionParsing(unittest.TestCase):
# of typed values
parser = optparse.OptionParser()
config.add_common_options(parser)
parsed_options, args = config.parse_options(parser, [])
parsed_conf, args = config.parse_options(parser, [])
expected_options = {'verbose': False, 'debug': False,
'config_file': None}
self.assertEquals(expected_options, parsed_options)
expected_conf = {'verbose': False, 'debug': False,
'config_file': None}
self.assertEquals(expected_conf, parsed_conf)
# test non-empty args and that parse_options() returns a mapping
# of typed values matching supplied args
parser = optparse.OptionParser()
config.add_common_options(parser)
parsed_options, args = config.parse_options(parser, ['--verbose'])
parsed_conf, args = config.parse_options(parser, ['--verbose'])
expected_options = {'verbose': True, 'debug': False,
'config_file': None}
self.assertEquals(expected_options, parsed_options)
expected_conf = {'verbose': True, 'debug': False,
'config_file': None}
self.assertEquals(expected_conf, parsed_conf)
# test non-empty args that contain unknown options raises
# a SystemExit exception. Not ideal, but unfortunately optparse

View File

@ -29,7 +29,7 @@ from glance.store.location import get_location_from_uri
from glance.store.filesystem import Store, ChunkedFile
from glance.tests import stubs
FILESYSTEM_OPTIONS = {
FILESYSTEM_CONF = {
'verbose': True,
'debug': True,
'filesystem_store_datadir': stubs.FAKE_FILESYSTEM_ROOTDIR}
@ -43,7 +43,7 @@ class TestStore(unittest.TestCase):
stubs.stub_out_filesystem_backend()
self.orig_chunksize = ChunkedFile.CHUNKSIZE
ChunkedFile.CHUNKSIZE = 10
self.store = Store(FILESYSTEM_OPTIONS)
self.store = Store(FILESYSTEM_CONF)
def tearDown(self):
"""Clear the test environment"""
@ -116,9 +116,6 @@ class TestStore(unittest.TestCase):
raises an appropriate exception
"""
image_file = StringIO.StringIO("nevergonnamakeit")
options = {'verbose': True,
'debug': True,
'filesystem_store_datadir': stubs.FAKE_FILESYSTEM_ROOTDIR}
self.assertRaises(exception.Duplicate,
self.store.add,
'2', image_file, 0)

View File

@ -136,7 +136,7 @@ class ImageCacheTestCase(object):
self.assertTrue(os.path.exists(incomplete_file_path))
self.cache.options['image_cache_stall_time'] = 0
self.cache.conf['image_cache_stall_time'] = 0
self.cache.clean()
self.assertFalse(os.path.exists(incomplete_file_path))
@ -250,12 +250,12 @@ class TestImageCacheXattr(unittest.TestCase,
self.inited = True
self.disabled = False
self.options = {'image_cache_dir': self.cache_dir,
'image_cache_driver': 'xattr',
'image_cache_max_size': 1024 * 5,
'registry_host': '0.0.0.0',
'registry_port': 9191}
self.cache = image_cache.ImageCache(self.options)
self.conf = {'image_cache_dir': self.cache_dir,
'image_cache_driver': 'xattr',
'image_cache_max_size': 1024 * 5,
'registry_host': '0.0.0.0',
'registry_port': 9191}
self.cache = image_cache.ImageCache(self.conf)
if not xattr_writes_supported(self.cache_dir):
self.inited = True
@ -294,12 +294,12 @@ class TestImageCacheSqlite(unittest.TestCase,
self.disabled = False
self.cache_dir = os.path.join("/", "tmp", "test.cache.%d" %
random.randint(0, 1000000))
self.options = {'image_cache_dir': self.cache_dir,
'image_cache_driver': 'sqlite',
'image_cache_max_size': 1024 * 5,
'registry_host': '0.0.0.0',
'registry_port': 9191}
self.cache = image_cache.ImageCache(self.options)
self.conf = {'image_cache_dir': self.cache_dir,
'image_cache_driver': 'sqlite',
'image_cache_max_size': 1024 * 5,
'registry_host': '0.0.0.0',
'registry_port': 9191}
self.cache = image_cache.ImageCache(self.conf)
def tearDown(self):
if os.path.exists(self.cache_dir):

View File

@ -124,10 +124,10 @@ class TestMigrations(unittest.TestCase):
that there are no errors in the version scripts for each engine
"""
for key, engine in self.engines.items():
options = {'sql_connection': TestMigrations.TEST_DATABASES[key]}
self._walk_versions(options)
conf = {'sql_connection': TestMigrations.TEST_DATABASES[key]}
self._walk_versions(conf)
def _walk_versions(self, options):
def _walk_versions(self, conf):
# Determine latest version script from the repo, then
# upgrade from 1 through to the latest, with no data
# in the databases. This just checks that the schema itself
@ -136,24 +136,24 @@ class TestMigrations(unittest.TestCase):
# Assert we are not under version control...
self.assertRaises(exception.DatabaseMigrationError,
migration_api.db_version,
options)
conf)
# Place the database under version control
migration_api.version_control(options)
migration_api.version_control(conf)
cur_version = migration_api.db_version(options)
cur_version = migration_api.db_version(conf)
self.assertEqual(0, cur_version)
for version in xrange(1, TestMigrations.REPOSITORY.latest + 1):
migration_api.upgrade(options, version)
cur_version = migration_api.db_version(options)
migration_api.upgrade(conf, version)
cur_version = migration_api.db_version(conf)
self.assertEqual(cur_version, version)
# Now walk it back down to 0 from the latest, testing
# the downgrade paths.
for version in reversed(
xrange(0, TestMigrations.REPOSITORY.latest)):
migration_api.downgrade(options, version)
cur_version = migration_api.db_version(options)
migration_api.downgrade(conf, version)
cur_version = migration_api.db_version(conf)
self.assertEqual(cur_version, version)
def test_no_data_loss_2_to_3_to_2(self):
@ -165,14 +165,14 @@ class TestMigrations(unittest.TestCase):
the image_properties table back into the base image table.
"""
for key, engine in self.engines.items():
options = {'sql_connection': TestMigrations.TEST_DATABASES[key]}
self._no_data_loss_2_to_3_to_2(engine, options)
conf = {'sql_connection': TestMigrations.TEST_DATABASES[key]}
self._no_data_loss_2_to_3_to_2(engine, conf)
def _no_data_loss_2_to_3_to_2(self, engine, options):
migration_api.version_control(options)
migration_api.upgrade(options, 2)
def _no_data_loss_2_to_3_to_2(self, engine, conf):
migration_api.version_control(conf)
migration_api.upgrade(conf, 2)
cur_version = migration_api.db_version(options)
cur_version = migration_api.db_version(conf)
self.assertEquals(2, cur_version)
# We are now on version 2. Check that the images table does
@ -214,9 +214,9 @@ class TestMigrations(unittest.TestCase):
# Now let's upgrade to 3. This should move the type column
# to the image_properties table as type properties.
migration_api.upgrade(options, 3)
migration_api.upgrade(conf, 3)
cur_version = migration_api.db_version(options)
cur_version = migration_api.db_version(conf)
self.assertEquals(3, cur_version)
images_table = Table('images', MetaData(), autoload=True,
@ -240,7 +240,7 @@ class TestMigrations(unittest.TestCase):
# Downgrade to 2 and check that the type properties were moved
# to the main image table
migration_api.downgrade(options, 2)
migration_api.downgrade(conf, 2)
images_table = Table('images', MetaData(), autoload=True,
autoload_with=engine)

View File

@ -26,20 +26,20 @@ class TestInvalidNotifier(unittest.TestCase):
"""Test that notifications are generated appropriately"""
def test_cannot_create(self):
options = {"notifier_strategy": "invalid_notifier"}
conf = {"notifier_strategy": "invalid_notifier"}
self.assertRaises(exception.InvalidNotifierStrategy,
notifier.Notifier,
options)
conf)
class TestLoggingNotifier(unittest.TestCase):
"""Test the logging notifier is selected and works properly."""
def setUp(self):
options = {"notifier_strategy": "logging"}
conf = {"notifier_strategy": "logging"}
self.called = False
self.logger = logging.getLogger("glance.notifier.logging_notifier")
self.notifier = notifier.Notifier(options)
self.notifier = notifier.Notifier(conf)
def _called(self, msg):
self.called = msg
@ -67,8 +67,8 @@ class TestNoopNotifier(unittest.TestCase):
"""Test that the noop notifier works...and does nothing?"""
def setUp(self):
options = {"notifier_strategy": "noop"}
self.notifier = notifier.Notifier(options)
conf = {"notifier_strategy": "noop"}
self.notifier = notifier.Notifier(conf)
def test_warn(self):
self.notifier.warn("test_event", "test_message")
@ -86,8 +86,8 @@ class TestRabbitNotifier(unittest.TestCase):
def setUp(self):
notifier.RabbitStrategy._send_message = self._send_message
self.called = False
options = {"notifier_strategy": "rabbit"}
self.notifier = notifier.Notifier(options)
conf = {"notifier_strategy": "rabbit"}
self.notifier = notifier.Notifier(conf)
def _send_message(self, message, priority):
self.called = {

View File

@ -37,12 +37,12 @@ from glance.store.s3 import Store
FAKE_UUID = utils.generate_uuid()
FIVE_KB = (5 * 1024)
S3_OPTIONS = {'verbose': True,
'debug': True,
's3_store_access_key': 'user',
's3_store_secret_key': 'key',
's3_store_host': 'localhost:8080',
's3_store_bucket': 'glance'}
S3_CONF = {'verbose': True,
'debug': True,
's3_store_access_key': 'user',
's3_store_secret_key': 'key',
's3_store_host': 'localhost:8080',
's3_store_bucket': 'glance'}
# We stub out as little as possible to ensure that the code paths
@ -163,7 +163,7 @@ class TestStore(unittest.TestCase):
"""Establish a clean test environment"""
self.stubs = stubout.StubOutForTesting()
stub_out_s3(self.stubs)
self.store = Store(S3_OPTIONS)
self.store = Store(S3_CONF)
def tearDown(self):
"""Clear the test environment"""
@ -204,10 +204,10 @@ class TestStore(unittest.TestCase):
expected_s3_contents = "*" * expected_s3_size
expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
expected_location = format_s3_location(
S3_OPTIONS['s3_store_access_key'],
S3_OPTIONS['s3_store_secret_key'],
S3_OPTIONS['s3_store_host'],
S3_OPTIONS['s3_store_bucket'],
S3_CONF['s3_store_access_key'],
S3_CONF['s3_store_secret_key'],
S3_CONF['s3_store_host'],
S3_CONF['s3_store_bucket'],
expected_image_id)
image_s3 = StringIO.StringIO(expected_s3_contents)
@ -250,17 +250,17 @@ class TestStore(unittest.TestCase):
expected_s3_contents = "*" * expected_s3_size
expected_checksum = \
hashlib.md5(expected_s3_contents).hexdigest()
new_options = S3_OPTIONS.copy()
new_options['s3_store_host'] = variation
new_conf = S3_CONF.copy()
new_conf['s3_store_host'] = variation
expected_location = format_s3_location(
new_options['s3_store_access_key'],
new_options['s3_store_secret_key'],
new_options['s3_store_host'],
new_options['s3_store_bucket'],
new_conf['s3_store_access_key'],
new_conf['s3_store_secret_key'],
new_conf['s3_store_host'],
new_conf['s3_store_bucket'],
expected_image_id)
image_s3 = StringIO.StringIO(expected_s3_contents)
self.store = Store(new_options)
self.store = Store(new_conf)
location, size, checksum = self.store.add(expected_image_id,
image_s3,
expected_s3_size)
@ -288,11 +288,11 @@ class TestStore(unittest.TestCase):
FAKE_UUID, image_s3, 0)
def _option_required(self, key):
options = S3_OPTIONS.copy()
del options[key]
conf = S3_CONF.copy()
del conf[key]
try:
self.store = Store(options)
self.store = Store(conf)
return self.store.add == self.store.add_disabled
except:
return False

View File

@ -38,12 +38,12 @@ Store = glance.store.swift.Store
FIVE_KB = (5 * 1024)
FIVE_GB = (5 * 1024 * 1024 * 1024)
MAX_SWIFT_OBJECT_SIZE = FIVE_GB
SWIFT_OPTIONS = {'verbose': True,
'debug': True,
'swift_store_user': 'user',
'swift_store_key': 'key',
'swift_store_auth_address': 'localhost:8080',
'swift_store_container': 'glance'}
SWIFT_CONF = {'verbose': True,
'debug': True,
'swift_store_user': 'user',
'swift_store_key': 'key',
'swift_store_auth_address': 'localhost:8080',
'swift_store_container': 'glance'}
# We stub out as little as possible to ensure that the code paths
@ -182,7 +182,7 @@ class TestStore(unittest.TestCase):
"""Establish a clean test environment"""
self.stubs = stubout.StubOutForTesting()
stub_out_swift_common_client(self.stubs)
self.store = Store(SWIFT_OPTIONS)
self.store = Store(SWIFT_CONF)
def tearDown(self):
"""Clear the test environment"""
@ -288,12 +288,12 @@ class TestStore(unittest.TestCase):
expected_swift_contents = "*" * expected_swift_size
expected_checksum = \
hashlib.md5(expected_swift_contents).hexdigest()
new_options = SWIFT_OPTIONS.copy()
new_options['swift_store_auth_address'] = variation
new_conf = SWIFT_CONF.copy()
new_conf['swift_store_auth_address'] = variation
image_swift = StringIO.StringIO(expected_swift_contents)
self.store = Store(new_options)
self.store = Store(new_conf)
location, size, checksum = self.store.add(image_id, image_swift,
expected_swift_size)
@ -314,11 +314,11 @@ class TestStore(unittest.TestCase):
Tests that adding an image with a non-existing container
raises an appropriate exception
"""
options = SWIFT_OPTIONS.copy()
options['swift_store_create_container_on_put'] = 'False'
options['swift_store_container'] = 'noexist'
conf = SWIFT_CONF.copy()
conf['swift_store_create_container_on_put'] = 'False'
conf['swift_store_container'] = 'noexist'
image_swift = StringIO.StringIO("nevergonnamakeit")
self.store = Store(options)
self.store = Store(conf)
# We check the exception text to ensure the container
# missing text is found in it, otherwise, we would have
@ -337,9 +337,9 @@ class TestStore(unittest.TestCase):
Tests that adding an image with a non-existing container
creates the container automatically if flag is set
"""
options = SWIFT_OPTIONS.copy()
options['swift_store_create_container_on_put'] = 'True'
options['swift_store_container'] = 'noexist'
conf = SWIFT_CONF.copy()
conf['swift_store_create_container_on_put'] = 'True'
conf['swift_store_container'] = 'noexist'
expected_swift_size = FIVE_KB
expected_swift_contents = "*" * expected_swift_size
expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
@ -348,7 +348,7 @@ class TestStore(unittest.TestCase):
'/noexist/%s' % expected_image_id
image_swift = StringIO.StringIO(expected_swift_contents)
self.store = Store(options)
self.store = Store(conf)
location, size, checksum = self.store.add(expected_image_id,
image_swift,
expected_swift_size)
@ -372,8 +372,8 @@ class TestStore(unittest.TestCase):
and then verify that there have been a number of calls to
put_object()...
"""
options = SWIFT_OPTIONS.copy()
options['swift_store_container'] = 'glance'
conf = SWIFT_CONF.copy()
conf['swift_store_container'] = 'glance'
expected_swift_size = FIVE_KB
expected_swift_contents = "*" * expected_swift_size
expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
@ -387,7 +387,7 @@ class TestStore(unittest.TestCase):
try:
glance.store.swift.DEFAULT_LARGE_OBJECT_SIZE = 1024
glance.store.swift.DEFAULT_LARGE_OBJECT_CHUNK_SIZE = 1024
self.store = Store(options)
self.store = Store(conf)
location, size, checksum = self.store.add(expected_image_id,
image_swift,
expected_swift_size)
@ -418,8 +418,8 @@ class TestStore(unittest.TestCase):
Bug lp:891738
"""
options = SWIFT_OPTIONS.copy()
options['swift_store_container'] = 'glance'
conf = SWIFT_CONF.copy()
conf['swift_store_container'] = 'glance'
# Set up a 'large' image of 5KB
expected_swift_size = FIVE_KB
@ -440,7 +440,7 @@ class TestStore(unittest.TestCase):
MAX_SWIFT_OBJECT_SIZE = 1024
glance.store.swift.DEFAULT_LARGE_OBJECT_SIZE = 1024
glance.store.swift.DEFAULT_LARGE_OBJECT_CHUNK_SIZE = 1024
self.store = Store(options)
self.store = Store(conf)
location, size, checksum = self.store.add(expected_image_id,
image_swift, 0)
finally:
@ -471,11 +471,11 @@ class TestStore(unittest.TestCase):
FAKE_UUID, image_swift, 0)
def _option_required(self, key):
options = SWIFT_OPTIONS.copy()
del options[key]
conf = SWIFT_CONF.copy()
del conf[key]
try:
self.store = Store(options)
self.store = Store(conf)
return self.store.add == self.store.add_disabled
except:
return False

View File

@ -46,9 +46,9 @@ class VersionsTest(unittest.TestCase):
def test_get_version_list(self):
req = webob.Request.blank('/')
req.accept = "application/json"
options = {'bind_host': '0.0.0.0',
'bind_port': 9292}
res = req.get_response(versions.Controller(options))
conf = {'bind_host': '0.0.0.0',
'bind_port': 9292}
res = req.get_response(versions.Controller(conf))
self.assertEqual(res.status_int, 300)
self.assertEqual(res.content_type, "application/json")
results = json.loads(res.body)["versions"]