Add cfg's new global CONF object

Implements blueprint cfg-global-object

Begin adopting cfg's global CONF object pattern rather than passing
a conf object around everywhere.

This iteration does just enough to have each of the glance programs
initialize CONF and get the unit tests working.
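
To illustrate the pattern being adopted, a minimal sketch with hypothetical
module and option names (not part of this commit): options are registered
against the shared cfg.CONF object at import time, and consumers read from
that same global instead of receiving a conf argument.

    # hypothetical module, e.g. glance/example/service.py
    from glance.openstack.common import cfg

    service_opts = [
        cfg.StrOpt('bind_host', default='0.0.0.0'),
        cfg.IntOpt('bind_port', default=9292),
    ]

    # Register once at import time against the shared global, replacing
    # the old per-instance conf.register_opts() calls in __init__().
    CONF = cfg.CONF
    CONF.register_opts(service_opts)

    def get_bind_addr(default_port=None):
        # No conf parameter needed; every caller reads the same CONF.
        return (CONF.bind_host, CONF.bind_port or default_port)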

Change-Id: Ia34959b3ba52cbe933d5d347f31319271b2e14f9
Mark McLoughlin 2012-05-29 08:51:12 +01:00
parent 41d285ab02
commit 1038a19a32
55 changed files with 653 additions and 628 deletions

View File

@ -39,6 +39,9 @@ gettext.install('glance', unicode=1)
from glance.common import config
from glance.common import wsgi
from glance.common import exception
from glance.openstack.common import cfg
CONF = cfg.CONF
def fail(returncode, e):
@ -48,13 +51,12 @@ def fail(returncode, e):
if __name__ == '__main__':
try:
conf = config.GlanceConfigOpts()
conf()
config.parse_args()
app = config.load_paste_app(conf)
app = config.load_paste_app(CONF)
server = wsgi.Server()
server.start(app, conf, default_port=9292)
server.start(app, CONF, default_port=9292)
server.wait()
except exception.WorkerCreationFailure, e:
fail(2, e)

View File

@ -47,14 +47,17 @@ if os.path.exists(os.path.join(possible_topdir, 'glance', '__init__.py')):
gettext.install('glance', unicode=1)
from glance.common import config
from glance.openstack.common import cfg
CONF = cfg.CONF
if __name__ == '__main__':
try:
conf = config.GlanceCacheConfigOpts()
conf()
config.parse_cache_args()
app = config.load_paste_app(conf, 'glance-cleaner')
app = config.load_paste_app(CONF,
'glance-cleaner', 'glance-cache-paste.ini')
app.run()
except RuntimeError, e:
sys.exit("ERROR: %s" % e)

View File

@ -38,14 +38,17 @@ if os.path.exists(os.path.join(possible_topdir, 'glance', '__init__.py')):
gettext.install('glance', unicode=1)
from glance.common import config
from glance.openstack.common import cfg
CONF = cfg.CONF
if __name__ == '__main__':
try:
conf = config.GlanceCacheConfigOpts()
conf()
config.parse_cache_args()
app = config.load_paste_app(conf, 'glance-prefetcher')
app = config.load_paste_app(CONF, 'glance-prefetcher',
'glance-cache-paste.ini')
app.run()
except RuntimeError, e:
sys.exit("ERROR: %s" % e)

View File

@ -39,14 +39,17 @@ if os.path.exists(os.path.join(possible_topdir, 'glance', '__init__.py')):
gettext.install('glance', unicode=1)
from glance.common import config
from glance.openstack.common import cfg
CONF = cfg.CONF
if __name__ == '__main__':
try:
conf = config.GlanceCacheConfigOpts()
conf()
config.parse_cache_args()
app = config.load_paste_app(conf, 'glance-pruner')
app = config.load_paste_app(CONF,
'glance-pruner', 'glance-cache-paste.ini')
app.run()
except RuntimeError, e:
sys.exit("ERROR: %s" % e)

View File

@ -46,6 +46,8 @@ gettext.install('glance', unicode=1)
from glance.common import config
from glance.openstack.common import cfg
CONF = cfg.CONF
ALL_COMMANDS = ['start', 'stop', 'shutdown', 'restart',
'reload', 'force-reload']
ALL_SERVERS = ['glance-api', 'glance-registry', 'glance-scrubber']
@ -77,11 +79,11 @@ def gated_by(predicate):
return wrap
def pid_files(server, conf):
def pid_files(server, pid_file):
pid_files = []
if conf.pid_file:
if os.path.exists(os.path.abspath(conf.pid_file)):
pid_files = [os.path.abspath(conf.pid_file)]
if pid_file:
if os.path.exists(os.path.abspath(pid_file)):
pid_files = [os.path.abspath(pid_file)]
else:
if os.path.exists('/var/run/glance/%s.pid' % server):
pid_files = ['/var/run/glance/%s.pid' % server]
@ -90,9 +92,9 @@ def pid_files(server, conf):
yield pid_file, pid
def do_start(verb, server, conf, args):
def do_start(verb, server, args):
if verb != 'Respawn':
for pid_file, pid in pid_files(server, conf):
for pid_file, pid in pid_files(server, CONF.pid_file):
if os.path.exists('/proc/%s' % pid):
print ("%s appears to already be running: %s" %
(server, pid_file))
@ -143,23 +145,23 @@ def do_start(verb, server, conf, args):
except OSError:
pass
def redirect_stdio(conf, server):
def redirect_stdio(server, capture_output):
input = [sys.stdin.fileno()]
output = [sys.stdout.fileno(), sys.stderr.fileno()]
redirect_to_null(input)
if conf.capture_output:
if capture_output:
redirect_to_syslog(output, server)
else:
redirect_to_null(output)
@gated_by(conf.capture_output)
@gated_by(CONF.capture_output)
def close_stdio_on_exec():
fds = [sys.stdin.fileno(), sys.stdout.fileno(), sys.stderr.fileno()]
for desc in fds: # set close on exec flag
fcntl.fcntl(desc, fcntl.F_SETFD, fcntl.FD_CLOEXEC)
def launch(pid_file, conf_file=None):
def launch(pid_file, conf_file=None, capture_output=False, await_time=0):
args = [server]
print '%sing %s' % (verb, server),
if conf_file:
@ -172,7 +174,7 @@ def do_start(verb, server, conf, args):
pid = os.fork()
if pid == 0:
os.setsid()
redirect_stdio(conf, server)
redirect_stdio(server, capture_output)
try:
os.execlp('%s' % server, *args)
except OSError, e:
@ -181,12 +183,12 @@ def do_start(verb, server, conf, args):
sys.exit(0)
else:
write_pid_file(pid_file, pid)
await_child(pid)
await_child(pid, await_time)
return pid
@gated_by(conf.await_child)
def await_child(pid):
bail_time = time.time() + conf.await_child
@gated_by(CONF.await_child)
def await_child(pid, await_time):
bail_time = time.time() + await_time
while time.time() < bail_time:
reported_pid, status = os.waitpid(pid, os.WNOHANG)
if reported_pid == pid:
@ -195,28 +197,28 @@ def do_start(verb, server, conf, args):
break
time.sleep(0.05)
pid_file = get_pid_file(server, conf)
pid_file = get_pid_file(server, CONF.pid_file)
conf_file = None
if args and os.path.exists(args[0]):
conf_file = os.path.abspath(os.path.expanduser(args[0]))
return launch(pid_file, conf_file)
return launch(pid_file, conf_file, CONF.capture_output, CONF.await_child)
def get_pid_file(pid, conf):
return (os.path.abspath(conf.pid_file) if conf.pid_file else
def get_pid_file(pid, pid_file):
return (os.path.abspath(pid_file) if pid_file else
'/var/run/glance/%s.pid' % server)
def do_stop(server, conf, args, graceful=False):
def do_stop(server, args, graceful=False):
if graceful and server in GRACEFUL_SHUTDOWN_SERVERS:
sig = signal.SIGHUP
else:
sig = signal.SIGTERM
did_anything = False
pfiles = pid_files(server, conf)
pfiles = pid_files(server, CONF.pid_file)
for pid_file, pid in pfiles:
did_anything = True
try:
@ -243,8 +245,6 @@ def do_stop(server, conf, args, graceful=False):
if __name__ == '__main__':
exitcode = 0
conf = config.GlanceConfigOpts(usage=USAGE)
opts = [
cfg.StrOpt('pid-file',
metavar='PATH',
@ -264,12 +264,12 @@ if __name__ == '__main__':
default=False,
help='Restart service on unexpected death'),
]
conf.register_cli_opts(opts)
CONF.register_cli_opts(opts)
args = conf()
args = config.parse_args(usage=USAGE)
@gated_by(conf.await_child)
@gated_by(conf.respawn)
@gated_by(CONF.await_child)
@gated_by(CONF.respawn)
def mutually_exclusive():
sys.stderr.write('--await-child and --respawn are mutually exclusive')
sys.exit(1)
@ -277,7 +277,7 @@ if __name__ == '__main__':
mutually_exclusive()
if len(args) < 2:
conf.print_usage()
CONF.print_usage()
sys.exit(1)
server = args.pop(0).lower()
@ -301,19 +301,19 @@ if __name__ == '__main__':
"command in this list: %(command_list)s" % locals())
sys.exit(msg)
@gated_by(conf.respawn)
@gated_by(CONF.respawn)
def anticipate_respawn(children):
while children:
pid, status = os.wait()
if pid in children:
(server, conf, args) = children.pop(pid)
pid_file = get_pid_file(server, conf)
(server, args) = children.pop(pid)
pid_file = get_pid_file(server, CONF.pid_file)
running = os.path.exists(pid_file)
one_second_ago = time.time() - 1
bouncing = (running and
os.path.getmtime(pid_file) >= one_second_ago)
if running and not bouncing:
args = (server, conf, args)
args = (server, args)
new_pid = do_start('Respawn', *args)
children[new_pid] = args
else:
@ -323,7 +323,7 @@ if __name__ == '__main__':
if command == 'start':
children = {}
for server in servers:
args = (server, conf, args)
args = (server, args)
pid = do_start('Start', *args)
children[pid] = args
@ -331,21 +331,21 @@ if __name__ == '__main__':
if command == 'stop':
for server in servers:
do_stop(server, conf, args)
do_stop(server, args)
if command == 'shutdown':
for server in servers:
do_stop(server, conf, args, graceful=True)
do_stop(server, args, graceful=True)
if command == 'restart':
for server in servers:
do_stop(server, conf, args)
do_stop(server, args)
for server in servers:
do_start('Restart', server, conf, args)
do_start('Restart', server, args)
if command == 'reload' or command == 'force-reload':
for server in servers:
do_stop(server, conf, args, graceful=True)
do_start(server, conf, args)
do_stop(server, args, graceful=True)
do_start(server, args)
sys.exit(exitcode)

View File

@ -47,49 +47,51 @@ import glance.db
import glance.db.api
import glance.db.migration
CONF = cfg.CONF
def do_db_version(conf, args):
def do_db_version(args):
"""Print database's current migration level"""
print glance.db.migration.db_version(conf)
print glance.db.migration.db_version(CONF)
def do_upgrade(conf, args):
def do_upgrade(args):
"""Upgrade the database's migration level"""
version = args.pop(0) if args else None
glance.db.migration.upgrade(conf, version)
glance.db.migration.upgrade(CONF, version)
def do_downgrade(conf, args):
def do_downgrade(args):
"""Downgrade the database's migration level"""
if not args:
raise exception.MissingArgumentError(
"downgrade requires a version argument")
version = args.pop(0)
glance.db.migration.downgrade(conf, version)
glance.db.migration.downgrade(CONF, version)
def do_version_control(conf, args):
def do_version_control(args):
"""Place a database under migration control"""
version = args.pop(0) if args else None
glance.db.migration.version_control(conf, version)
glance.db.migration.version_control(CONF, version)
def do_db_sync(conf, args):
def do_db_sync(args):
"""
Place a database under migration control and upgrade,
creating first if necessary.
"""
# override auto-create flag, as complete DB should always
# be created on sync if not already existing
conf.db_auto_create = True
glance.db.api.configure_db(conf)
CONF.db_auto_create = True
glance.db.api.configure_db(CONF)
version = args.pop(0) if args else None
current_version = args.pop(0) if args else None
glance.db.migration.db_sync(conf, version, current_version)
glance.db.migration.db_sync(CONF, version, current_version)
def dispatch_cmd(conf, args):
def dispatch_cmd(args):
"""Search for do_* cmd in this module and then run it"""
cmd = args.pop(0)
try:
@ -98,7 +100,7 @@ def dispatch_cmd(conf, args):
sys.exit("ERROR: unrecognized command '%s'" % cmd)
try:
cmd_func(conf, args)
cmd_func(args)
except exception.GlanceException, e:
sys.exit("ERROR: %s" % e)
@ -107,22 +109,22 @@ def main():
try:
# We load the glance-registry config section because
# sql_connection is only part of the glance registry.
glance.db.add_cli_options()
default_cfg_files = cfg.find_config_files(project='glance',
prog='glance-registry')
conf = config.GlanceConfigOpts(default_config_files=default_cfg_files,
usage="%prog [options] <cmd>")
glance.db.add_options(conf)
args = conf()
config.setup_logging(conf)
args = config.parse_args(default_config_files=default_cfg_files,
usage="%prog [options] <cmd>")
config.setup_logging(CONF)
except RuntimeError, e:
sys.exit("ERROR: %s" % e)
if not args:
conf.print_usage()
CONF.print_usage()
sys.exit(1)
dispatch_cmd(conf, args)
dispatch_cmd(args)
if __name__ == '__main__':

View File

@ -38,17 +38,19 @@ gettext.install('glance', unicode=1)
from glance.common import config
from glance.common import wsgi
from glance.openstack.common import cfg
CONF = cfg.CONF
if __name__ == '__main__':
try:
conf = config.GlanceConfigOpts()
conf()
config.parse_args()
app = config.load_paste_app(conf)
app = config.load_paste_app(CONF)
server = wsgi.Server()
server.start(app, conf, default_port=9191)
server.start(app, CONF, default_port=9191)
server.wait()
except RuntimeError, e:
sys.exit("ERROR: %s" % e)

View File

@ -38,11 +38,12 @@ from glance.common import config
from glance.openstack.common import cfg
from glance.store import scrubber
CONF = cfg.CONF
if __name__ == '__main__':
try:
conf = config.GlanceConfigOpts()
conf.register_cli_opt(
CONF.register_cli_opt(
cfg.BoolOpt('daemon',
short='D',
default=False,
@ -51,13 +52,14 @@ if __name__ == '__main__':
'once and then exits. When specified do not exit '
'and run scrub on wakeup_time interval as '
'specified in the config.'))
conf.register_opt(cfg.IntOpt('wakeup_time', default=300))
conf()
CONF.register_opt(cfg.IntOpt('wakeup_time', default=300))
app = config.load_paste_app(conf, 'glance-scrubber')
config.parse_args()
if conf.daemon:
server = scrubber.Daemon(conf.wakeup_time)
app = config.load_paste_app(CONF, 'glance-scrubber')
if CONF.daemon:
server = scrubber.Daemon(CONF.wakeup_time)
server.start(app)
server.wait()
else:

View File

@ -27,19 +27,19 @@ from glance.openstack.common import cfg
logger = logging.getLogger(__name__)
policy_opts = (
cfg.StrOpt('policy_file', default=None),
cfg.StrOpt('policy_default_rule', default='default'),
)
CONF = cfg.CONF
CONF.register_opts(policy_opts)
class Enforcer(object):
"""Responsible for loading and enforcing rules"""
policy_opts = (
cfg.StrOpt('policy_file', default=None),
cfg.StrOpt('policy_default_rule', default='default'),
)
def __init__(self, conf):
for opt in self.policy_opts:
conf.register_opt(opt)
self.default_rule = conf.policy_default_rule
self.policy_path = self._find_policy_file(conf)
self.policy_file_mtime = None

View File

@ -66,6 +66,9 @@ IMAGE_SIZE_CAP = 1 << 50
# identity check (not equality).
default_store_opt = cfg.StrOpt('default_store', default='file')
CONF = cfg.CONF
CONF.register_opt(default_store_opt)
class Controller(controller.BaseController):
"""
@ -88,7 +91,6 @@ class Controller(controller.BaseController):
def __init__(self, conf):
self.conf = conf
self.conf.register_opt(default_store_opt)
create_stores(self.conf)
self.verify_scheme_or_exit(self.conf.default_store)
self.notifier = notifier.Notifier(conf)

View File

@ -30,8 +30,6 @@ from glance.common import wsgi
from glance.openstack.common import cfg
from glance import version
paste_deploy_group = cfg.OptGroup('paste_deploy')
paste_deploy_opts = [
cfg.StrOpt('flavor'),
cfg.StrOpt('config_file'),
@ -42,26 +40,22 @@ common_opts = [
'beyond what the image schema provides'),
]
class GlanceConfigOpts(cfg.CommonConfigOpts):
def __init__(self, default_config_files=None, **kwargs):
super(GlanceConfigOpts, self).__init__(
project='glance',
version='%%prog %s' % version.version_string(),
default_config_files=default_config_files,
**kwargs)
self.register_opts(common_opts)
self.default_paste_file = self.prog + '-paste.ini'
CONF = cfg.CONF
CONF.register_opts(paste_deploy_opts, group='paste_deploy')
CONF.register_opts(common_opts)
class GlanceCacheConfigOpts(GlanceConfigOpts):
def parse_args(args=None, usage=None, default_config_files=None):
return CONF(args=args,
project='glance',
version='%%prog %s' % version.version_string(),
usage=usage,
default_config_files=default_config_files)
def __init__(self, **kwargs):
config_files = cfg.find_config_files(project='glance',
prog='glance-cache')
super(GlanceCacheConfigOpts, self).__init__(config_files, **kwargs)
self.default_paste_file = 'glance-cache-paste.ini'
def parse_cache_args(args=None):
config_files = cfg.find_config_files(project='glance', prog='glance-cache')
return parse_args(args=args, default_config_files=config_files)
def setup_logging(conf):
@ -111,16 +105,6 @@ def setup_logging(conf):
root_logger.addHandler(handler)
def _register_paste_deploy_opts(conf):
"""
Idempotent registration of paste_deploy option group
:param conf: a cfg.ConfigOpts object
"""
conf.register_group(paste_deploy_group)
conf.register_opts(paste_deploy_opts, group=paste_deploy_group)
def _get_deployment_flavor(conf):
"""
Retrieve the paste_deploy.flavor config item, formatted appropriately
@ -128,42 +112,41 @@ def _get_deployment_flavor(conf):
:param conf: a cfg.ConfigOpts object
"""
_register_paste_deploy_opts(conf)
flavor = conf.paste_deploy.flavor
return '' if not flavor else ('-' + flavor)
def _get_paste_config_path(conf):
def _get_paste_config_path(conf, default_paste_file=None):
paste_suffix = '-paste.ini'
conf_suffix = '.conf'
if conf.config_file:
# Assume paste config is in a paste.ini file corresponding
# to the last config file
path = conf.config_file[-1].replace(conf_suffix, paste_suffix)
elif default_paste_file:
path = default_paste_file
else:
path = conf.default_paste_file
path = conf.prog + '-paste.ini'
return conf.find_file(os.path.basename(path))
def _get_deployment_config_file(conf):
def _get_deployment_config_file(conf, default_paste_file=None):
"""
Retrieve the deployment_config_file config item, formatted as an
absolute pathname.
:param conf: a cfg.ConfigOpts object
"""
_register_paste_deploy_opts(conf)
config_file = conf.paste_deploy.config_file
path = _get_paste_config_path(conf) if not config_file else config_file
path = conf.paste_deploy.config_file
if not path:
path = _get_paste_config_path(conf, default_paste_file)
if not path:
msg = "Unable to locate paste config file for %s." % conf.prog
raise RuntimeError(msg)
return os.path.abspath(path)
def load_paste_app(conf, app_name=None):
def load_paste_app(conf, app_name=None, default_paste_file=None):
"""
Builds and returns a WSGI app from a paste config file.
@ -183,7 +166,7 @@ def load_paste_app(conf, app_name=None):
# in order to identify the appropriate paste pipeline
app_name += _get_deployment_flavor(conf)
conf_file = _get_deployment_config_file(conf)
conf_file = _get_deployment_config_file(conf, default_paste_file)
try:
# Setup logging early

View File

@ -22,6 +22,15 @@ from glance.common import wsgi
from glance.openstack.common import cfg
from glance.db import api as db_api
context_opts = [
cfg.BoolOpt('owner_is_tenant', default=True),
cfg.StrOpt('admin_role', default='admin'),
cfg.BoolOpt('allow_anonymous_access', default=False),
]
CONF = cfg.CONF
CONF.register_opts(context_opts)
class RequestContext(object):
"""
@ -129,15 +138,8 @@ class RequestContext(object):
class ContextMiddleware(wsgi.Middleware):
opts = [
cfg.BoolOpt('owner_is_tenant', default=True),
cfg.StrOpt('admin_role', default='admin'),
cfg.BoolOpt('allow_anonymous_access', default=False),
]
def __init__(self, app, conf, **local_conf):
self.conf = conf
self.conf.register_opts(self.opts)
super(ContextMiddleware, self).__init__(app)
def process_request(self, req):

View File

@ -59,6 +59,11 @@ socket_opts = [
workers_opt = cfg.IntOpt('workers', default=0)
CONF = cfg.CONF
CONF.register_opts(bind_opts)
CONF.register_opts(socket_opts)
CONF.register_opt(workers_opt)
class WritableLogger(object):
"""A thin wrapper that responds to `write` and logs."""
@ -73,7 +78,6 @@ class WritableLogger(object):
def get_bind_addr(conf, default_port=None):
"""Return the host and port to bind to."""
conf.register_opts(bind_opts)
return (conf.bind_host, conf.bind_port or default_port)
@ -98,8 +102,6 @@ def get_socket(conf, default_port):
bind_addr[1], socket.AF_UNSPEC, socket.SOCK_STREAM)
if addr[0] in (socket.AF_INET, socket.AF_INET6)][0]
conf.register_opts(socket_opts)
cert_file = conf.cert_file
key_file = conf.key_file
use_ssl = cert_file or key_file
@ -169,7 +171,6 @@ class Server(object):
self.application = application
self.sock = get_socket(conf, default_port)
conf.register_opt(workers_opt)
self.logger = logging.getLogger('eventlet.wsgi.server')

View File

@ -19,21 +19,22 @@
from glance.openstack.common import cfg
sql_connection_opt = cfg.StrOpt('sql_connection',
default='sqlite:///glance.sqlite',
metavar='CONNECTION',
help='A valid SQLAlchemy connection '
'string for the registry database. '
'Default: %default')
def add_options(conf):
CONF = cfg.CONF
CONF.register_opt(sql_connection_opt)
def add_cli_options():
"""
Adds any configuration options that the db layer might have.
:param conf: A ConfigOpts object
:retval None
"""
conf.register_group(cfg.OptGroup('registrydb',
title='Registry Database Options',
help='The following configuration options '
'are specific to the Glance image '
'registry database.'))
conf.register_cli_opt(cfg.StrOpt('sql_connection',
metavar='CONNECTION',
help='A valid SQLAlchemy connection '
'string for the registry database. '
'Default: %default'))
CONF.unregister_opt(sql_connection_opt)
CONF.register_cli_opt(sql_connection_opt)

View File

@ -37,6 +37,7 @@ from sqlalchemy.sql import or_, and_
from glance.common import exception
from glance.common import utils
from glance.openstack.common import cfg
from glance import db
from glance.db import migration
from glance.db import models
@ -66,12 +67,14 @@ STATUSES = ['active', 'saving', 'queued', 'killed', 'pending_delete',
db_opts = [
cfg.IntOpt('sql_idle_timeout', default=3600),
cfg.StrOpt('sql_connection', default='sqlite:///glance.sqlite'),
cfg.IntOpt('sql_max_retries', default=10),
cfg.IntOpt('sql_retry_interval', default=1),
cfg.BoolOpt('db_auto_create', default=True),
]
CONF = cfg.CONF
CONF.register_opts(db_opts)
class MySQLPingListener(object):
@ -103,10 +106,6 @@ def configure_db(conf):
"""
global _ENGINE, sa_logger, logger, _MAX_RETRIES, _RETRY_INTERVAL
if not _ENGINE:
for opt in db_opts:
# avoid duplicate registration
if not opt.name in conf:
conf.register_opt(opt)
sql_connection = conf.sql_connection
_MAX_RETRIES = conf.sql_max_retries
_RETRY_INTERVAL = conf.sql_retry_interval

View File

@ -27,6 +27,17 @@ from glance.openstack.common import cfg
from glance.openstack.common import importutils
logger = logging.getLogger(__name__)
image_cache_opts = [
cfg.StrOpt('image_cache_driver', default='sqlite'),
cfg.IntOpt('image_cache_max_size', default=10 * (1024 ** 3)), # 10 GB
cfg.IntOpt('image_cache_stall_time', default=86400), # 24 hours
cfg.StrOpt('image_cache_dir'),
]
CONF = cfg.CONF
CONF.register_opts(image_cache_opts)
DEFAULT_MAX_CACHE_SIZE = 10 * 1024 * 1024 * 1024 # 10 GB
@ -34,16 +45,8 @@ class ImageCache(object):
"""Provides an LRU cache for image data."""
opts = [
cfg.StrOpt('image_cache_driver', default='sqlite'),
cfg.IntOpt('image_cache_max_size', default=10 * (1024 ** 3)), # 10 GB
cfg.IntOpt('image_cache_stall_time', default=86400), # 24 hours
cfg.StrOpt('image_cache_dir'),
]
def __init__(self, conf):
self.conf = conf
self.conf.register_opts(self.opts)
self.init_driver()
def init_driver(self):

View File

@ -34,6 +34,14 @@ from glance.image_cache.drivers import base
from glance.openstack.common import cfg
logger = logging.getLogger(__name__)
sqlite_opts = [
cfg.StrOpt('image_cache_sqlite_db', default='cache.db'),
]
CONF = cfg.CONF
CONF.register_opts(sqlite_opts)
DEFAULT_SQL_CALL_TIMEOUT = 2
@ -79,10 +87,6 @@ class Driver(base.Driver):
that has atimes set.
"""
opts = [
cfg.StrOpt('image_cache_sqlite_db', default='cache.db'),
]
def configure(self):
"""
Configure the driver to use the stored configuration options
@ -92,8 +96,6 @@ class Driver(base.Driver):
"""
super(Driver, self).configure()
self.conf.register_opts(self.opts)
# Create the SQLite database that will hold our cache attributes
self.initialize_db()

View File

@ -24,6 +24,12 @@ from glance.common import exception
from glance.openstack.common import cfg
from glance.openstack.common import importutils
notifier_opts = [
cfg.StrOpt('notifier_strategy', default='default')
]
CONF = cfg.CONF
CONF.register_opts(notifier_opts)
_STRATEGIES = {
"logging": "glance.notifier.notify_log.LoggingStrategy",
@ -37,12 +43,7 @@ _STRATEGIES = {
class Notifier(object):
"""Uses a notification strategy to send out messages about events."""
opts = [
cfg.StrOpt('notifier_strategy', default='default')
]
def __init__(self, conf, strategy=None):
conf.register_opts(self.opts)
strategy = conf.notifier_strategy
try:
strategy_cls = _STRATEGIES[strategy]

View File

@ -24,9 +24,26 @@ import kombu.entity
from glance.notifier import strategy
from glance.openstack.common import cfg
logger = logging.getLogger('glance.notifier.notify_kombu')
rabbit_opts = [
cfg.StrOpt('rabbit_host', default='localhost'),
cfg.IntOpt('rabbit_port', default=5672),
cfg.BoolOpt('rabbit_use_ssl', default=False),
cfg.StrOpt('rabbit_userid', default='guest'),
cfg.StrOpt('rabbit_password', default='guest'),
cfg.StrOpt('rabbit_virtual_host', default='/'),
cfg.StrOpt('rabbit_notification_exchange', default='glance'),
cfg.StrOpt('rabbit_notification_topic',
default='glance_notifications'),
cfg.StrOpt('rabbit_max_retries', default=0),
cfg.StrOpt('rabbit_retry_backoff', default=2),
cfg.StrOpt('rabbit_retry_max_backoff', default=30)
]
CONF = cfg.CONF
CONF.register_opts(rabbit_opts)
class KombuMaxRetriesReached(Exception):
pass
@ -35,25 +52,9 @@ class KombuMaxRetriesReached(Exception):
class RabbitStrategy(strategy.Strategy):
"""A notifier that puts a message on a queue when called."""
opts = [
cfg.StrOpt('rabbit_host', default='localhost'),
cfg.IntOpt('rabbit_port', default=5672),
cfg.BoolOpt('rabbit_use_ssl', default=False),
cfg.StrOpt('rabbit_userid', default='guest'),
cfg.StrOpt('rabbit_password', default='guest'),
cfg.StrOpt('rabbit_virtual_host', default='/'),
cfg.StrOpt('rabbit_notification_exchange', default='glance'),
cfg.StrOpt('rabbit_notification_topic',
default='glance_notifications'),
cfg.StrOpt('rabbit_max_retries', default=0),
cfg.StrOpt('rabbit_retry_backoff', default=2),
cfg.StrOpt('rabbit_retry_max_backoff', default=30)
]
def __init__(self, conf):
"""Initialize the rabbit notification strategy."""
self._conf = conf
self._conf.register_opts(self.opts)
self.topic = self._conf.rabbit_notification_topic
self.max_retries = self._conf.rabbit_max_retries

View File

@ -22,10 +22,8 @@ import qpid.messaging
from glance.notifier import strategy
from glance.openstack.common import cfg
logger = logging.getLogger('glance.notifier.notify_qpid')
qpid_opts = [
cfg.StrOpt('qpid_notification_exchange',
default='glance',
@ -74,6 +72,9 @@ qpid_opts = [
help='Disable Nagle algorithm'),
]
CONF = cfg.CONF
CONF.register_opts(qpid_opts)
class QpidStrategy(strategy.Strategy):
"""A notifier that puts a message on a queue when called."""
@ -81,7 +82,6 @@ class QpidStrategy(strategy.Strategy):
def __init__(self, conf):
"""Initialize the Qpid notification strategy."""
self.conf = conf
self.conf.register_opts(qpid_opts)
self.broker = self.conf.qpid_hostname + ":" + self.conf.qpid_port
self.connection = qpid.messaging.Connection(self.broker)

View File

@ -95,7 +95,7 @@ and --config-dir::
class ConfigOpts(object):
def __init__(self, ...):
def __call__(self, ...):
opts = [
MultiStrOpt('config-file',
@ -233,6 +233,22 @@ log files:
...
]
This module also contains a global instance of the CommonConfigOpts class
in order to support a common usage pattern in OpenStack:
from openstack.common import cfg
opts = [
cfg.StrOpt('bind_host', default='0.0.0.0'),
cfg.IntOpt('bind_port', default=9292),
]
CONF = cfg.CONF
CONF.register_opts(opts)
def start(server, app):
server.start(app, CONF.bind_port, CONF.bind_host)
"""
import collections
@ -768,6 +784,14 @@ class OptGroup(object):
return True
def _unregister_opt(self, opt):
"""Remove an opt from this group.
:param opt: an Opt object
"""
if opt.dest in self._opts:
del self._opts[opt.dest]
def _get_optparse_group(self, parser):
"""Build an optparse.OptionGroup for this group."""
if self._optparse_group is None:
@ -775,6 +799,10 @@ class OptGroup(object):
self.help)
return self._optparse_group
def _clear(self):
"""Clear this group's option parsing state."""
self._optparse_group = None
class ParseError(iniparser.ParseError):
def __init__(self, msg, lineno, line, filename):
@ -849,57 +877,41 @@ class ConfigOpts(collections.Mapping):
the values of options.
"""
def __init__(self,
project=None,
prog=None,
version=None,
usage=None,
default_config_files=None):
"""Construct a ConfigOpts object.
def __init__(self):
"""Construct a ConfigOpts object."""
self._opts = {} # dict of dicts of (opt:, override:, default:)
self._groups = {}
Automatically registers the --config-file option with either a supplied
list of default config files, or a list from find_config_files().
self._args = None
self._oparser = None
self._cparser = None
self._cli_values = {}
self.__cache = {}
self._config_opts = []
self._disable_interspersed_args = False
:param project: the toplevel project name, used to locate config files
:param prog: the name of the program (defaults to sys.argv[0] basename)
:param version: the program version (for --version)
:param usage: a usage string (%prog will be expanded)
:param default_config_files: config files to use by default
"""
def _setup(self, project, prog, version, usage, default_config_files):
"""Initialize a ConfigOpts object for option parsing."""
if prog is None:
prog = os.path.basename(sys.argv[0])
if default_config_files is None:
default_config_files = find_config_files(project, prog)
self.project = project
self.prog = prog
self.version = version
self.usage = usage
self.default_config_files = default_config_files
self._oparser = optparse.OptionParser(prog=prog,
version=version,
usage=usage)
if self._disable_interspersed_args:
self._oparser.disable_interspersed_args()
self._opts = {} # dict of dicts of (opt:, override:, default:)
self._groups = {}
self._args = None
self._cli_values = {}
self._oparser = optparse.OptionParser(prog=self.prog,
version=self.version,
usage=self.usage)
self._cparser = None
self.__cache = {}
opts = [
self._config_opts = [
MultiStrOpt('config-file',
default=self.default_config_files,
default=default_config_files,
metavar='PATH',
help='Path to a config file to use. Multiple config '
'files can be specified, with values in later '
'files taking precedence. The default files '
' used are: %s' %
(self.default_config_files, )),
' used are: %s' % (default_config_files, )),
StrOpt('config-dir',
metavar='DIR',
help='Path to a config directory to pull *.conf '
@ -910,7 +922,13 @@ class ConfigOpts(collections.Mapping):
'hence over-ridden options in the directory take '
'precedence.'),
]
self.register_cli_opts(opts)
self.register_cli_opts(self._config_opts)
self.project = project
self.prog = prog
self.version = version
self.usage = usage
self.default_config_files = default_config_files
def __clear_cache(f):
@functools.wraps(f)
@ -921,7 +939,13 @@ class ConfigOpts(collections.Mapping):
return __inner
def __call__(self, args=None):
def __call__(self,
args=None,
project=None,
prog=None,
version=None,
usage=None,
default_config_files=None):
"""Parse command line arguments and config files.
Calling a ConfigOpts object causes the supplied command line arguments
@ -931,35 +955,34 @@ class ConfigOpts(collections.Mapping):
The object may be called multiple times, each time causing the previous
set of values to be overwritten.
Automatically registers the --config-file option with either a supplied
list of default config files, or a list from find_config_files().
If the --config-dir option is set, any *.conf files from this
directory are pulled in, after all the file(s) specified by the
--config-file option.
:params args: command line arguments (defaults to sys.argv[1:])
:param args: command line arguments (defaults to sys.argv[1:])
:param project: the toplevel project name, used to locate config files
:param prog: the name of the program (defaults to sys.argv[0] basename)
:param version: the program version (for --version)
:param usage: a usage string (%prog will be expanded)
:param default_config_files: config files to use by default
:returns: the list of arguments left over after parsing options
:raises: SystemExit, ConfigFilesNotFoundError, ConfigFileParseError,
RequiredOptError
RequiredOptError, DuplicateOptError
"""
self.clear()
self._args = args
self._setup(project, prog, version, usage, default_config_files)
(values, args) = self._oparser.parse_args(self._args)
self._cli_values, leftovers = self._parse_cli_opts(args)
self._cli_values = vars(values)
def _list_config_dir():
return sorted(glob.glob(os.path.join(self.config_dir, '*.conf')))
from_file = list(self.config_file)
from_dir = _list_config_dir() if self.config_dir else []
self._parse_config_files(from_file + from_dir)
self._parse_config_files()
self._check_required_opts()
return args
return leftovers
def __getattr__(self, name):
"""Look up an option value and perform string substitution.
@ -996,8 +1019,12 @@ class ConfigOpts(collections.Mapping):
def clear(self):
"""Clear the state of the object to before it was called."""
self._args = None
self._cli_values = {}
self._cli_values.clear()
self._oparser = None
self._cparser = None
self.unregister_opts(self._config_opts)
for group in self._groups.values():
group._clear()
@__clear_cache
def register_opt(self, opt, group=None):
@ -1044,15 +1071,7 @@ class ConfigOpts(collections.Mapping):
if self._args is not None:
raise ArgsAlreadyParsedError("cannot register CLI option")
if not self.register_opt(opt, group, clear_cache=False):
return False
if group is not None:
group = self._get_group(group, autocreate=True)
opt._add_to_cli(self._oparser, group)
return True
return self.register_opt(opt, group, clear_cache=False)
@__clear_cache
def register_cli_opts(self, opts, group=None):
@ -1073,6 +1092,28 @@ class ConfigOpts(collections.Mapping):
self._groups[group.name] = copy.copy(group)
@__clear_cache
def unregister_opt(self, opt, group=None):
"""Unregister an option.
:param opt: an Opt object
:param group: an optional OptGroup object or group name
:raises: ArgsAlreadyParsedError, NoSuchGroupError
"""
if self._args is not None:
raise ArgsAlreadyParsedError("reset before unregistering options")
if group is not None:
self._get_group(group)._unregister_opt(opt)
elif opt.dest in self._opts:
del self._opts[opt.dest]
@__clear_cache
def unregister_opts(self, opts, group=None):
"""Unregister multiple CLI option schemas at once."""
for opt in opts:
self.unregister_opt(opt, group, clear_cache=False)
@__clear_cache
def set_override(self, name, override, group=None):
"""Override an opt value.
@ -1103,16 +1144,24 @@ class ConfigOpts(collections.Mapping):
opt_info = self._get_opt_info(name, group)
opt_info['default'] = default
def _all_opt_infos(self):
"""A generator function for iteration opt infos."""
for info in self._opts.values():
yield info, None
for group in self._groups.values():
for info in group._opts.values():
yield info, group
def _all_opts(self):
"""A generator function for iteration opts."""
for info, group in self._all_opt_infos():
yield info['opt'], group
def _unset_defaults_and_overrides(self):
"""Unset any default or override on all options."""
def unset(opts):
for info in opts.values():
info['default'] = None
info['override'] = None
unset(self._opts)
for group in self._groups.values():
unset(group._opts)
for info, group in self._all_opt_infos():
info['default'] = None
info['override'] = None
def disable_interspersed_args(self):
"""Set parsing to stop on the first non-option.
@ -1131,13 +1180,13 @@ class ConfigOpts(collections.Mapping):
i.e. argument parsing is stopped at the first non-option argument.
"""
self._oparser.disable_interspersed_args()
self._disable_interspersed_args = True
def enable_interspersed_args(self):
"""Set parsing to not stop on the first non-option.
This is the default behaviour."""
self._oparser.enable_interspersed_args()
self._disable_interspersed_args = False
def find_file(self, name):
"""Locate a file located alongside the config files.
@ -1331,11 +1380,17 @@ class ConfigOpts(collections.Mapping):
return opts[opt_name]
def _parse_config_files(self, config_files):
"""Parse the supplied configuration files.
def _parse_config_files(self):
"""Parse the config files from --config-file and --config-dir.
:raises: ConfigFilesNotFoundError, ConfigFileParseError
"""
config_files = list(self.config_file)
if self.config_dir:
config_dir_glob = os.path.join(self.config_dir, '*.conf')
config_files += sorted(glob.glob(config_dir_glob))
self._cparser = MultiConfigParser()
try:
@ -1347,8 +1402,12 @@ class ConfigOpts(collections.Mapping):
not_read_ok = filter(lambda f: f not in read_ok, config_files)
raise ConfigFilesNotFoundError(not_read_ok)
def _do_check_required_opts(self, opts, group=None):
for info in opts.values():
def _check_required_opts(self):
"""Check that all opts marked as required have values specified.
:raises: RequiredOptError
"""
for info, group in self._all_opt_infos():
default, opt, override = [info[k] for k in sorted(info.keys())]
if opt.required:
@ -1359,15 +1418,25 @@ class ConfigOpts(collections.Mapping):
if self._get(opt.name, group) is None:
raise RequiredOptError(opt.name, group)
def _check_required_opts(self):
"""Check that all opts marked as required have values specified.
def _parse_cli_opts(self, args):
"""Parse command line options.
Initializes the command line option parser and parses the supplied
command line arguments.
:param args: the command line arguments
:returns: a dict of parsed option values
:raises: SystemExit, DuplicateOptError
:raises: RequiredOptError
"""
self._do_check_required_opts(self._opts)
self._args = args
for group in self._groups.values():
self._do_check_required_opts(group._opts, group)
for opt, group in self._all_opts():
opt._add_to_cli(self._oparser, group)
values, leftovers = self._oparser.parse_args(args)
return vars(values), leftovers
class GroupAttr(collections.Mapping):
@ -1483,7 +1552,10 @@ class CommonConfigOpts(ConfigOpts):
help='syslog facility to receive log lines')
]
def __init__(self, **kwargs):
super(CommonConfigOpts, self).__init__(**kwargs)
def __init__(self):
super(CommonConfigOpts, self).__init__()
self.register_cli_opts(self.common_cli_opts)
self.register_cli_opts(self.logging_cli_opts)
CONF = CommonConfigOpts()
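
Taken together, the cfg.py changes move project/prog/version/usage from the
constructor to __call__() and add unregister support, so the single global
instance can be reused and reconfigured. A rough usage sketch (the option and
program names are chosen for illustration):

    from glance.openstack.common import cfg

    CONF = cfg.CONF    # the module-level CommonConfigOpts instance

    opt = cfg.StrOpt('sql_connection', default='sqlite:///glance.sqlite')
    CONF.register_opt(opt)

    # Parsing parameters are supplied at call time now, not at construction,
    # so one global object can serve every glance program.
    CONF(args=[], project='glance', prog='glance-manage',
         usage='%prog [options] <cmd>')

    assert CONF.sql_connection == 'sqlite:///glance.sqlite'

    # New in this commit: opts can be unregistered again, which is how
    # glance/db re-registers sql_connection as a CLI opt (see above).
    CONF.clear()
    CONF.unregister_opt(opt)
    CONF.register_cli_opt(opt)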

View File

@ -28,14 +28,6 @@ from glance.registry import client
logger = logging.getLogger('glance.registry')
_CLIENT_CREDS = None
_CLIENT_HOST = None
_CLIENT_PORT = None
_CLIENT_KWARGS = {}
# AES key used to encrypt 'location' metadata
_METADATA_ENCRYPTION_KEY = None
registry_addr_opts = [
cfg.StrOpt('registry_host', default='0.0.0.0'),
cfg.IntOpt('registry_port', default=9191),
@ -56,9 +48,20 @@ registry_client_ctx_opts = [
cfg.StrOpt('auth_region'),
]
CONF = cfg.CONF
CONF.register_opts(registry_addr_opts)
CONF.register_opts(registry_client_opts)
CONF.register_opts(registry_client_ctx_opts)
_CLIENT_CREDS = None
_CLIENT_HOST = None
_CLIENT_PORT = None
_CLIENT_KWARGS = {}
# AES key used to encrypt 'location' metadata
_METADATA_ENCRYPTION_KEY = None
def get_registry_addr(conf):
conf.register_opts(registry_addr_opts)
return (conf.registry_host, conf.registry_port)
@ -80,8 +83,6 @@ def configure_registry_client(conf):
logger.error(msg)
raise exception.BadRegistryConnectionConfiguration(msg)
conf.register_opts(registry_client_opts)
_CLIENT_HOST = host
_CLIENT_PORT = port
_METADATA_ENCRYPTION_KEY = conf.metadata_encryption_key
@ -95,7 +96,6 @@ def configure_registry_client(conf):
def configure_registry_admin_creds(conf):
global _CLIENT_CREDS
conf.register_opts(registry_client_ctx_opts)
if conf.auth_url or os.getenv('OS_AUTH_URL'):
strategy = 'keystone'

View File

@ -29,9 +29,16 @@ from glance.common import wsgi
from glance.openstack.common import cfg
from glance.db import api as db_api
logger = logging.getLogger('glance.registry.api.v1.images')
images_opts = [
cfg.IntOpt('limit_param_default', default=25),
cfg.IntOpt('api_limit_max', default=1000),
]
CONF = cfg.CONF
CONF.register_opts(images_opts)
DISPLAY_FIELDS_IN_INDEX = ['id', 'name', 'size',
'disk_format', 'container_format',
'checksum']
@ -50,14 +57,8 @@ SUPPORTED_PARAMS = ('limit', 'marker', 'sort_key', 'sort_dir')
class Controller(object):
opts = [
cfg.IntOpt('limit_param_default', default=25),
cfg.IntOpt('api_limit_max', default=1000),
]
def __init__(self, conf):
self.conf = conf
self.conf.register_opts(self.opts)
db_api.configure_db(conf)
def _get_images(self, context, **params):

View File

@ -29,6 +29,18 @@ from glance.store import location
logger = logging.getLogger('glance.store')
store_opts = [
cfg.ListOpt('known_stores',
default=['glance.store.filesystem.Store', ]),
cfg.StrOpt('scrubber_datadir',
default='/var/lib/glance/scrubber'),
cfg.BoolOpt('delayed_delete', default=False),
cfg.IntOpt('scrub_time', default=0),
]
CONF = cfg.CONF
CONF.register_opts(store_opts)
# Set of store objects, constructed in create_stores()
STORES = {}
@ -138,16 +150,11 @@ def _get_store_class(store_entry):
return store_cls
known_stores_opt = cfg.ListOpt('known_stores',
default=('glance.store.filesystem.Store',))
def create_stores(conf):
"""
Registers all store modules and all schemes
from the given config. Duplicates are not re-registered.
"""
conf.register_opt(known_stores_opt)
store_count = 0
for store_entry in conf.known_stores:
store_entry = store_entry.strip()
@ -242,26 +249,14 @@ def get_store_from_location(uri):
return loc.store_name
scrubber_datadir_opt = cfg.StrOpt('scrubber_datadir',
default='/var/lib/glance/scrubber')
def get_scrubber_datadir(conf):
conf.register_opt(scrubber_datadir_opt)
return conf.scrubber_datadir
delete_opts = [
cfg.BoolOpt('delayed_delete', default=False),
cfg.IntOpt('scrub_time', default=0)
]
def schedule_delete_from_backend(uri, conf, context, image_id, **kwargs):
"""
Given a uri and a time, schedule the deletion of an image.
"""
conf.register_opts(delete_opts)
if not conf.delayed_delete:
registry.update_image_metadata(context, image_id,
{'status': 'deleted'})

View File

@ -34,6 +34,11 @@ import glance.store.location
logger = logging.getLogger('glance.store.filesystem')
datadir_opt = cfg.StrOpt('filesystem_store_datadir')
CONF = cfg.CONF
CONF.register_opt(datadir_opt)
class StoreLocation(glance.store.location.StoreLocation):
@ -96,8 +101,6 @@ class ChunkedFile(object):
class Store(glance.store.base.Store):
datadir_opt = cfg.StrOpt('filesystem_store_datadir')
def get_schemes(self):
return ('file', 'filesystem')
@ -108,8 +111,6 @@ class Store(glance.store.base.Store):
this method. If the store was not able to successfully configure
itself, it should raise `exception.BadStoreConfiguration`
"""
self.conf.register_opt(self.datadir_opt)
self.datadir = self.conf.filesystem_store_datadir
if self.datadir is None:
reason = (_("Could not find %s in configuration options.") %

View File

@ -43,6 +43,16 @@ DEFAULT_CHUNKSIZE = 4 # in MiB
logger = logging.getLogger('glance.store.rbd')
rbd_opts = [
cfg.IntOpt('rbd_store_chunk_size', default=DEFAULT_CHUNKSIZE),
cfg.StrOpt('rbd_store_pool', default=DEFAULT_POOL),
cfg.StrOpt('rbd_store_user', default=DEFAULT_USER),
cfg.StrOpt('rbd_store_ceph_conf', default=DEFAULT_CONFFILE),
]
CONF = cfg.CONF
CONF.register_opts(rbd_opts)
class StoreLocation(glance.store.location.StoreLocation):
"""
@ -101,13 +111,6 @@ class Store(glance.store.base.Store):
EXAMPLE_URL = "rbd://<IMAGE>"
opts = [
cfg.IntOpt('rbd_store_chunk_size', default=DEFAULT_CHUNKSIZE),
cfg.StrOpt('rbd_store_pool', default=DEFAULT_POOL),
cfg.StrOpt('rbd_store_user', default=DEFAULT_USER),
cfg.StrOpt('rbd_store_ceph_conf', default=DEFAULT_CONFFILE),
]
def get_schemes(self):
return ('rbd',)
@ -118,7 +121,6 @@ class Store(glance.store.base.Store):
this method. If the store was not able to successfully configure
itself, it should raise `exception.BadStoreConfiguration`
"""
self.conf.register_opts(self.opts)
try:
self.chunk_size = self.conf.rbd_store_chunk_size * 1024 * 1024

View File

@ -33,6 +33,18 @@ import glance.store.location
logger = logging.getLogger('glance.store.s3')
s3_opts = [
cfg.StrOpt('s3_store_host'),
cfg.StrOpt('s3_store_access_key', secret=True),
cfg.StrOpt('s3_store_secret_key', secret=True),
cfg.StrOpt('s3_store_bucket'),
cfg.StrOpt('s3_store_object_buffer_dir'),
cfg.BoolOpt('s3_store_create_bucket_on_put', default=False),
]
CONF = cfg.CONF
CONF.register_opts(s3_opts)
class StoreLocation(glance.store.location.StoreLocation):
@ -189,15 +201,6 @@ class Store(glance.store.base.Store):
EXAMPLE_URL = "s3://<ACCESS_KEY>:<SECRET_KEY>@<S3_URL>/<BUCKET>/<OBJ>"
opts = [
cfg.StrOpt('s3_store_host'),
cfg.StrOpt('s3_store_access_key', secret=True),
cfg.StrOpt('s3_store_secret_key', secret=True),
cfg.StrOpt('s3_store_bucket'),
cfg.StrOpt('s3_store_object_buffer_dir'),
cfg.BoolOpt('s3_store_create_bucket_on_put', default=False),
]
def get_schemes(self):
return ('s3', 's3+http', 's3+https')
@ -208,7 +211,6 @@ class Store(glance.store.base.Store):
this method. If the store was not able to successfully configure
itself, it should raise `exception.BadStoreConfiguration`
"""
self.conf.register_opts(self.opts)
self.s3_host = self._option_get('s3_store_host')
access_key = self._option_get('s3_store_access_key')
secret_key = self._option_get('s3_store_secret_key')

View File

@ -31,9 +31,16 @@ from glance.common import utils
from glance.openstack.common import cfg
from glance.registry import client
logger = logging.getLogger('glance.store.scrubber')
scrubber_opts = [
cfg.BoolOpt('cleanup_scrubber', default=False),
cfg.IntOpt('cleanup_scrubber_time', default=86400)
]
CONF = cfg.CONF
CONF.register_opts(scrubber_opts)
class Daemon(object):
def __init__(self, wakeup_time=300, threads=1000):
@ -63,14 +70,8 @@ class Daemon(object):
class Scrubber(object):
CLEANUP_FILE = ".cleanup"
opts = [
cfg.BoolOpt('cleanup_scrubber', default=False),
cfg.IntOpt('cleanup_scrubber_time', default=86400)
]
def __init__(self, conf, **local_conf):
self.conf = conf
self.conf.register_opts(self.opts)
self.datadir = store.get_scrubber_datadir(conf)
self.cleanup = self.conf.cleanup_scrubber

View File

@ -37,12 +37,30 @@ try:
except ImportError:
pass
logger = logging.getLogger('glance.store.swift')
DEFAULT_CONTAINER = 'glance'
DEFAULT_LARGE_OBJECT_SIZE = 5 * 1024 # 5GB
DEFAULT_LARGE_OBJECT_CHUNK_SIZE = 200 # 200M
ONE_MB = 1000 * 1024
logger = logging.getLogger('glance.store.swift')
swift_opts = [
cfg.BoolOpt('swift_enable_snet', default=False),
cfg.StrOpt('swift_store_auth_address'),
cfg.StrOpt('swift_store_user', secret=True),
cfg.StrOpt('swift_store_key', secret=True),
cfg.StrOpt('swift_store_auth_version', default='2'),
cfg.StrOpt('swift_store_container',
default=DEFAULT_CONTAINER),
cfg.IntOpt('swift_store_large_object_size',
default=DEFAULT_LARGE_OBJECT_SIZE),
cfg.IntOpt('swift_store_large_object_chunk_size',
default=DEFAULT_LARGE_OBJECT_CHUNK_SIZE),
cfg.BoolOpt('swift_store_create_container_on_put', default=False),
]
CONF = cfg.CONF
CONF.register_opts(swift_opts)
class StoreLocation(glance.store.location.StoreLocation):
@ -180,26 +198,10 @@ class Store(glance.store.base.Store):
CHUNKSIZE = 65536
opts = [
cfg.BoolOpt('swift_enable_snet', default=False),
cfg.StrOpt('swift_store_auth_address'),
cfg.StrOpt('swift_store_user', secret=True),
cfg.StrOpt('swift_store_key', secret=True),
cfg.StrOpt('swift_store_auth_version', default='2'),
cfg.StrOpt('swift_store_container',
default=DEFAULT_CONTAINER),
cfg.IntOpt('swift_store_large_object_size',
default=DEFAULT_LARGE_OBJECT_SIZE),
cfg.IntOpt('swift_store_large_object_chunk_size',
default=DEFAULT_LARGE_OBJECT_CHUNK_SIZE),
cfg.BoolOpt('swift_store_create_container_on_put', default=False),
]
def get_schemes(self):
return ('swift+https', 'swift', 'swift+http')
def configure(self):
self.conf.register_opts(self.opts)
self.snet = self.conf.swift_enable_snet
self.auth_version = self._option_get('swift_store_auth_version')

View File

@ -176,7 +176,7 @@ class ApiServer(Server):
super(ApiServer, self).__init__(test_dir, port)
self.server_name = 'api'
self.default_store = 'file'
self.known_stores = test_utils.get_default_stores()
self.known_stores = ", ".join(test_utils.get_default_stores())
self.key_file = ""
self.cert_file = ""
self.metadata_encryption_key = "012345678901234567890123456789ab"
@ -426,6 +426,7 @@ class FunctionalTest(test_utils.BaseTestCase):
log_files = []
def setUp(self):
super(FunctionalTest, self).setUp()
self.test_id, self.test_dir = test_utils.get_isolated_test_env()
self.api_protocol = 'http'
@ -463,6 +464,7 @@ class FunctionalTest(test_utils.BaseTestCase):
# from the tests
self._reset_database(self.registry_server.sql_connection)
self._reset_database(self.api_server.sql_connection)
super(FunctionalTest, self).tearDown()
def set_policy_rules(self, rules):
fap = open(self.policy_file, 'w')

View File

@ -69,8 +69,8 @@ class TestClientExceptions(functional.FunctionalTest):
super(TestClientExceptions, self).setUp()
self.port = utils.get_unused_port()
server = wsgi.Server()
conf = utils.TestConfigOpts({'bind_host': '127.0.0.1'})
server.start(ExceptionTestApp(), conf, self.port)
self.config(bind_host='127.0.0.1')
server.start(ExceptionTestApp(), self.conf, self.port)
self.client = client.BaseClient("127.0.0.1", self.port)
def _do_test_exception(self, path, exc_type):

View File

@ -86,9 +86,9 @@ class TestClientRedirects(functional.FunctionalTest):
self.port_two = utils.get_unused_port()
server_one = wsgi.Server()
server_two = wsgi.Server()
conf = utils.TestConfigOpts({'bind_host': '127.0.0.1'})
server_one.start(RedirectTestApp("one"), conf, self.port_one)
server_two.start(RedirectTestApp("two"), conf, self.port_two)
self.config(bind_host='127.0.0.1')
server_one.start(RedirectTestApp("one"), self.conf, self.port_one)
server_two.start(RedirectTestApp("two"), self.conf, self.port_two)
self.client = client.BaseClient("127.0.0.1", self.port_one)
def test_get_without_redirect(self):

View File

@ -21,8 +21,7 @@ class TestSchemaAPI(utils.BaseTestCase):
def setUp(self):
super(TestSchemaAPI, self).setUp()
conf = utils.TestConfigOpts()
self.schema_api = glance.schema.API(conf)
self.schema_api = glance.schema.API(self.conf)
def test_load_image_schema(self):
output = self.schema_api.get_schema('image')

View File

@ -92,6 +92,7 @@ class TestRBD(test_api.TestApi):
def setUp(self):
if self.disabled:
return
super(TestRBD, self).setUp()
import rados
try:
self.create_pool()
@ -99,11 +100,11 @@ class TestRBD(test_api.TestApi):
self.disabled_message = ("Failed to create pool: %s" % e)
self.disabled = True
return
super(TestRBD, self).setUp()
def tearDown(self):
if not self.disabled:
self.delete_pool()
if self.disabled:
return
self.delete_pool()
super(TestRBD, self).tearDown()
def create_pool(self):

View File

@ -89,6 +89,11 @@ class TestSSL(functional.FunctionalTest):
super(TestSSL, self).setUp()
def tearDown(self):
if getattr(self, 'inited', False):
return
super(TestSSL, self).tearDown()
@skip_if_disabled
def test_get_head_simple_post(self):
"""

View File

@ -23,6 +23,7 @@ import stubout
from glance import store
from glance.store import location
from glance.store import filesystem
from glance.tests import stubs
from glance.tests import utils as test_utils
@ -34,6 +35,7 @@ class StoreClearingUnitTest(test_utils.BaseTestCase):
# Ensure stores + locations cleared
store.STORES = {}
location.SCHEME_TO_CLS_MAP = {}
store.create_stores(self.conf)
def tearDown(self):
super(StoreClearingUnitTest, self).tearDown()
@ -50,18 +52,17 @@ class IsolatedUnitTest(StoreClearingUnitTest):
"""
def setUp(self):
super(IsolatedUnitTest, self).setUp()
self.test_id, self.test_dir = test_utils.get_isolated_test_env()
self.stubs = stubout.StubOutForTesting()
policy_file = self._copy_data_file('policy.json', self.test_dir)
options = {'sql_connection': 'sqlite://',
'verbose': False,
'debug': False,
'default_store': 'filesystem',
'known_stores': test_utils.get_default_stores(),
'filesystem_store_datadir': os.path.join(self.test_dir),
'policy_file': policy_file}
self.conf = test_utils.TestConfigOpts(options)
self.config(sql_connection='sqlite://',
verbose=False,
debug=False,
default_store='filesystem',
known_stores=test_utils.get_default_stores(),
filesystem_store_datadir=os.path.join(self.test_dir),
policy_file=policy_file)
super(IsolatedUnitTest, self).setUp()
stubs.stub_out_registry_and_store_server(self.stubs,
self.conf,
self.test_dir)
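
The self.config() helper these tests call lives on the shared BaseTestCase in
glance/tests/utils.py, which is not shown in this excerpt. A minimal sketch of
how such a helper is typically built on top of the global CONF (an assumption
about its shape, not the exact Glance code):

    import unittest

    from glance.openstack.common import cfg

    CONF = cfg.CONF

    class BaseTestCase(unittest.TestCase):

        def setUp(self):
            super(BaseTestCase, self).setUp()
            self.conf = CONF

        def config(self, **kw):
            """Override config options for the duration of a test."""
            group = kw.pop('group', None)
            for k, v in kw.items():
                CONF.set_override(k, v, group)
            # A matching cleanup (not shown) undoes these overrides in
            # tearDown so the shared CONF does not leak between tests.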

View File

@ -30,8 +30,6 @@ from glance.registry import client as rclient
from glance.tests.unit import base
from glance.tests import utils as test_utils
CONF = {'sql_connection': 'sqlite://'}
_gen_uuid = utils.generate_uuid
UUID1 = _gen_uuid()

View File

@ -17,12 +17,14 @@
import os.path
import shutil
import tempfile
import stubout
from glance.common import config
from glance.common import context
from glance.image_cache import pruner
from glance.openstack.common import cfg
from glance.tests import utils as test_utils
@ -38,12 +40,14 @@ class TestPasteApp(test_utils.BaseTestCase):
def _do_test_load_paste_app(self,
expected_app_type,
paste_group={},
paste_copy=True,
paste_flavor=None,
paste_config_file=None,
paste_append=None):
conf = test_utils.TestConfigOpts(groups=paste_group,
clean=False)
def _writeto(path, str):
with open(path, 'wb') as f:
f.write(str or '')
f.flush()
def _appendto(orig, copy, str):
shutil.copy(orig, copy)
@ -51,42 +55,47 @@ class TestPasteApp(test_utils.BaseTestCase):
f.write(str or '')
f.flush()
paste_to = os.path.join(conf.temp_file.replace('.conf',
'-paste.ini'))
if paste_copy:
paste_from = os.path.join(os.getcwd(),
'etc/glance-registry-paste.ini')
_appendto(paste_from, paste_to, paste_append)
self.config(flavor=paste_flavor,
config_file=paste_config_file,
group='paste_deploy')
app = config.load_paste_app(conf, 'glance-registry')
temp_file = os.path.join(tempfile.mkdtemp(), 'testcfg.conf')
self.assertEquals(expected_app_type, type(app))
try:
_writeto(temp_file, '[DEFAULT]\n')
if paste_copy:
os.remove(conf.temp_file)
os.remove(paste_to)
os.rmdir(os.path.dirname(conf.temp_file))
config.parse_args(['--config-file', temp_file])
paste_to = temp_file.replace('.conf', '-paste.ini')
if not paste_config_file:
paste_from = os.path.join(os.getcwd(),
'etc/glance-registry-paste.ini')
_appendto(paste_from, paste_to, paste_append)
app = config.load_paste_app(self.conf, 'glance-registry')
self.assertEquals(expected_app_type, type(app))
finally:
shutil.rmtree(os.path.dirname(temp_file))
def test_load_paste_app(self):
expected_middleware = context.UnauthenticatedContextMiddleware
self._do_test_load_paste_app(expected_middleware)
def test_load_paste_app_with_paste_flavor(self):
paste_group = {'paste_deploy': {'flavor': 'incomplete'}}
pipeline = ('[pipeline:glance-registry-incomplete]\n'
'pipeline = context registryapp')
type = context.ContextMiddleware
self._do_test_load_paste_app(type, paste_group, paste_append=pipeline)
expected_middleware = context.ContextMiddleware
self._do_test_load_paste_app(expected_middleware,
paste_flavor='incomplete',
paste_append=pipeline)
def test_load_paste_app_with_paste_config_file(self):
paste_config_file = os.path.join(os.getcwd(),
'etc/glance-registry-paste.ini')
paste_group = {'paste_deploy': {'config_file': paste_config_file}}
expected_middleware = context.UnauthenticatedContextMiddleware
self._do_test_load_paste_app(expected_middleware,
paste_group, paste_copy=False)
paste_config_file=paste_config_file)
def test_load_paste_app_with_conf_name(self):
def fake_join(*args):
@ -100,12 +109,11 @@ class TestPasteApp(test_utils.BaseTestCase):
orig_join = os.path.join
self.stubs.Set(os.path, 'join', fake_join)
conf = config.GlanceCacheConfigOpts()
conf([])
config.parse_cache_args([])
self.stubs.Set(config, 'setup_logging', lambda *a: None)
self.stubs.Set(pruner, 'Pruner', lambda conf, **lc: 'pruner')
app = config.load_paste_app(conf, 'glance-pruner')
app = config.load_paste_app(self.conf, 'glance-pruner')
self.assertEquals('pruner', app)

View File

@ -43,19 +43,16 @@ if UUID1 > UUID2:
UUID1, UUID2 = UUID2, UUID1
CONF = {'sql_connection': 'sqlite://',
'verbose': False,
'debug': False}
class BaseDBTestCase(base.IsolatedUnitTest):
def setUp(self):
super(BaseDBTestCase, self).setUp()
conf = test_utils.TestConfigOpts(CONF)
self.config(sql_connection='sqlite://',
verbose=False,
debug=False)
self.adm_context = context.RequestContext(is_admin=True)
self.context = context.RequestContext(is_admin=False)
db_api.configure_db(conf)
db_api.configure_db(self.conf)
self.destroy_fixtures()
self.create_fixtures()
@ -229,10 +226,12 @@ class TestPagingOrder(base.IsolatedUnitTest):
def setUp(self):
"""Establish a clean test environment"""
super(TestPagingOrder, self).setUp()
conf = test_utils.TestConfigOpts(CONF)
self.config(sql_connection='sqlite://',
verbose=False,
debug=False)
self.adm_context = context.RequestContext(is_admin=True)
self.context = context.RequestContext(is_admin=False)
db_api.configure_db(conf)
db_api.configure_db(self.conf)
self.destroy_fixtures()
self.create_fixtures()

View File

@ -91,15 +91,13 @@ def stub_out_registry_image_update(stubs, conf):
class TestHttpStore(base.StoreClearingUnitTest):
def setUp(self):
self.config(default_store='http',
known_stores=['glance.store.http.Store'])
super(TestHttpStore, self).setUp()
self.stubs = stubout.StubOutForTesting()
stub_out_http_backend(self.stubs)
Store.CHUNKSIZE = 2
self.store = Store({})
self.conf = utils.TestConfigOpts({
'default_store': 'http',
'known_stores': "glance.store.http.Store",
})
configure_registry_client(self.conf)
def test_http_get(self):

View File

@ -254,12 +254,11 @@ class TestImageCacheXattr(test_utils.BaseTestCase,
self.inited = True
self.disabled = False
self.conf = test_utils.TestConfigOpts({
'image_cache_dir': self.cache_dir,
'image_cache_driver': 'xattr',
'image_cache_max_size': 1024 * 5,
'registry_host': '0.0.0.0',
'registry_port': 9191})
self.config(image_cache_dir=self.cache_dir,
image_cache_driver='xattr',
image_cache_max_size=1024 * 5,
registry_host='0.0.0.0',
registry_port=9191)
self.cache = image_cache.ImageCache(self.conf)
if not xattr_writes_supported(self.cache_dir):
@ -302,12 +301,11 @@ class TestImageCacheSqlite(test_utils.BaseTestCase,
self.disabled = False
self.cache_dir = os.path.join("/", "tmp", "test.cache.%d" %
random.randint(0, 1000000))
self.conf = test_utils.TestConfigOpts({
'image_cache_dir': self.cache_dir,
'image_cache_driver': 'sqlite',
'image_cache_max_size': 1024 * 5,
'registry_host': '0.0.0.0',
'registry_port': 9191})
self.config(image_cache_dir=self.cache_dir,
image_cache_driver='sqlite',
image_cache_max_size=1024 * 5,
registry_host='0.0.0.0',
registry_port=9191)
self.cache = image_cache.ImageCache(self.conf)
def tearDown(self):

View File

@ -146,10 +146,8 @@ class TestMigrations(utils.BaseTestCase):
that there are no errors in the version scripts for each engine
"""
for key, engine in self.engines.items():
conf = utils.TestConfigOpts({
'sql_connection': TestMigrations.TEST_DATABASES[key]})
conf.register_opt(cfg.StrOpt('sql_connection'))
self._walk_versions(conf)
self.config(sql_connection=TestMigrations.TEST_DATABASES[key])
self._walk_versions(self.conf)
def test_version_control_existing_db(self):
"""
@ -158,11 +156,9 @@ class TestMigrations(utils.BaseTestCase):
without errors.
"""
for key, engine in self.engines.items():
conf = utils.TestConfigOpts({
'sql_connection': TestMigrations.TEST_DATABASES[key]})
conf.register_opt(cfg.StrOpt('sql_connection'))
self.config(sql_connection=TestMigrations.TEST_DATABASES[key])
self._create_unversioned_001_db(engine)
self._walk_versions(conf, initial_version=1)
self._walk_versions(self.conf, initial_version=1)
def _create_unversioned_001_db(self, engine):
# Create the initial version of the images table
@ -221,10 +217,8 @@ class TestMigrations(utils.BaseTestCase):
the image_properties table back into the base image table.
"""
for key, engine in self.engines.items():
conf = utils.TestConfigOpts({
'sql_connection': TestMigrations.TEST_DATABASES[key]})
conf.register_opt(cfg.StrOpt('sql_connection'))
self._no_data_loss_2_to_3_to_2(engine, conf)
self.config(sql_connection=TestMigrations.TEST_DATABASES[key])
self._no_data_loss_2_to_3_to_2(engine, self.conf)
def _no_data_loss_2_to_3_to_2(self, engine, conf):
migration_api.version_control(conf, version=0)
@ -320,10 +314,8 @@ class TestMigrations(utils.BaseTestCase):
def test_no_data_loss_14_to_15(self):
for key, engine in self.engines.items():
conf = utils.TestConfigOpts({
'sql_connection': TestMigrations.TEST_DATABASES[key]})
conf.register_opt(cfg.StrOpt('sql_connection'))
self._check_no_data_loss_14_to_15(engine, conf)
self.config(sql_connection=TestMigrations.TEST_DATABASES[key])
self._check_no_data_loss_14_to_15(engine, self.conf)
def _check_no_data_loss_14_to_15(self, engine, conf):
"""

View File

@ -38,10 +38,10 @@ class TestInvalidNotifier(utils.BaseTestCase):
"""Test that notifications are generated appropriately"""
def test_cannot_create(self):
conf = utils.TestConfigOpts({"notifier_strategy": "invalid_notifier"})
self.config(notifier_strategy="invalid_notifier")
self.assertRaises(exception.InvalidNotifierStrategy,
notifier.Notifier,
conf)
self.conf)
class TestLoggingNotifier(utils.BaseTestCase):
@ -49,10 +49,10 @@ class TestLoggingNotifier(utils.BaseTestCase):
def setUp(self):
super(TestLoggingNotifier, self).setUp()
conf = utils.TestConfigOpts({"notifier_strategy": "logging"})
self.config(notifier_strategy="logging")
self.called = False
self.logger = logging.getLogger("glance.notifier.logging_notifier")
self.notifier = notifier.Notifier(conf)
self.notifier = notifier.Notifier(self.conf)
def _called(self, msg):
self.called = msg
@ -81,8 +81,8 @@ class TestNoopNotifier(utils.BaseTestCase):
def setUp(self):
super(TestNoopNotifier, self).setUp()
conf = utils.TestConfigOpts({"notifier_strategy": "noop"})
self.notifier = notifier.Notifier(conf)
self.config(notifier_strategy="noop")
self.notifier = notifier.Notifier(self.conf)
def test_warn(self):
self.notifier.warn("test_event", "test_message")
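    # Illustrative recap (not part of this module): strategy selection now
    # goes through the global CONF rather than a per-test config object, e.g.
    #   self.config(notifier_strategy="noop")
    #   self.notifier = notifier.Notifier(self.conf)
    #   self.notifier.warn("test_event", "test_message")
    # and an unrecognised strategy still fails at construction time:
    #   self.config(notifier_strategy="invalid_notifier")
    #   self.assertRaises(exception.InvalidNotifierStrategy,
    #                     notifier.Notifier, self.conf)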
@ -110,10 +110,9 @@ class TestRabbitNotifier(utils.BaseTestCase):
self.notify_kombu.RabbitStrategy._send_message = self._send_message
self.notify_kombu.RabbitStrategy._connect = _fake_connect
self.called = False
self.conf = utils.TestConfigOpts({"notifier_strategy": "rabbit",
"rabbit_retry_backoff": 0,
"rabbit_notification_topic":
"fake_topic"})
self.config(notifier_strategy="rabbit",
rabbit_retry_backoff=0,
rabbit_notification_topic="fake_topic")
self.notifier = notifier.Notifier(self.conf)
def _send_message(self, message, routing_key):
@ -338,8 +337,8 @@ class TestQpidNotifier(utils.BaseTestCase):
self.mocker.ReplayAll()
conf = utils.TestConfigOpts({"notifier_strategy": "qpid"})
notifier = self.notify_qpid.QpidStrategy(conf)
self.config(notifier_strategy="qpid")
notifier = self.notify_qpid.QpidStrategy(self.conf)
if priority == 'info':
notifier.info(test_msg)
elif priority == 'warn':
@ -382,10 +381,9 @@ class TestRabbitContentType(utils.BaseTestCase):
self.stubs.Set(glance.notifier.notify_kombu.RabbitStrategy, '_connect',
_fake_connect)
self.called = False
self.conf = utils.TestConfigOpts({"notifier_strategy": "rabbit",
"rabbit_retry_backoff": 0,
"rabbit_notification_topic":
"fake_topic"})
self.config(notifier_strategy="rabbit",
rabbit_retry_backoff=0,
rabbit_notification_topic="fake_topic")
self.notifier = notifier.Notifier(self.conf)
def _fake_exchange(self):

View File

@ -25,6 +25,7 @@ import boto.s3.connection
from glance.common import exception
from glance.common import utils
from glance.openstack.common import cfg
from glance.store import UnsupportedBackend
from glance.store.location import get_location_from_uri
from glance.store.s3 import Store, get_s3_location
@ -161,10 +162,11 @@ class TestStore(base.StoreClearingUnitTest):
def setUp(self):
"""Establish a clean test environment"""
self.config(**S3_CONF)
super(TestStore, self).setUp()
self.stubs = stubout.StubOutForTesting()
stub_out_s3(self.stubs)
self.store = Store(test_utils.TestConfigOpts(S3_CONF))
self.store = Store(self.conf)
def tearDown(self):
"""Clear the test environment"""
@ -261,7 +263,8 @@ class TestStore(base.StoreClearingUnitTest):
expected_image_id)
image_s3 = StringIO.StringIO(expected_s3_contents)
self.store = Store(test_utils.TestConfigOpts(new_conf))
self.config(**new_conf)
self.store = Store(self.conf)
location, size, checksum = self.store.add(expected_image_id,
image_s3,
expected_s3_size)
@ -290,10 +293,11 @@ class TestStore(base.StoreClearingUnitTest):
def _option_required(self, key):
conf = S3_CONF.copy()
del conf[key]
conf[key] = None
try:
self.store = Store(test_utils.TestConfigOpts(conf))
self.config(**conf)
self.store = Store(self.conf)
return self.store.add == self.store.add_disabled
except:
return False
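
The _option_required() change above shows the pattern for simulating a missing setting under the global CONF: the option stays registered, so the tests override its value to None rather than deleting the key. A minimal sketch, with an assumed option name:

    # Illustrative only: a required S3 option overridden to None should
    # leave the store with add() disabled.
    conf = S3_CONF.copy()
    conf['s3_store_host'] = None      # option name assumed for the example
    self.config(**conf)
    self.store = Store(self.conf)
    assert self.store.add == self.store.add_disabled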

View File

@ -51,7 +51,6 @@ class TestSchemaAPI(test_utils.BaseTestCase):
def setUp(self):
super(TestSchemaAPI, self).setUp()
self.conf = test_utils.TestConfigOpts()
self.schema_api = glance.schema.API(self.conf, FAKE_BASE_PROPERTIES)
def test_get_schema(self):
@ -180,7 +179,7 @@ class TestSchemaAPI(test_utils.BaseTestCase):
self.assertEqual(output, expected)
def test_get_image_schema_with_additional_properties_disabled(self):
self.conf.allow_additional_image_properties = False
self.config(allow_additional_image_properties=False)
output = self.schema_api.get_schema('image')
expected = {
'name': 'image',
@ -201,7 +200,7 @@ class TestSchemaAPI(test_utils.BaseTestCase):
self.assertEqual(output, expected)
def test_get_image_schema_with_additional_properties_enabled(self):
self.conf.allow_additional_image_properties = True
self.config(allow_additional_image_properties=True)
output = self.schema_api.get_schema('image')
expected = {
'name': 'image',
@ -222,7 +221,7 @@ class TestSchemaAPI(test_utils.BaseTestCase):
self.assertEqual(output, expected)
def test_get_other_schema_with_additional_image_properties_enabled(self):
self.conf.allow_additional_image_properties = True
self.config(allow_additional_image_properties=False)
output = self.schema_api.get_schema('fake1')
expected = {
'name': 'fake1',

View File

@ -29,11 +29,9 @@ from glance.tests import utils
class TestStoreLocation(base.StoreClearingUnitTest):
def setUp(self):
self.config(known_stores=utils.get_default_stores(),
default_store='file')
super(TestStoreLocation, self).setUp()
self.conf = utils.TestConfigOpts({
'known_stores': utils.get_default_stores(),
'default_store': 'file',
})
def test_get_location_from_uri_back_to_uri(self):
"""

View File

@ -44,7 +44,7 @@ MAX_SWIFT_OBJECT_SIZE = FIVE_GB
SWIFT_PUT_OBJECT_CALLS = 0
SWIFT_CONF = {'verbose': True,
'debug': True,
'known_stores': "glance.store.swift.Store",
'known_stores': ['glance.store.swift.Store'],
'default_store': 'swift',
'swift_store_user': 'user',
'swift_store_key': 'key',
@ -55,7 +55,7 @@ SWIFT_CONF = {'verbose': True,
# We stub out as little as possible to ensure that the code paths
# between glance.store.swift and swift.common.client are tested
# thoroughly
def stub_out_swift_common_client(stubs, conf):
def stub_out_swift_common_client(stubs, swift_store_auth_version):
fixture_containers = ['glance']
fixture_headers = {'glance/%s' % FAKE_UUID:
@ -165,7 +165,7 @@ def stub_out_swift_common_client(stubs, conf):
if 'http' in url and '://' not in url:
raise ValueError('Invalid url %s' % url)
# Check the auth version against the configured value
if conf['swift_store_auth_version'] != auth_version:
if swift_store_auth_version != auth_version:
msg = 'AUTHENTICATION failed (version mismatch)'
raise swift.common.client.ClientException(msg)
return None, None
@ -192,7 +192,7 @@ class SwiftTests(object):
@property
def swift_store_user(self):
return urllib.quote(self.conf['swift_store_user'])
return urllib.quote(self.conf.swift_store_user)
def test_get_size(self):
"""
@ -314,14 +314,14 @@ class SwiftTests(object):
expected_swift_contents = "*" * expected_swift_size
expected_checksum = \
hashlib.md5(expected_swift_contents).hexdigest()
self.conf['swift_store_auth_address'] = variation
image_swift = StringIO.StringIO(expected_swift_contents)
global SWIFT_PUT_OBJECT_CALLS
SWIFT_PUT_OBJECT_CALLS = 0
self.store = Store(test_utils.TestConfigOpts(self.conf))
self.config(swift_store_auth_address=variation)
self.store = Store(self.conf)
location, size, checksum = self.store.add(image_id, image_swift,
expected_swift_size)
@ -343,10 +343,11 @@ class SwiftTests(object):
Tests that adding an image with a non-existing container
raises an appropriate exception
"""
self.conf['swift_store_create_container_on_put'] = 'False'
self.conf['swift_store_container'] = 'noexist'
self.config(swift_store_create_container_on_put=False,
swift_store_container='noexist')
self.store = Store(self.conf)
image_swift = StringIO.StringIO("nevergonnamakeit")
self.store = Store(test_utils.TestConfigOpts(self.conf))
global SWIFT_PUT_OBJECT_CALLS
SWIFT_PUT_OBJECT_CALLS = 0
@ -369,8 +370,6 @@ class SwiftTests(object):
Tests that adding an image with a non-existing container
creates the container automatically if flag is set
"""
self.conf['swift_store_create_container_on_put'] = 'True'
self.conf['swift_store_container'] = 'noexist'
expected_swift_size = FIVE_KB
expected_swift_contents = "*" * expected_swift_size
expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
@ -383,7 +382,9 @@ class SwiftTests(object):
global SWIFT_PUT_OBJECT_CALLS
SWIFT_PUT_OBJECT_CALLS = 0
self.store = Store(test_utils.TestConfigOpts(self.conf))
self.config(swift_store_create_container_on_put=True,
swift_store_container='noexist')
self.store = Store(self.conf)
location, size, checksum = self.store.add(expected_image_id,
image_swift,
expected_swift_size)
@ -408,7 +409,6 @@ class SwiftTests(object):
and then verify that there have been a number of calls to
put_object()...
"""
self.conf['swift_store_container'] = 'glance'
expected_swift_size = FIVE_KB
expected_swift_contents = "*" * expected_swift_size
expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
@ -421,7 +421,8 @@ class SwiftTests(object):
global SWIFT_PUT_OBJECT_CALLS
SWIFT_PUT_OBJECT_CALLS = 0
self.store = Store(test_utils.TestConfigOpts(self.conf))
self.config(swift_store_container='glance')
self.store = Store(self.conf)
orig_max_size = self.store.large_object_size
orig_temp_size = self.store.large_object_chunk_size
try:
@ -460,8 +461,6 @@ class SwiftTests(object):
Bug lp:891738
"""
self.conf['swift_store_container'] = 'glance'
# Set up a 'large' image of 5KB
expected_swift_size = FIVE_KB
expected_swift_contents = "*" * expected_swift_size
@ -477,7 +476,8 @@ class SwiftTests(object):
# Temporarily set Swift MAX_SWIFT_OBJECT_SIZE to 1KB and add our image,
# explicitly setting the image_length to 0
self.store = Store(test_utils.TestConfigOpts(self.conf))
self.config(swift_store_container='glance')
self.store = Store(self.conf)
orig_max_size = self.store.large_object_size
orig_temp_size = self.store.large_object_chunk_size
global MAX_SWIFT_OBJECT_SIZE
@ -521,10 +521,12 @@ class SwiftTests(object):
FAKE_UUID, image_swift, 0)
def _option_required(self, key):
del self.conf[key]
conf = self.getConfig()
conf[key] = None
try:
self.store = Store(test_utils.TestConfigOpts(self.conf))
self.config(**conf)
self.store = Store(self.conf)
return self.store.add == self.store.add_disabled
except:
return False
@ -579,11 +581,13 @@ class TestStoreAuthV1(base.StoreClearingUnitTest, SwiftTests):
def setUp(self):
"""Establish a clean test environment"""
conf = self.getConfig()
self.config(**conf)
super(TestStoreAuthV1, self).setUp()
self.conf = self.getConfig()
self.stubs = stubout.StubOutForTesting()
stub_out_swift_common_client(self.stubs, self.conf)
self.store = Store(test_utils.TestConfigOpts(self.conf))
stub_out_swift_common_client(self.stubs,
conf['swift_store_auth_version'])
self.store = Store(self.conf)
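        # Illustrative recap (not part of this module): overrides go onto the
        # global CONF, the stub now only needs the auth version value,
        #   stub_out_swift_common_client(self.stubs,
        #                                conf['swift_store_auth_version'])
        # and options are read back as attributes, e.g.
        #   self.conf.swift_store_user
        # instead of dictionary lookups like self.conf['swift_store_user'].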
def tearDown(self):
"""Clear the test environment"""

View File

@ -31,9 +31,8 @@ class VersionsTest(base.IsolatedUnitTest):
def test_get_version_list(self):
req = webob.Request.blank('/', base_url='http://0.0.0.0:9292/')
req.accept = 'application/json'
config_opts = {'bind_host': '0.0.0.0', 'bind_port': 9292}
conf = utils.TestConfigOpts(config_opts)
res = versions.Controller(conf).index(req)
self.config(bind_host='0.0.0.0', bind_port=9292)
res = versions.Controller(self.conf).index(req)
self.assertEqual(res.status_int, 300)
self.assertEqual(res.content_type, 'application/json')
results = json.loads(res.body)['versions']

View File

@ -55,16 +55,13 @@ class TestRegistryDb(test_utils.BaseTestCase):
API controller results in a) an Exception being thrown and b)
a message being logged to the registry log file...
"""
bad_conf = test_utils.TestConfigOpts({
'verbose': True,
'debug': True,
'sql_connection': 'baddriver:///'
})
self.config(verbose=True, debug=True, sql_connection='baddriver:///')
# We set this to None to trigger a reconfigure, otherwise
# other modules may have already correctly configured the DB
db_api._ENGINE = None
self.assertRaises((ImportError, exc.ArgumentError),
db_api.configure_db, bad_conf)
db_api.configure_db, self.conf)
exc_raised = False
self.log_written = False
@ -74,7 +71,7 @@ class TestRegistryDb(test_utils.BaseTestCase):
self.stubs.Set(db_api.logger, 'error', fake_log_error)
try:
api_obj = rserver.API(bad_conf)
api_obj = rserver.API(self.conf)
except exc.ArgumentError:
exc_raised = True
except ImportError:

View File

@ -111,8 +111,7 @@ class TestImageAccessDeserializer(test_utils.BaseTestCase):
def setUp(self):
super(TestImageAccessDeserializer, self).setUp()
conf = test_utils.TestConfigOpts()
schema_api = glance.schema.API(conf)
schema_api = glance.schema.API(self.conf)
self.deserializer = image_access.RequestDeserializer({}, schema_api)
def test_create(self):
@ -136,8 +135,7 @@ class TestImageAccessDeserializerWithExtendedSchema(test_utils.BaseTestCase):
def setUp(self):
super(TestImageAccessDeserializerWithExtendedSchema, self).setUp()
conf = test_utils.TestConfigOpts()
schema_api = glance.schema.API(conf)
schema_api = glance.schema.API(self.conf)
props = {
'color': {
'type': 'string',

View File

@ -28,11 +28,10 @@ class TestImagesController(base.StoreClearingUnitTest):
def setUp(self):
super(TestImagesController, self).setUp()
conf = glance.tests.utils.TestConfigOpts({
'verbose': True,
'debug': True,
})
self.controller = glance.api.v2.image_data.ImageDataController(conf,
self.config(verbose=True, debug=True)
controller_class = glance.api.v2.image_data.ImageDataController
self.controller = controller_class(self.conf,
db_api=unit_test_utils.FakeDB(),
store_api=unit_test_utils.FakeStoreAPI())

View File

@ -140,7 +140,6 @@ class TestImagesDeserializer(test_utils.BaseTestCase):
def setUp(self):
super(TestImagesDeserializer, self).setUp()
self.conf = test_utils.TestConfigOpts()
schema_api = glance.schema.API(self.conf)
self.deserializer = glance.api.v2.images.RequestDeserializer(
{}, schema_api)
@ -215,8 +214,7 @@ class TestImagesDeserializerWithExtendedSchema(test_utils.BaseTestCase):
def setUp(self):
super(TestImagesDeserializerWithExtendedSchema, self).setUp()
conf = test_utils.TestConfigOpts()
schema_api = glance.schema.API(conf)
schema_api = glance.schema.API(self.conf)
props = {
'pants': {
'type': 'string',
@ -269,8 +267,7 @@ class TestImagesDeserializerWithAdditionalProperties(test_utils.BaseTestCase):
def setUp(self):
super(TestImagesDeserializerWithAdditionalProperties, self).setUp()
self.conf = test_utils.TestConfigOpts()
self.conf.allow_additional_image_properties = True
self.config(allow_additional_image_properties=True)
schema_api = glance.schema.API(self.conf)
self.deserializer = glance.api.v2.images.RequestDeserializer(
{}, schema_api)
@ -283,7 +280,7 @@ class TestImagesDeserializerWithAdditionalProperties(test_utils.BaseTestCase):
self.assertEqual(expected, output)
def test_create_with_additional_properties_disallowed(self):
self.conf.allow_additional_image_properties = False
self.config(allow_additional_image_properties=False)
request = unit_test_utils.FakeRequest()
request.body = json.dumps({'foo': 'bar'})
self.assertRaises(exception.InvalidObject,
@ -309,7 +306,7 @@ class TestImagesDeserializerWithAdditionalProperties(test_utils.BaseTestCase):
self.assertEqual(expected, output)
def test_update_with_additional_properties_disallowed(self):
self.conf.allow_additional_image_properties = False
self.config(allow_additional_image_properties=False)
request = unit_test_utils.FakeRequest()
request.body = json.dumps({'foo': 'bar'})
self.assertRaises(exception.InvalidObject,
@ -320,8 +317,7 @@ class TestImagesSerializer(test_utils.BaseTestCase):
def setUp(self):
super(TestImagesSerializer, self).setUp()
conf = test_utils.TestConfigOpts()
schema_api = glance.schema.API(conf)
schema_api = glance.schema.API(self.conf)
self.serializer = glance.api.v2.images.ResponseSerializer(schema_api)
def test_index(self):
@ -497,8 +493,7 @@ class TestImagesSerializerWithExtendedSchema(test_utils.BaseTestCase):
def setUp(self):
super(TestImagesSerializerWithExtendedSchema, self).setUp()
self.conf = test_utils.TestConfigOpts()
self.conf.allow_additional_image_properties = False
self.config(allow_additional_image_properties=False)
self.schema_api = glance.schema.API(self.conf)
props = {
'color': {
@ -580,8 +575,7 @@ class TestImagesSerializerWithAdditionalProperties(test_utils.BaseTestCase):
def setUp(self):
super(TestImagesSerializerWithAdditionalProperties, self).setUp()
self.conf = test_utils.TestConfigOpts()
self.conf.allow_additional_image_properties = True
self.config(allow_additional_image_properties=True)
self.schema_api = glance.schema.API(self.conf)
self.fixture = {
'id': unit_test_utils.UUID2,
@ -656,7 +650,7 @@ class TestImagesSerializerWithAdditionalProperties(test_utils.BaseTestCase):
self.assertEqual(expected, json.loads(response.body))
def test_show_with_additional_properties_disabled(self):
self.conf.allow_additional_image_properties = False
self.config(allow_additional_image_properties=False)
serializer = glance.api.v2.images.ResponseSerializer(self.schema_api)
expected = {
'image': {

View File

@ -23,8 +23,7 @@ class TestSchemasController(test_utils.BaseTestCase):
def setUp(self):
super(TestSchemasController, self).setUp()
conf = test_utils.TestConfigOpts()
self.schema_api = glance.schema.API(conf)
self.schema_api = glance.schema.API(self.conf)
self.controller = schemas.Controller({}, self.schema_api)
def test_index(self):

View File

@ -23,7 +23,6 @@ import os
import random
import socket
import subprocess
import tempfile
import unittest
import nose.plugins.skip
@ -31,8 +30,11 @@ import nose.plugins.skip
from glance.common import config
from glance.common import utils
from glance.common import wsgi
from glance.openstack.common import cfg
from glance import store
CONF = cfg.CONF
def get_isolated_test_env():
"""
@ -50,79 +52,28 @@ class BaseTestCase(unittest.TestCase):
def setUp(self):
super(BaseTestCase, self).setUp()
self.conf = CONF
def tearDown(self):
super(BaseTestCase, self).tearDown()
CONF.reset()
def config(self, **kw):
"""
Override some configuration values.
class TestConfigOpts(config.GlanceConfigOpts):
"""
Support easily controllable config for unit tests, avoiding the
need to manipulate config files directly.
The keyword arguments are the names of configuration options to
override and their values.
Configuration values are provided as a dictionary of key-value pairs,
in the simplest case feeding into the DEFAULT group only.
If a group argument is supplied, the overrides are applied to
the specified configuration option group.
    Non-default groups may also be populated via nested dictionaries, e.g.
{'snafu': {'foo': 'bar', 'bells': 'whistles'}}
equates to config of form:
[snafu]
foo = bar
bells = whistles
The config so provided is dumped to a temporary file, with its path
exposed via the temp_file property.
:param test_values: dictionary of key-value pairs for the
DEFAULT group
:param groups: nested dictionary of key-value pairs for
non-default groups
:param clean: flag to trigger clean up of temporary directory
"""
def __init__(self, test_values={}, groups={}, clean=True):
super(TestConfigOpts, self).__init__()
self._test_values = test_values
self._test_groups = groups
self.clean = clean
self.temp_file = os.path.join(tempfile.mkdtemp(), 'testcfg.conf')
self()
store.create_stores(self)
def __call__(self):
self._write_tmp_config_file()
try:
super(TestConfigOpts, self).__call__(['--config-file',
self.temp_file])
finally:
if self.clean:
os.remove(self.temp_file)
os.rmdir(os.path.dirname(self.temp_file))
def _write_tmp_config_file(self):
contents = '[DEFAULT]\n'
for key, value in self._test_values.items():
contents += '%s = %s\n' % (key, value)
for group, settings in self._test_groups.items():
contents += '[%s]\n' % group
for key, value in settings.items():
contents += '%s = %s\n' % (key, value)
try:
with open(self.temp_file, 'wb') as f:
f.write(contents)
f.flush()
except Exception, e:
if self.clean:
os.remove(self.temp_file)
os.rmdir(os.path.dirname(self.temp_file))
raise e
All overrides are automatically cleared at the end of the current
test by the tearDown() method.
"""
group = kw.pop('group', None)
for k, v in kw.iteritems():
CONF.set_override(k, v, group)
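    # Illustrative usage (not part of this module): inside a BaseTestCase
    # subclass, overrides go straight onto the global CONF and are undone
    # automatically between tests, e.g.
    #   self.config(verbose=True)                        # DEFAULT group
    #   self.config(flavor='incomplete', group='paste_deploy')
    # tearDown() calls CONF.reset(), so no override leaks into the next
    # test case.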
class skip_test(object):
@ -328,15 +279,13 @@ def find_executable(cmdname):
def get_default_stores():
# Default test stores
known_stores = [
return [
"glance.store.filesystem.Store",
"glance.store.http.Store",
"glance.store.rbd.Store",
"glance.store.s3.Store",
"glance.store.swift.Store",
]
# Made in a format that the config can read
return ", ".join(known_stores)
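    # Note (illustrative): known_stores is now passed around as a Python list
    # and applied with CONF.set_override(), e.g.
    #   self.config(known_stores=get_default_stores(), default_store='file')
    # so the comma-joined string form above is no longer needed.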
def get_unused_port():

View File

@ -6,7 +6,7 @@ import sys
import keystoneclient.v2_0.client
import glance.common.context
import glance.openstack.common.cfg
from glance.openstack.common import cfg
import glance.registry.context
import glance.db.api as db_api
@ -57,23 +57,21 @@ def update_image_owners(image_owner_map, db, context):
if __name__ == "__main__":
config = glance.openstack.common.cfg.CommonConfigOpts(project='glance',
prog='glance-registry')
config = cfg.CONF
extra_cli_opts = [
glance.openstack.common.cfg.BoolOpt('dry-run',
help='Print output but do not make db changes.'),
glance.openstack.common.cfg.StrOpt('keystone-auth-uri',
help='Authentication endpoint'),
glance.openstack.common.cfg.StrOpt('keystone-admin-tenant-name',
help='Administrative user\'s tenant name'),
glance.openstack.common.cfg.StrOpt('keystone-admin-user',
help='Administrative user\'s id'),
glance.openstack.common.cfg.StrOpt('keystone-admin-password',
help='Administrative user\'s password'),
cfg.BoolOpt('dry-run',
help='Print output but do not make db changes.'),
cfg.StrOpt('keystone-auth-uri',
help='Authentication endpoint'),
cfg.StrOpt('keystone-admin-tenant-name',
help='Administrative user\'s tenant name'),
cfg.StrOpt('keystone-admin-user',
help='Administrative user\'s id'),
cfg.StrOpt('keystone-admin-password',
help='Administrative user\'s password'),
]
config.register_cli_opts(extra_cli_opts)
config()
config.register_opts(glance.common.context.ContextMiddleware.opts)
config(project='glance', prog='glance-registry')
db_api.configure_db(config)
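
Condensed, the new bootstrap for this script is roughly the sketch below (illustrative; only two of the extra CLI options are shown and help strings are shortened):

    # Register extra CLI options on the global CONF, parse the command line,
    # then hand the same CONF object to the database layer.
    from glance.openstack.common import cfg
    import glance.db.api as db_api

    CONF = cfg.CONF
    CONF.register_cli_opts([
        cfg.BoolOpt('dry-run',
                    help='Print output but do not make db changes.'),
        cfg.StrOpt('keystone-auth-uri', help='Authentication endpoint'),
    ])
    CONF(project='glance', prog='glance-registry')
    db_api.configure_db(CONF)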