diff --git a/bin/swfiller b/bin/swfiller
index 0365dfa..a93183a 100755
--- a/bin/swfiller
+++ b/bin/swfiller
@@ -2,6 +2,7 @@
 # -*- encoding: utf-8 -*-
 
 import argparse
+import logging
 import os
 import pickle
 import sys
@@ -10,10 +11,8 @@ import eventlet
 from keystoneclient.v2_0 import client as ksclient
 
 sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
-from swsync.utils import get_config
-from swsync.filler import (load_index, load_containers_index,
-                           create_swift_account, fill_swift,
-                           delete_account_content, delete_account)
+from swsync import filler
+from swsync import utils
 
 
 def main():
@@ -39,8 +38,14 @@ def main():
     parser.add_argument('-s',
                         help='Specify the MAX file size. Files '
                              'will be from 1024 Bytes to MAX Bytes')
+    parser.add_argument('-d', '--log-level',
+                        dest='log_level',
+                        default='info',
+                        help='Specify the log level')
     args = parser.parse_args()
 
+    utils.set_logging(args.log_level)
+
     if not args.create and not args.delete:
         parser.print_help()
         sys.exit(1)
@@ -48,34 +53,35 @@ def main():
         parser.print_help()
         sys.exit(1)
 
-    concurrency = int(get_config('filler', 'concurrency'))
+    concurrency = int(utils.get_config('filler', 'concurrency'))
     pile = eventlet.GreenPile(concurrency)
     pool = eventlet.GreenPool(concurrency)
 
-    _config = get_config('auth',
-                         'keystone_origin_admin_credentials').split(':')
+    _config = utils.get_config('auth',
+                               'keystone_origin_admin_credentials').split(':')
     tenant_name, username, password = _config
     client = ksclient.Client(
-        auth_url=get_config('auth', 'keystone_origin'),
+        auth_url=utils.get_config('auth', 'keystone_origin'),
         username=username,
         password=password,
         tenant_name=tenant_name)
-    index_path = get_config('filler', 'index_path')
-    index_containers_path = get_config('filler', 'index_containers_path')
+    index_path = utils.get_config('filler', 'index_path')
+    index_containers_path = utils.get_config('filler', 'index_containers_path')
 
     if args.l:
-        index = load_index()
-        index_containers = load_containers_index()
+        index = filler.load_index()
+        index_containers = filler.load_containers_index()
     else:
         index = {}
         index_containers = {}
 
     if args.create:
         if args.a is None or not args.a.isdigit():
-            print("Provide account amount by setting '-a' option")
+            logging.info("Provide account amount by setting '-a' option")
             sys.exit(1)
         if args.u is None or not args.u.isdigit():
-            print("Provide user by account amount by setting '-u' option")
+            logging.info("Provide user by account "
+                         "amount by setting '-u' option")
             sys.exit(1)
         if args.s is None:
             fmax = 1024
@@ -84,29 +90,29 @@ def main():
             fmax = max(1024, int(args.s))
         else:
             fmax = 1024
-        created = create_swift_account(client, pile,
-                                       int(args.a),
-                                       int(args.u), index=index)
+        created = filler.create_swift_account(client, pile,
+                                              int(args.a),
+                                              int(args.u), index=index)
         if args.f is not None and args.c is not None:
             if args.f.isdigit() and args.c.isdigit():
-                fill_swift(pool, created, int(args.c),
-                           int(args.f), fmax,
-                           index_containers=index_containers)
+                filler.fill_swift(pool, created, int(args.c),
+                                  int(args.f), fmax,
+                                  index_containers=index_containers)
             else:
-                print("'-c' and '-f' options must be integers")
+                logging.info("'-c' and '-f' options must be integers")
                 sys.exit(1)
         pickle.dump(index, open(index_path, 'w'))
         pickle.dump(index_containers, open(index_containers_path, 'w'))
 
     if args.delete:
-        index = load_index()
+        index = filler.load_index()
         for k, v in index.items():
            user_info_list = [user[1] for user in v]
             # Take the first user we find
-            delete_account_content(k, v[0])
-            delete_account(client, user_info_list, k)
+            filler.delete_account_content(k, v[0])
+            filler.delete_account(client, user_info_list, k)
             del index[k]
         if not os.path.exists(index_path):
-            print "No index_path to load."
+            logging.info("No index_path to load.")
             sys.exit(1)
         pickle.dump(index, open(index_path, 'w'))
diff --git a/bin/swsync b/bin/swsync
index 4b4a7dc..1a9e9fb 100755
--- a/bin/swsync
+++ b/bin/swsync
@@ -16,32 +16,16 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 import sys
-import logging
 import optparse
 
 import swsync.accounts
-from swsync.utils import parse_ini, ConfigurationError
+from swsync.utils import parse_ini, ConfigurationError, set_logging
+
 
 class Main(object):
     def __init__(self):
         self.options = {}
 
-    def set_logging(self):
-        logger = logging.getLogger()
-        logger.setLevel({
-            'debug': logging.DEBUG,
-            'info': logging.INFO,
-            'warning': logging.WARNING,
-            'error': logging.ERROR,
-            'critical': logging.CRITICAL}.get(
-                self.options.log_level.lower()
-            ))
-        loghandler = logging.StreamHandler()
-        logger.addHandler(loghandler)
-        logger = logging.LoggerAdapter(logger, 'swsync')
-        logformat = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
-        loghandler.setFormatter(logformat)
-
     def main(self):
         usage = "usage: %prog [OPTIONS] [CONF_FILE]"
         parser = optparse.OptionParser(usage=usage)
@@ -60,7 +44,7 @@ class Main(object):
             parser.print_help()
             sys.exit(1)
 
-        self.set_logging()
+        set_logging(self.options.log_level.lower())
         #beurk
         swsync.utils.CONFIG = conf
         swsync.accounts.main()
diff --git a/swsync/filler.py b/swsync/filler.py
index 107ca57..3ca520d 100644
--- a/swsync/filler.py
+++ b/swsync/filler.py
@@ -25,15 +25,16 @@
 # Read pickled index file (index_path) to process a deletion
 # of objects/containers store in swift for each account then delete
 # accounts.
+
 import os
 import sys
+import copy
+import logging
 import pickle
 import random
 import string
-
-from StringIO import StringIO
-from copy import copy
+import StringIO
 
 from swiftclient import client as sclient
@@ -100,16 +101,16 @@ def create_swift_account(client, pile,
         account = get_rand_str(mode='account_')
         # Create a tenant. In swift this is an account
         account_id = client.tenants.create(account).id
-        print 'Account created %s' % account
+        logging.info('Account created %s' % account)
         r = create_swift_user(client, account, account_id, user_amount)
-        print 'Users created %s in account %s' % (str(r), account)
+        logging.info('Users created %s in account %s' % (str(r), account))
         return account, account_id, r
     created = {}
     # Spawn a greenlet for each account
     i = 0
     for i in range(account_amount):
         i += 1
-        print "[Keystone Start OPs %s/%s]" % (i, account_amount)
+        logging.info("[Keystone Start OPs %s/%s]" % (i, account_amount))
         pile.spawn(_create_account, user_amount)
     for account, account_id, ret in pile:
         index[(account, account_id)] = ret
@@ -130,8 +131,9 @@ def delete_account_content(acc, user):
                         in container_infos[1]]
         # Delete objects
         for obj in object_names:
-            print "Deleting object %s in container %s for account %s" % (
-                obj, container, str(acc))
+            logging.info("\
+            Deleting object %s in container %s for account %s" %
+                         (obj, container, str(acc)))
             cnx.delete_object(container, obj)
 
 
@@ -140,9 +142,9 @@ def delete_account(client, user_id, acc):
     if not isinstance(user_id, list):
         user_id = (user_id,)
     for uid in user_id:
-        print "Delete user with id : %s" % uid
+        logging.info("Delete user with id : %s" % uid)
         client.users.delete(uid)
-    print "Delete account %s" % account_id
+    logging.info("Delete account %s" % account_id)
     client.tenants.delete(account_id)
 
 
@@ -169,8 +171,8 @@ def create_objects(cnx, acc, o_amount, fmax, index_containers):
     containers_d = index_containers[acc]
     for container, details in containers_d.items():
         for i in range(o_amount):
-            print "Put data for container %s" % container
-            f_object = StringIO()
+            logging.info("Put data for container %s" % container)
+            f_object = StringIO.StringIO()
             if not i and o_amount > 1:
                 # Generate an empty object in each container whether
                 # we create more than one object
@@ -186,7 +188,7 @@ def create_objects(cnx, acc, o_amount, fmax, index_containers):
             meta = dict(zip(meta_keys, meta_values))
             data = f_object.read()
             etag = cnx.put_object(container, object_name,
-                                  data, headers=copy(meta))
+                                  data, headers=copy.copy(meta))
             f_object.close()
             obj_info = {'object_info': (object_name, etag, len(data)),
                         'meta': meta}
@@ -206,8 +208,9 @@ def create_containers(cnx, acc, c_amount, index_containers=None):
         meta_values = [customize(m, (i + 1) % 3) for m in
                       map(get_rand_str, ('meta_v_',) * 3)]
         meta = dict(zip(meta_keys, meta_values))
-        print "Create container %s" % container_name.encode('ascii', 'ignore')
-        cnx.put_container(container_name, headers=copy(meta))
+        logging.info("Create container %s" %
+                     container_name.encode('ascii', 'ignore'))
+        cnx.put_container(container_name, headers=copy.copy(meta))
         containers_d[container_name] = {'meta': meta, 'objects': []}
 
 
@@ -222,8 +225,8 @@ def fill_swift(pool, created_account, c_amount,
     i = 0
     for acc, users in created_account.items():
         i += 1
-        print "[Start Swift Account OPs %s/%s]" % \
-              (i, len(created_account.keys()))
+        logging.info("[Start Swift Account OPs %s/%s]" %
+                     (i, len(created_account.keys())))
         pool.spawn_n(_fill_swift_job,
                      acc, users,
                      c_amount, o_amount,
@@ -236,8 +239,8 @@ def load_index():
     if os.path.isfile(index_path):
         try:
             index = pickle.load(file(index_path))
-            print "Load previous index for account %s" % index_path
-        except:
+            logging.info("Load previous index for account %s" % index_path)
+        except Exception:
             index = {}
     else:
         index = {}
@@ -249,8 +252,8 @@ def load_containers_index():
     if os.path.isfile(index_containers_path):
         try:
             index = pickle.load(file(index_containers_path))
-            print "Load previous index for %s" % index_containers_path
-        except:
+            logging.info("Load previous index for %s" % index_containers_path)
+        except Exception:
             index = {}
     else:
         index = {}
diff --git a/swsync/utils.py b/swsync/utils.py
index a5c06ed..1205282 100644
--- a/swsync/utils.py
+++ b/swsync/utils.py
@@ -16,6 +16,7 @@
 # under the License.
 import os
 import ConfigParser
+import logging
 
 CONFIG = None
 
@@ -27,6 +28,23 @@ class ConfigurationError(Exception):
     pass
 
 
+def set_logging(level):
+    logger = logging.getLogger()
+    logger.setLevel({
+        'debug': logging.DEBUG,
+        'info': logging.INFO,
+        'warning': logging.WARNING,
+        'error': logging.ERROR,
+        'critical': logging.CRITICAL}.get(
+            level.lower()
+        ))
+    loghandler = logging.StreamHandler()
+    logger.addHandler(loghandler)
+    logger = logging.LoggerAdapter(logger, 'swfiller')
+    logformat = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
+    loghandler.setFormatter(logformat)
+
+
 def parse_ini(inicfg=None):
     if hasattr(inicfg, 'read'):
         fp = inicfg