Merge pull request #22 from morucci/issue-logger-10
Fix issue #10 and validate PEP8
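This change replaces the per-script logger setup and the bare print statements with a shared swsync.utils.set_logging() helper plus the standard logging module. Below is a minimal usage sketch of the new helper, mirroring what bin/swfiller does after this patch; it assumes the swsync package is importable, and the demo() wrapper and its flag wiring are illustrative only, not part of the patch:

    import argparse
    import logging

    from swsync import utils  # set_logging(level) is added in swsync/utils.py below


    def demo():
        # Mirror the new '-d/--log-level' flag added to bin/swfiller.
        parser = argparse.ArgumentParser()
        parser.add_argument('-d', '--log-level', dest='log_level', default='info',
                            help='Specify the log level')
        args = parser.parse_args()

        # Configure the root logger once, then log through logging instead of print.
        utils.set_logging(args.log_level)
        logging.info("logger configured at level %s", args.log_level)


    if __name__ == '__main__':
        demo()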
bin/swfiller (56 changed lines)

@@ -2,6 +2,7 @@

 # -*- encoding: utf-8 -*-
 import argparse
+import logging
 import os
 import pickle
 import sys
@@ -10,10 +11,8 @@ import eventlet
 from keystoneclient.v2_0 import client as ksclient

 sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
-from swsync.utils import get_config
-from swsync.filler import (load_index, load_containers_index,
-                           create_swift_account, fill_swift,
-                           delete_account_content, delete_account)
+from swsync import filler
+from swsync import utils


 def main():
@@ -39,8 +38,14 @@ def main():
     parser.add_argument('-s',
                         help='Specify the MAX file size. Files '
                              'will be from 1024 Bytes to MAX Bytes')
+    parser.add_argument('-d', '--log-level',
+                        dest='log_level',
+                        default='info',
+                        help='Specify the log level')
     args = parser.parse_args()

+    utils.set_logging(args.log_level)
+
     if not args.create and not args.delete:
         parser.print_help()
         sys.exit(1)
@@ -48,34 +53,35 @@ def main():
         parser.print_help()
         sys.exit(1)

-    concurrency = int(get_config('filler', 'concurrency'))
+    concurrency = int(utils.get_config('filler', 'concurrency'))
     pile = eventlet.GreenPile(concurrency)
     pool = eventlet.GreenPool(concurrency)

-    _config = get_config('auth',
+    _config = utils.get_config('auth',
                                'keystone_origin_admin_credentials').split(':')
     tenant_name, username, password = _config
     client = ksclient.Client(
-        auth_url=get_config('auth', 'keystone_origin'),
+        auth_url=utils.get_config('auth', 'keystone_origin'),
         username=username,
         password=password,
         tenant_name=tenant_name)

-    index_path = get_config('filler', 'index_path')
-    index_containers_path = get_config('filler', 'index_containers_path')
+    index_path = utils.get_config('filler', 'index_path')
+    index_containers_path = utils.get_config('filler', 'index_containers_path')

     if args.l:
-        index = load_index()
-        index_containers = load_containers_index()
+        index = filler.load_index()
+        index_containers = filler.load_containers_index()
     else:
         index = {}
         index_containers = {}
     if args.create:
         if args.a is None or not args.a.isdigit():
-            print("Provide account amount by setting '-a' option")
+            logging.info("Provide account amount by setting '-a' option")
             sys.exit(1)
         if args.u is None or not args.u.isdigit():
-            print("Provide user by account amount by setting '-u' option")
+            logging.info("Provide user by account "
+                         "amount by setting '-u' option")
             sys.exit(1)
         if args.s is None:
             fmax = 1024
@@ -84,29 +90,29 @@ def main():
                 fmax = max(1024, int(args.s))
             else:
                 fmax = 1024
-        created = create_swift_account(client, pile,
+        created = filler.create_swift_account(client, pile,
                                               int(args.a),
                                               int(args.u), index=index)
         if args.f is not None and args.c is not None:
             if args.f.isdigit() and args.c.isdigit():
-                fill_swift(pool, created, int(args.c),
+                filler.fill_swift(pool, created, int(args.c),
                                   int(args.f), fmax,
                                   index_containers=index_containers)
             else:
-                print("'-c' and '-f' options must be integers")
+                logging.info("'-c' and '-f' options must be integers")
                 sys.exit(1)
         pickle.dump(index, open(index_path, 'w'))
         pickle.dump(index_containers, open(index_containers_path, 'w'))
     if args.delete:
-        index = load_index()
+        index = filler.load_index()
         for k, v in index.items():
             user_info_list = [user[1] for user in v]
             # Take the first user we find
-            delete_account_content(k, v[0])
-            delete_account(client, user_info_list, k)
+            filler.delete_account_content(k, v[0])
+            filler.delete_account(client, user_info_list, k)
             del index[k]
         if not os.path.exists(index_path):
-            print "No index_path to load."
+            logging.info("No index_path to load.")
             sys.exit(1)
         pickle.dump(index, open(index_path, 'w'))

bin/swsync (22 changed lines)

@@ -16,32 +16,16 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 import sys
-import logging
 import optparse

 import swsync.accounts
-from swsync.utils import parse_ini, ConfigurationError
+from swsync.utils import parse_ini, ConfigurationError, set_logging


 class Main(object):
     def __init__(self):
         self.options = {}

-    def set_logging(self):
-        logger = logging.getLogger()
-        logger.setLevel({
-            'debug': logging.DEBUG,
-            'info': logging.INFO,
-            'warning': logging.WARNING,
-            'error': logging.ERROR,
-            'critical': logging.CRITICAL}.get(
-                self.options.log_level.lower()
-            ))
-        loghandler = logging.StreamHandler()
-        logger.addHandler(loghandler)
-        logger = logging.LoggerAdapter(logger, 'swsync')
-        logformat = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
-        loghandler.setFormatter(logformat)

     def main(self):
         usage = "usage: %prog [OPTIONS] [CONF_FILE]"
         parser = optparse.OptionParser(usage=usage)
@@ -60,7 +44,7 @@ class Main(object):
             parser.print_help()
             sys.exit(1)

-        self.set_logging()
+        set_logging(self.options.log_level.lower())
         #beurk
         swsync.utils.CONFIG = conf
         swsync.accounts.main()
swsync/filler.py

@@ -25,15 +25,16 @@
 # Read pickled index file (index_path) to process a deletion
 # of objects/containers store in swift for each account then delete
 # accounts.

 import os
 import sys

+import copy
+import logging
 import pickle
 import random
 import string
-from StringIO import StringIO
-from copy import copy
+import StringIO

 from swiftclient import client as sclient

@@ -100,16 +101,16 @@ def create_swift_account(client, pile,
         account = get_rand_str(mode='account_')
         # Create a tenant. In swift this is an account
         account_id = client.tenants.create(account).id
-        print 'Account created %s' % account
+        logging.info('Account created %s' % account)
         r = create_swift_user(client, account, account_id, user_amount)
-        print 'Users created %s in account %s' % (str(r), account)
+        logging.info('Users created %s in account %s' % (str(r), account))
         return account, account_id, r
     created = {}
     # Spawn a greenlet for each account
     i = 0
     for i in range(account_amount):
         i += 1
-        print "[Keystone Start OPs %s/%s]" % (i, account_amount)
+        logging.info("[Keystone Start OPs %s/%s]" % (i, account_amount))
         pile.spawn(_create_account, user_amount)
     for account, account_id, ret in pile:
         index[(account, account_id)] = ret
@@ -130,8 +131,9 @@ def delete_account_content(acc, user):
                         in container_infos[1]]
         # Delete objects
         for obj in object_names:
-            print "Deleting object %s in container %s for account %s" % (
-                obj, container, str(acc))
+            logging.info("\
+Deleting object %s in container %s for account %s" %
+                         (obj, container, str(acc)))
             cnx.delete_object(container, obj)


@@ -140,9 +142,9 @@ def delete_account(client, user_id, acc):
     if not isinstance(user_id, list):
         user_id = (user_id,)
     for uid in user_id:
-        print "Delete user with id : %s" % uid
+        logging.info("Delete user with id : %s" % uid)
         client.users.delete(uid)
-    print "Delete account %s" % account_id
+    logging.info("Delete account %s" % account_id)
     client.tenants.delete(account_id)


@@ -169,8 +171,8 @@ def create_objects(cnx, acc, o_amount, fmax, index_containers):
     containers_d = index_containers[acc]
     for container, details in containers_d.items():
         for i in range(o_amount):
-            print "Put data for container %s" % container
-            f_object = StringIO()
+            logging.info("Put data for container %s" % container)
+            f_object = StringIO.StringIO()
             if not i and o_amount > 1:
                 # Generate an empty object in each container whether
                 # we create more than one object
@@ -186,7 +188,7 @@ def create_objects(cnx, acc, o_amount, fmax, index_containers):
             meta = dict(zip(meta_keys, meta_values))
             data = f_object.read()
             etag = cnx.put_object(container, object_name,
-                                  data, headers=copy(meta))
+                                  data, headers=copy.copy(meta))
             f_object.close()
             obj_info = {'object_info':
                         (object_name, etag, len(data)), 'meta': meta}
@@ -206,8 +208,9 @@ def create_containers(cnx, acc, c_amount, index_containers=None):
         meta_values = [customize(m, (i + 1) % 3) for m in
                        map(get_rand_str, ('meta_v_',) * 3)]
         meta = dict(zip(meta_keys, meta_values))
-        print "Create container %s" % container_name.encode('ascii', 'ignore')
-        cnx.put_container(container_name, headers=copy(meta))
+        logging.info("Create container %s" %
+                     container_name.encode('ascii', 'ignore'))
+        cnx.put_container(container_name, headers=copy.copy(meta))
         containers_d[container_name] = {'meta': meta, 'objects': []}


@@ -222,8 +225,8 @@ def fill_swift(pool, created_account, c_amount,
     i = 0
     for acc, users in created_account.items():
         i += 1
-        print "[Start Swift Account OPs %s/%s]" % \
-            (i, len(created_account.keys()))
+        logging.info("[Start Swift Account OPs %s/%s]" %
+                     (i, len(created_account.keys())))
         pool.spawn_n(_fill_swift_job,
                      acc, users,
                      c_amount, o_amount,
@@ -236,8 +239,8 @@ def load_index():
     if os.path.isfile(index_path):
         try:
             index = pickle.load(file(index_path))
-            print "Load previous index for account %s" % index_path
-        except:
+            logging.info("Load previous index for account %s" % index_path)
+        except Exception:
             index = {}
     else:
         index = {}
@@ -249,8 +252,8 @@ def load_containers_index():
     if os.path.isfile(index_containers_path):
         try:
             index = pickle.load(file(index_containers_path))
-            print "Load previous index for %s" % index_containers_path
-        except:
+            logging.info("Load previous index for %s" % index_containers_path)
+        except Exception:
             index = {}
     else:
         index = {}
swsync/utils.py

@@ -16,6 +16,7 @@
 # under the License.
 import os
 import ConfigParser
+import logging


 CONFIG = None
@@ -27,6 +28,23 @@ class ConfigurationError(Exception):
     pass


+def set_logging(level):
+    logger = logging.getLogger()
+    logger.setLevel({
+        'debug': logging.DEBUG,
+        'info': logging.INFO,
+        'warning': logging.WARNING,
+        'error': logging.ERROR,
+        'critical': logging.CRITICAL}.get(
+            level.lower()
+        ))
+    loghandler = logging.StreamHandler()
+    logger.addHandler(loghandler)
+    logger = logging.LoggerAdapter(logger, 'swfiller')
+    logformat = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
+    loghandler.setFormatter(logformat)
+
+
 def parse_ini(inicfg=None):
     if hasattr(inicfg, 'read'):
         fp = inicfg