67
bin/swsync
67
bin/swsync
@@ -1,11 +1,70 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- encoding: utf-8 -*-
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
|
||||
#
|
||||
# Author: Chmouel Boudjnah <chmouel@enovance.com>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
|
||||
import logging
|
||||
import optparse
|
||||
|
||||
import swsync.accounts
|
||||
from swsync.utils import parse_ini, ConfigurationError
|
||||
|
||||
class Main(object):
    """Command-line entry point for swsync.

    Parses options, loads the INI configuration, configures logging and
    then runs the account synchronization.
    """

    def __init__(self):
        # Placeholder only: replaced by the optparse.Values instance
        # returned by parse_args() in main().
        self.options = {}

    def set_logging(self):
        """Configure the root logger from the --log-level option."""
        logger = logging.getLogger()
        # BUG FIX: unknown level names used to make .get() return None,
        # and logger.setLevel(None) raises; fall back to INFO instead.
        logger.setLevel({
            'debug': logging.DEBUG,
            'info': logging.INFO,
            'warning': logging.WARNING,
            'error': logging.ERROR,
            'critical': logging.CRITICAL,
        }.get(self.options.log_level.lower(), logging.INFO))
        loghandler = logging.StreamHandler()
        logger.addHandler(loghandler)
        logger = logging.LoggerAdapter(logger, 'swsync')
        logformat = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        loghandler.setFormatter(logformat)

    def main(self):
        """Parse arguments, load configuration and start the sync.

        Exits with status 1 when no CONF_FILE argument is given and the
        default configuration cannot be parsed.
        """
        usage = "usage: %prog [OPTIONS] [CONF_FILE]"
        parser = optparse.OptionParser(usage=usage)
        parser.add_option(
            '-l', '--log-level',
            dest='log_level',
            default='info',
            # BUG FIX: the previous help text ("Number of containers to
            # distribute objects among") was copy-pasted from an
            # unrelated option.
            help='Logging verbosity: debug, info, warning, error '
                 'or critical (default: info)')
        self.options, args = parser.parse_args()
        if args:
            conf = parse_ini(args[0])
        else:
            try:
                conf = parse_ini()
            except ConfigurationError:
                parser.print_help()
                sys.exit(1)

        self.set_logging()
        # TODO(review): passing the configuration through a module-level
        # global is fragile; consider threading it through explicitly.
        swsync.utils.CONFIG = conf
        swsync.accounts.main()
|
||||
|
||||
if __name__ == '__main__':
    # BUG FIX: a stray swsync.accounts.main() call here ran the whole
    # synchronization BEFORE options were parsed, configuration loaded
    # and logging configured, and then Main.main() ran it a second
    # time. Delegate to Main only.
    m = Main()
    m.main()
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
# under the License.
|
||||
import time
|
||||
import datetime
|
||||
import logging
|
||||
|
||||
import swiftclient
|
||||
import dateutil.relativedelta
|
||||
@@ -62,7 +63,7 @@ class Accounts(object):
|
||||
full_listing=True))
|
||||
|
||||
for container in orig_containers:
|
||||
print container
|
||||
logging.info("Syncronizing %s: %s", container['name'], container)
|
||||
dt1 = datetime.datetime.fromtimestamp(time.time())
|
||||
self.container_cls.sync(orig_storage_cnx,
|
||||
orig_storage_url,
|
||||
@@ -74,10 +75,10 @@ class Accounts(object):
|
||||
dt2 = datetime.datetime.fromtimestamp(time.time())
|
||||
rd = dateutil.relativedelta.relativedelta(dt2, dt1)
|
||||
#TODO(chmou): use logging
|
||||
print "%d hours, %d minutes and %d seconds" % (rd.hours,
|
||||
rd.minutes,
|
||||
rd.seconds)
|
||||
print
|
||||
logging.info("%s done: %d hours, %d minutes and %d seconds",
|
||||
container['name'],
|
||||
rd.hours,
|
||||
rd.minutes, rd.seconds)
|
||||
|
||||
def process(self):
|
||||
"""Process all keystone accounts to sync."""
|
||||
|
||||
@@ -14,6 +14,8 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import logging
|
||||
|
||||
import swiftclient
|
||||
import eventlet
|
||||
|
||||
@@ -63,7 +65,7 @@ class Containers(object):
|
||||
pool = eventlet.GreenPool(size=self.max_gthreads)
|
||||
pile = eventlet.GreenPile(pool)
|
||||
for obj in diff:
|
||||
print obj
|
||||
logging.info("sending: %s ts:%s", obj[1], obj[0])
|
||||
pile.spawn(self.objects_cls,
|
||||
orig_storage_url,
|
||||
orig_token,
|
||||
|
||||
@@ -93,6 +93,3 @@ def sync_object(orig_storage_url, orig_token, dest_storage_url,
|
||||
swiftclient.put_object(sync_to, name=object_name,
|
||||
headers=post_headers,
|
||||
contents=_Iter2FileLikeObject(orig_body))
|
||||
|
||||
if __name__ == '__main__':
|
||||
pass
|
||||
|
||||
@@ -73,6 +73,24 @@ class TestObject(test_base.TestCase):
|
||||
swobjects.get_object,
|
||||
self.orig_storage_url, "token", "cont1", "obj1")
|
||||
|
||||
def test_sync_object(self):
    """sync_object must stream the origin body into a destination PUT,
    keeping the object name and carrying an x-auth-token header."""
    payload = ("X" * 3) * 1024
    self.stubs.Set(swobjects, 'http_connect_raw',
                   fake_http_connect(200, payload))

    def fake_put(url, name=None, headers=None, contents=None):
        # Destination object keeps the origin name and auth header.
        self.assertEqual('obj1', name)
        self.assertIn('x-auth-token', headers)
        # Body is wrapped in the file-like adapter and streamed whole.
        self.assertIsInstance(contents, swobjects._Iter2FileLikeObject)
        self.assertEqual(len(contents.read()), len(payload))

    self.stubs.Set(swobjects.swiftclient, 'put_object', fake_put)

    swobjects.sync_object(self.orig_storage_url, "token",
                          self.dest_storage_url, "token",
                          "cont1", ("etag", "obj1"))
|
||||
|
||||
def test_get_object_chunked(self):
|
||||
chunk_size = 32
|
||||
expected_chunk_time = 3
|
||||
|
||||
Reference in New Issue
Block a user