Updates to remove _ usage that is not i18n related

gholt 2011-01-19 15:21:57 -08:00
parent 56791413b8
commit 9dd1e2ae84
22 changed files with 60 additions and 56 deletions
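
For context: Swift installs gettext's translation function as the builtin _() at startup (see the gettext.install('swift', unicode=1) call in the diff below), so reusing _ as a throwaway variable shadows the translation function for the rest of that scope. The sketch below is not part of this commit; it is a minimal illustration, with made-up function names, of the hazard and of why renaming discards to _junk avoids it.

import gettext

# Bind the translation function as the builtin _(), as the Swift
# executables do at startup; with no catalogs installed this falls back
# to returning the message unchanged, which is enough for the demo.
gettext.install('example')

def shadowed():
    # Assigning to _ (here as a loop variable) makes _ a local name for
    # the whole function, so the final call no longer reaches gettext's
    # _() and raises "TypeError: 'int' object is not callable".
    for _ in range(3):
        pass
    return _('hello')

def not_shadowed():
    # A discard named _junk leaves the builtin _() untouched.
    for _junk in range(3):
        pass
    return _('hello')  # returns 'hello' via the (null) translation

Hence the mechanical rename throughout this commit: every _ that is a discard variable rather than the i18n function becomes _junk.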

bin/st
View File

@@ -80,7 +80,7 @@ except ImportError:
res = []
consts = {'true': True, 'false': False, 'null': None}
string = '(' + comments.sub('', string) + ')'
- for type, val, _, _, _ in \
+ for type, val, _junk, _junk, _junk in \
generate_tokens(StringIO(string).readline):
if (type == OP and val not in '[]{}:,()-') or \
(type == NAME and val not in consts):
@@ -914,7 +914,7 @@ def st_delete(parser, args, print_queue, error_queue):
segment_queue.put((scontainer, delobj['name']))
if not segment_queue.empty():
segment_threads = [QueueFunctionThread(segment_queue,
- _delete_segment, create_connection()) for _ in
+ _delete_segment, create_connection()) for _junk in
xrange(10)]
for thread in segment_threads:
thread.start()
@@ -972,11 +972,11 @@ def st_delete(parser, args, print_queue, error_queue):
create_connection = lambda: Connection(options.auth, options.user,
options.key, preauthurl=url, preauthtoken=token, snet=options.snet)
object_threads = [QueueFunctionThread(object_queue, _delete_object,
- create_connection()) for _ in xrange(10)]
+ create_connection()) for _junk in xrange(10)]
for thread in object_threads:
thread.start()
container_threads = [QueueFunctionThread(container_queue,
- _delete_container, create_connection()) for _ in xrange(10)]
+ _delete_container, create_connection()) for _junk in xrange(10)]
for thread in container_threads:
thread.start()
if not args:
@@ -1142,11 +1142,11 @@ def st_download(options, args, print_queue, error_queue):
create_connection = lambda: Connection(options.auth, options.user,
options.key, preauthurl=url, preauthtoken=token, snet=options.snet)
object_threads = [QueueFunctionThread(object_queue, _download_object,
- create_connection()) for _ in xrange(10)]
+ create_connection()) for _junk in xrange(10)]
for thread in object_threads:
thread.start()
container_threads = [QueueFunctionThread(container_queue,
- _download_container, create_connection()) for _ in xrange(10)]
+ _download_container, create_connection()) for _junk in xrange(10)]
for thread in container_threads:
thread.start()
if not args:
@@ -1525,7 +1525,8 @@ def st_upload(options, args, print_queue, error_queue):
full_size = getsize(path)
segment_queue = Queue(10000)
segment_threads = [QueueFunctionThread(segment_queue,
- _segment_job, create_connection()) for _ in xrange(10)]
+ _segment_job, create_connection()) for _junk in
+ xrange(10)]
for thread in segment_threads:
thread.start()
segment = 0
@@ -1569,7 +1570,7 @@ def st_upload(options, args, print_queue, error_queue):
'container': scontainer, 'obj': delobj['name']})
if not segment_queue.empty():
segment_threads = [QueueFunctionThread(segment_queue,
- _segment_job, create_connection()) for _ in
+ _segment_job, create_connection()) for _junk in
xrange(10)]
for thread in segment_threads:
thread.start()
@@ -1603,7 +1604,7 @@ def st_upload(options, args, print_queue, error_queue):
create_connection = lambda: Connection(options.auth, options.user,
options.key, preauthurl=url, preauthtoken=token, snet=options.snet)
object_threads = [QueueFunctionThread(object_queue, _object_job,
- create_connection()) for _ in xrange(10)]
+ create_connection()) for _junk in xrange(10)]
for thread in object_threads:
thread.start()
conn = create_connection()

View File

@@ -25,7 +25,7 @@ if __name__ == '__main__':
gettext.install('swift', unicode=1)
if len(argv) != 4 or argv[1] != '-K':
exit('Syntax: %s -K <super_admin_key> <path to auth.db>' % argv[0])
- _, _, super_admin_key, auth_db = argv
+ _junk, _junk, super_admin_key, auth_db = argv
call(['swauth-prep', '-K', super_admin_key])
conn = sqlite3.connect(auth_db)
for account, cfaccount, user, password, admin, reseller_admin in \

View File

@@ -105,7 +105,7 @@ if __name__ == '__main__':
else:
conf = CONF_DEFAULTS
parser.set_defaults(**conf)
- options, _ = parser.parse_args()
+ options, _junk = parser.parse_args()
if options.concurrency is not '':
options.put_concurrency = options.concurrency
options.get_concurrency = options.concurrency

View File

@@ -32,7 +32,7 @@ GRACEFUL_SHUTDOWN_SERVERS = ['account-server', 'container-server',
MAX_DESCRIPTORS = 32768
MAX_MEMORY = (1024 * 1024 * 1024) * 2 # 2 GB
- _, server, command = sys.argv
+ _junk, server, command = sys.argv
if server == 'all':
servers = ALL_SERVERS
else:
@@ -155,7 +155,7 @@ def do_stop(server, graceful=False):
except OSError:
pass
for pid_file, pid in pfiles:
- for _ in xrange(150): # 15 seconds
+ for _junk in xrange(150): # 15 seconds
if not os.path.exists('/proc/%s' % pid):
break
time.sleep(0.1)

View File

@@ -127,7 +127,7 @@ if __name__ == '__main__':
next_report += 2
while need_to_queue >= 1:
container = 'stats_container_dispersion_%s' % uuid4()
- part, _ = container_ring.get_nodes(account, container)
+ part, _junk = container_ring.get_nodes(account, container)
if part in parts_left:
coropool.spawn(put_container, connpool, container, report)
sleep()
@@ -152,7 +152,7 @@ if __name__ == '__main__':
next_report += 2
while need_to_queue >= 1:
obj = 'stats_object_dispersion_%s' % uuid4()
- part, _ = object_ring.get_nodes(account, container, obj)
+ part, _junk = object_ring.get_nodes(account, container, obj)
if part in parts_left:
coropool.spawn(put_object, connpool, container, obj, report)
sleep()

View File

@@ -107,7 +107,7 @@ def audit(coropool, connpool, account, container_ring, object_ring, options):
found = False
error_log = get_error_log('%(ip)s:%(port)s/%(device)s' % node)
try:
- attempts, _ = direct_client.retry(
+ attempts, _junk = direct_client.retry(
direct_client.direct_head_object, node, part,
account, container, obj, error_log=error_log,
retries=options.retries)
@@ -160,7 +160,7 @@ def audit(coropool, connpool, account, container_ring, object_ring, options):
print 'Containers Missing'
print '-' * 78
for container in sorted(containers_missing_replicas.keys()):
- part, _ = container_ring.get_nodes(account, container)
+ part, _junk = container_ring.get_nodes(account, container)
for node in containers_missing_replicas[container]:
print 'http://%s:%s/%s/%s/%s/%s' % (node['ip'], node['port'],
node['device'], part, account, container)
@@ -170,8 +170,8 @@ def audit(coropool, connpool, account, container_ring, object_ring, options):
print 'Objects Missing'
print '-' * 78
for opath in sorted(objects_missing_replicas.keys()):
- _, container, obj = opath.split('/', 2)
- part, _ = object_ring.get_nodes(account, container, obj)
+ _junk, container, obj = opath.split('/', 2)
+ part, _junk = object_ring.get_nodes(account, container, obj)
for node in objects_missing_replicas[opath]:
print 'http://%s:%s/%s/%s/%s/%s/%s' % (node['ip'],
node['port'], node['device'], part, account, container,
@@ -200,7 +200,7 @@ def container_dispersion_report(coropool, connpool, account, container_ring,
for node in nodes:
error_log = get_error_log('%(ip)s:%(port)s/%(device)s' % node)
try:
- attempts, _ = direct_client.retry(
+ attempts, _junk = direct_client.retry(
direct_client.direct_head_container, node,
part, account, container, error_log=error_log,
retries=options.retries)
@@ -284,7 +284,7 @@ def object_dispersion_report(coropool, connpool, account, object_ring, options):
for node in nodes:
error_log = get_error_log('%(ip)s:%(port)s/%(device)s' % node)
try:
- attempts, _ = direct_client.retry(
+ attempts, _junk = direct_client.retry(
direct_client.direct_head_object, node, part,
account, container, obj, error_log=error_log,
retries=options.retries)

View File

@@ -229,7 +229,7 @@ class AccountReaper(Daemon):
if not containers:
break
try:
- for (container, _, _, _) in containers:
+ for (container, _junk, _junk, _junk) in containers:
self.container_pool.spawn(self.reap_container, account,
partition, nodes, container)
self.container_pool.waitall()

View File

@@ -435,7 +435,7 @@ YOU HAVE A FEW OPTIONS:
:param request: webob.Request object
"""
try:
- _, token = split_path(request.path, minsegs=2)
+ _junk, token = split_path(request.path, minsegs=2)
except ValueError:
return HTTPBadRequest()
# Retrieves (TTL, account, user, cfaccount) if valid, False otherwise
@@ -478,7 +478,8 @@ YOU HAVE A FEW OPTIONS:
:param request: webob.Request object
"""
try:
- _, account_name, user_name = split_path(request.path, minsegs=3)
+ _junk, account_name, user_name = \
+ split_path(request.path, minsegs=3)
except ValueError:
return HTTPBadRequest()
create_reseller_admin = \

View File

@@ -76,7 +76,7 @@ except ImportError:
res = []
consts = {'true': True, 'false': False, 'null': None}
string = '(' + comments.sub('', string) + ')'
- for type, val, _, _, _ in \
+ for type, val, _junk, _junk, _junk in \
generate_tokens(StringIO(string).readline):
if (type == OP and val not in '[]{}:,()-') or \
(type == NAME and val not in consts):

View File

@@ -932,7 +932,7 @@ class ContainerBroker(DatabaseBroker):
if not row:
return []
max_rowid = row['ROWID']
- for _ in xrange(min(max_count, max_rowid)):
+ for _junk in xrange(min(max_count, max_rowid)):
row = conn.execute('''
SELECT name FROM object WHERE ROWID >= ? AND +deleted = 0
LIMIT 1
@@ -1435,7 +1435,7 @@ class AccountBroker(DatabaseBroker):
if not row:
return []
max_rowid = row['ROWID']
- for _ in xrange(min(max_count, max_rowid)):
+ for _junk in xrange(min(max_count, max_rowid)):
row = conn.execute('''
SELECT name FROM container WHERE
ROWID >= ? AND +deleted = 0

View File

@@ -299,8 +299,8 @@ class Swauth(object):
req.start_time = time()
handler = None
try:
- version, account, user, _ = split_path(req.path_info, minsegs=1,
- maxsegs=4, rest_with_last=True)
+ version, account, user, _junk = split_path(req.path_info,
+ minsegs=1, maxsegs=4, rest_with_last=True)
except ValueError:
return HTTPNotFound(request=req)
if version in ('v1', 'v1.0', 'auth'):

View File

@@ -399,7 +399,8 @@ class Swift3Middleware(object):
h += header.lower() + ":" + str(req.headers[header]) + "\n"
h += req.path
try:
- account, user, _ = req.headers['Authorization'].split(' ')[-1].split(':')
+ account, user, _junk = \
+ req.headers['Authorization'].split(' ')[-1].split(':')
except:
return None, None
token = base64.urlsafe_b64encode(h)

View File

@@ -239,7 +239,7 @@ class RingBuilder(object):
(sum(d['parts'] for d in self.devs if d is not None),
self.parts * self.replicas))
if stats:
- dev_usage = array('I', (0 for _ in xrange(len(self.devs))))
+ dev_usage = array('I', (0 for _junk in xrange(len(self.devs))))
for part in xrange(self.parts):
zones = {}
for replica in xrange(self.replicas):
@@ -342,8 +342,9 @@ class RingBuilder(object):
'%08x.%04x' % (dev['parts_wanted'], randint(0, 0xffff))
available_devs = sorted((d for d in self.devs if d is not None),
key=lambda x: x['sort_key'])
- self._replica2part2dev = [array('H') for _ in xrange(self.replicas)]
- for _ in xrange(self.parts):
+ self._replica2part2dev = \
+ [array('H') for _junk in xrange(self.replicas)]
+ for _junk in xrange(self.parts):
other_zones = array('H')
for replica in xrange(self.replicas):
index = len(available_devs) - 1
@@ -365,7 +366,7 @@ class RingBuilder(object):
index = mid + 1
available_devs.insert(index, dev)
other_zones.append(dev['zone'])
- self._last_part_moves = array('B', (0 for _ in xrange(self.parts)))
+ self._last_part_moves = array('B', (0 for _junk in xrange(self.parts)))
self._last_part_moves_epoch = int(time())
for dev in self.devs:
del dev['sort_key']

View File

@@ -577,7 +577,7 @@ class ObjectController(object):
if suffix:
recalculate_hashes(path, suffix.split('-'))
return Response()
- _, hashes = get_hashes(path, do_listdir=False)
+ _junk, hashes = get_hashes(path, do_listdir=False)
return Response(body=pickle.dumps(hashes))
def __call__(self, env, start_response):

View File

@@ -87,11 +87,11 @@ class AccountStat(Daemon):
broker = AccountBroker(db_path)
if not broker.is_deleted():
(account_name,
- _, _, _,
+ _junk, _junk, _junk,
container_count,
object_count,
bytes_used,
- _, _) = broker.get_info()
+ _junk, _junk) = broker.get_info()
line_data = '"%s",%d,%d,%d\n' % (
account_name, container_count,
object_count, bytes_used)

View File

@@ -365,7 +365,7 @@ def multiprocess_collate(processor_args, logs_to_process, worker_count):
results = []
in_queue = multiprocessing.Queue()
out_queue = multiprocessing.Queue()
- for _ in range(worker_count):
+ for _junk in range(worker_count):
p = multiprocessing.Process(target=collate_worker,
args=(processor_args,
in_queue,
@@ -374,7 +374,7 @@ def multiprocess_collate(processor_args, logs_to_process, worker_count):
results.append(p)
for x in logs_to_process:
in_queue.put(x)
- for _ in range(worker_count):
+ for _junk in range(worker_count):
in_queue.put(None)
count = 0
while True:

View File

@@ -26,7 +26,7 @@ class StatsLogProcessor(object):
data_object_name):
'''generate hourly groupings of data from one stats log file'''
account_totals = {}
- year, month, day, hour, _ = data_object_name.split('/')
+ year, month, day, hour, _junk = data_object_name.split('/')
for line in obj_stream:
if not line:
continue

View File

@@ -119,7 +119,7 @@ class TestAuthServer(unittest.TestCase):
headers={'X-Storage-User': 'tester',
'X-Storage-Pass': 'testing'}))
token = res.headers['x-storage-token']
- ttl, _, _, _ = self.controller.validate_token(token)
+ ttl, _junk, _junk, _junk = self.controller.validate_token(token)
self.assert_(ttl > 0, repr(ttl))
def test_validate_token_expired(self):
@@ -134,7 +134,7 @@ class TestAuthServer(unittest.TestCase):
headers={'X-Storage-User': 'tester',
'X-Storage-Pass': 'testing'}))
token = res.headers['x-storage-token']
- ttl, _, _, _ = self.controller.validate_token(token)
+ ttl, _junk, _junk, _junk = self.controller.validate_token(token)
self.assert_(ttl > 0, repr(ttl))
auth_server.time = lambda: 1 + self.controller.token_life
self.assertEquals(self.controller.validate_token(token), False)
@@ -318,7 +318,7 @@ class TestAuthServer(unittest.TestCase):
headers={'X-Storage-User': 'tester',
'X-Storage-Pass': 'testing'}))
token = res.headers['x-storage-token']
- ttl, _, _, _ = self.controller.validate_token(token)
+ ttl, _junk, _junk, _junk = self.controller.validate_token(token)
self.assert_(ttl > 0, repr(ttl))
def test_auth_SOSO_good_Mosso_headers(self):
@@ -330,7 +330,7 @@ class TestAuthServer(unittest.TestCase):
headers={'X-Auth-User': 'test:tester',
'X-Auth-Key': 'testing'}))
token = res.headers['x-storage-token']
- ttl, _, _, _ = self.controller.validate_token(token)
+ ttl, _junk, _junk, _junk = self.controller.validate_token(token)
self.assert_(ttl > 0, repr(ttl))
def test_auth_SOSO_bad_Mosso_headers(self):
@@ -438,7 +438,7 @@ class TestAuthServer(unittest.TestCase):
headers={'X-Auth-User': 'test:tester',
'X-Auth-Key': 'testing'}))
token = res.headers['x-storage-token']
- ttl, _, _, _ = self.controller.validate_token(token)
+ ttl, _junk, _junk, _junk = self.controller.validate_token(token)
self.assert_(ttl > 0, repr(ttl))
def test_auth_Mosso_good_SOSO_header_names(self):
@@ -450,7 +450,7 @@ class TestAuthServer(unittest.TestCase):
headers={'X-Storage-User': 'test:tester',
'X-Storage-Pass': 'testing'}))
token = res.headers['x-storage-token']
- ttl, _, _, _ = self.controller.validate_token(token)
+ ttl, _junk, _junk, _junk = self.controller.validate_token(token)
self.assert_(ttl > 0, repr(ttl))
def test_basic_logging(self):
@@ -712,7 +712,7 @@ class TestAuthServer(unittest.TestCase):
res = self.controller.handle_auth(Request.blank('/v1.0',
environ={'REQUEST_METHOD': 'GET'},
headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'pas'}))
- _, _, _, stgact = \
+ _junk, _junk, _junk, stgact = \
self.controller.validate_token(res.headers['x-auth-token'])
self.assertEquals(stgact, '')
@@ -723,7 +723,7 @@ class TestAuthServer(unittest.TestCase):
res = self.controller.handle_auth(Request.blank('/v1.0',
environ={'REQUEST_METHOD': 'GET'},
headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'pas'}))
- _, _, _, vstgact = \
+ _junk, _junk, _junk, vstgact = \
self.controller.validate_token(res.headers['x-auth-token'])
self.assertEquals(stgact, vstgact)
@@ -734,7 +734,7 @@ class TestAuthServer(unittest.TestCase):
res = self.controller.handle_auth(Request.blank('/v1.0',
environ={'REQUEST_METHOD': 'GET'},
headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'pas'}))
- _, _, _, stgact = \
+ _junk, _junk, _junk, stgact = \
self.controller.validate_token(res.headers['x-auth-token'])
self.assertEquals(stgact, '.reseller_admin')

View File

@@ -95,7 +95,7 @@ class Logger(object):
self.error_value = (msg, args, kwargs)
def exception(self, msg, *args, **kwargs):
- _, exc, _ = sys.exc_info()
+ _junk, exc, _junk = sys.exc_info()
self.exception_value = (msg,
'%s %s' % (exc.__class__.__name__, str(exc)), args, kwargs)

View File

@@ -35,10 +35,10 @@ class TestHttpHelpers(unittest.TestCase):
def test_http_connection(self):
url = 'http://www.test.com'
- _, conn = c.http_connection(url)
+ _junk, conn = c.http_connection(url)
self.assertTrue(isinstance(conn, c.HTTPConnection))
url = 'https://www.test.com'
- _, conn = c.http_connection(url)
+ _junk, conn = c.http_connection(url)
self.assertTrue(isinstance(conn, c.HTTPSConnection))
url = 'ftp://www.test.com'
self.assertRaises(c.ClientException, c.http_connection, url)

View File

@@ -142,7 +142,7 @@ class TestContainerUpdater(unittest.TestCase):
bindsock = listen(('127.0.0.1', 0))
def spawn_accepts():
events = []
- for _ in xrange(2):
+ for _junk in xrange(2):
sock, addr = bindsock.accept()
events.append(spawn(accept, sock, addr, 201))
return events
@@ -195,7 +195,7 @@ class TestContainerUpdater(unittest.TestCase):
bindsock = listen(('127.0.0.1', 0))
def spawn_accepts():
events = []
- for _ in xrange(2):
+ for _junk in xrange(2):
with Timeout(3):
sock, addr = bindsock.accept()
events.append(spawn(accept, sock, addr))

View File

@@ -1154,7 +1154,7 @@ class TestObjectController(unittest.TestCase):
self.assert_status_map(controller.HEAD, (503, 200, 200), 200)
self.assertEquals(controller.app.object_ring.devs[0]['errors'], 2)
self.assert_('last_error' in controller.app.object_ring.devs[0])
- for _ in xrange(self.app.error_suppression_limit):
+ for _junk in xrange(self.app.error_suppression_limit):
self.assert_status_map(controller.HEAD, (503, 503, 503), 503)
self.assertEquals(controller.app.object_ring.devs[0]['errors'],
self.app.error_suppression_limit + 1)
@@ -2590,7 +2590,7 @@ class TestContainerController(unittest.TestCase):
self.assertEquals(
controller.app.container_ring.devs[0]['errors'], 2)
self.assert_('last_error' in controller.app.container_ring.devs[0])
- for _ in xrange(self.app.error_suppression_limit):
+ for _junk in xrange(self.app.error_suppression_limit):
self.assert_status_map(controller.HEAD,
(200, 503, 503, 503), 503)
self.assertEquals(controller.app.container_ring.devs[0]['errors'],