i18n stuff
@@ -90,7 +90,7 @@ class AuthController(object):
         self.logger = get_logger(conf)
         self.super_admin_key = conf.get('super_admin_key')
         if not self.super_admin_key:
-            msg = 'No super_admin_key set in conf file! Exiting.'
+            msg = _('No super_admin_key set in conf file! Exiting.')
             try:
                 self.logger.critical(msg)
             except:
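Every hunk in this commit wraps a user-facing log or error string in the gettext-style _() marker. The diff does not show how _ itself is bound, so the following is only a minimal sketch of a typical binding, assuming a "swift" message domain and a locale/ directory (both names are illustrative, not taken from this commit):

import gettext

# Bind _ to a catalog for the assumed "swift" domain; fallback=True keeps the
# original English strings when no compiled .mo catalog is installed.
translations = gettext.translation('swift', localedir='locale', fallback=True)
_ = translations.gettext

# With no catalog present, _() is a pass-through and logging behaves as before.
print(_('No super_admin_key set in conf file! Exiting.'))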
@@ -206,8 +206,9 @@ YOU HAVE A FEW OPTIONS:
             resp = conn.getresponse()
             resp.read()
             if resp.status // 100 != 2:
-                self.logger.error('ERROR attempting to create account %s: %s %s' %
-                    (url, resp.status, resp.reason))
+                self.logger.error(_('ERROR attempting to create account %(url)s:' \
+                    ' %(status)s %(reason)s') %
+                    {'url': url, 'status': resp.status, 'reason': resp.reason})
                 return False
         return account_name
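Besides adding _(), this hunk switches from positional %s substitutions to named %(...)s placeholders. The sketch below (the URL, status values, and the German catalog entry are invented for illustration) shows why that helps translators: named placeholders can be reordered in a translation without breaking the % formatting, which positional arguments do not allow. Note also that the backslash-continued adjacent string literals are concatenated at compile time, so the catalog sees a single message id.

# Message id as the extraction tools would see it (one concatenated string).
msgid = ('ERROR attempting to create account %(url)s:'
         ' %(status)s %(reason)s')
# Hypothetical translation that moves the status and reason to the front.
msgstr = '%(status)s %(reason)s: Fehler beim Anlegen des Kontos %(url)s'

values = {'url': 'http://127.0.0.1/v1/acct', 'status': 503,
          'reason': 'Service Unavailable'}
print(msgid % values)
print(msgstr % values)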
@@ -320,7 +321,7 @@ YOU HAVE A FEW OPTIONS:
                 (account, user)).fetchone()
             if row:
                 self.logger.info(
-                    'ALREADY EXISTS create_user(%s, %s, _, %s, %s) [%.02f]' %
+                    _('ALREADY EXISTS create_user(%s, %s, _, %s, %s) [%.02f]') %
                     (repr(account), repr(user), repr(admin),
                      repr(reseller_admin), time() - begin))
                 return 'already exists'
@@ -334,7 +335,7 @@ YOU HAVE A FEW OPTIONS:
             account_hash = self.add_storage_account()
             if not account_hash:
                 self.logger.info(
-                    'FAILED create_user(%s, %s, _, %s, %s) [%.02f]' %
+                    _('FAILED create_user(%s, %s, _, %s, %s) [%.02f]') %
                     (repr(account), repr(user), repr(admin),
                      repr(reseller_admin), time() - begin))
                 return False
@@ -347,7 +348,7 @@ YOU HAVE A FEW OPTIONS:
                 admin and 't' or '', reseller_admin and 't' or ''))
             conn.commit()
         self.logger.info(
-            'SUCCESS create_user(%s, %s, _, %s, %s) = %s [%.02f]' %
+            _('SUCCESS create_user(%s, %s, _, %s, %s) = %s [%.02f]') %
             (repr(account), repr(user), repr(admin), repr(reseller_admin),
              repr(url), time() - begin))
         return url
@@ -611,7 +612,8 @@ YOU HAVE A FEW OPTIONS:
                 return HTTPBadRequest(request=env)(env, start_response)
             response = handler(req)
         except:
-            self.logger.exception('ERROR Unhandled exception in ReST request')
+            self.logger.exception(
+                _('ERROR Unhandled exception in ReST request'))
             return HTTPServiceUnavailable(request=req)(env, start_response)
         trans_time = '%.4f' % (time() - start_time)
         if not response.content_length and response.app_iter and \
@@ -86,8 +86,10 @@ class CNAMELookupMiddleware(object):
                     break
                 elif found_domain.endswith(self.storage_domain):
                     # Found it!
-                    self.logger.info('Mapped %s to %s' % (given_domain,
-                                                          found_domain))
+                    self.logger.info(
+                        _('Mapped %(given_domain)s to %(found_domain)s') %
+                        {'given_domain': given_domain,
+                         'found_domain': found_domain})
                     if port:
                         env['HTTP_HOST'] = ':'.join([found_domain, port])
                     else:
@@ -96,8 +98,10 @@ class CNAMELookupMiddleware(object):
                     break
                 else:
                     # try one more deep in the chain
-                    self.logger.debug('Following CNAME chain for %s to %s' %
-                                      (given_domain, found_domain))
+                    self.logger.debug(_('Following CNAME chain for ' \
+                        '%(given_domain)s to %(found_domain)s') %
+                        {'given_domain': given_domain,
+                         'found_domain': found_domain})
                     a_domain = found_domain
             if error:
                 if found_domain:
@@ -453,7 +453,7 @@ def capture_stdio(logger, **kwargs):
     """
     # log uncaught exceptions
     sys.excepthook = lambda * exc_info: \
-        logger.critical('UNCAUGHT EXCEPTION', exc_info=exc_info)
+        logger.critical(_('UNCAUGHT EXCEPTION'), exc_info=exc_info)

     # collect stdio file desc not in use for logging
     stdio_fds = [0, 1, 2]
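The capture_stdio hunk routes uncaught exceptions through the logger by replacing sys.excepthook. A stand-alone sketch of that pattern (the basicConfig setup and logger name are illustrative, not Swift's get_logger wiring):

import logging
import sys

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('demo')

# sys.excepthook is called with (type, value, traceback); packing them with
# *exc_info lets the tuple be passed straight to logging's exc_info keyword.
sys.excepthook = lambda *exc_info: \
    logger.critical('UNCAUGHT EXCEPTION', exc_info=exc_info)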
@@ -384,8 +384,8 @@ class Controller(object):
                     if attempts_left <= 0:
                         break
             except:
-                self.exception_occurred(node, 'Account',
-                    'Trying to get account info for %s' % path)
+                self.exception_occurred(node, _('Account'),
+                    _('Trying to get account info for %s') % path)
         if self.app.memcache and result_code in (200, 404):
             if result_code == 200:
                 cache_timeout = self.app.recheck_account_existence
@@ -462,8 +462,8 @@ class Controller(object):
                     if attempts_left <= 0:
                         break
             except:
-                self.exception_occurred(node, 'Container',
-                    'Trying to get container info for %s' % path)
+                self.exception_occurred(node, _('Container'),
+                    _('Trying to get container info for %s') % path)
         if self.app.memcache and result_code in (200, 404):
             if result_code == 200:
                 cache_timeout = self.app.recheck_container_existence
@@ -594,7 +594,8 @@ class Controller(object):
                     source = conn.getresponse()
             except:
                 self.exception_occurred(node, server_type,
-                    'Trying to %s %s' % (req.method, req.path))
+                    _('Trying to %(method)s %(path)s') %
+                    {'method': req.method, 'path': req.path})
                 continue
             if source.status == 507:
                 self.error_limit(node)
@@ -624,8 +625,8 @@ class Controller(object):
                         res.client_disconnect = True
                         self.app.logger.info(_('Client disconnected on read'))
                     except:
-                        self.exception_occurred(node, 'Object',
-                            'Trying to read during GET of %s' % req.path)
+                        self.exception_occurred(node, _('Object'),
+                            _('Trying to read during GET of %s') % req.path)
                         raise
                 res.app_iter = file_iter()
                 update_headers(res, source.getheaders())
@@ -648,8 +649,9 @@ class Controller(object):
                 reasons.append(source.reason)
                 bodies.append(source.read())
                 if source.status >= 500:
-                    self.error_occurred(node, 'ERROR %d %s From %s Server' %
-                        (source.status, bodies[-1][:1024], server_type))
+                    self.error_occurred(node, _('ERROR %(status)d %(body)s ' \
+                        'From %(type)s Server') % {'status': source.status,
+                        'body': bodies[-1][:1024], 'type': server_type})
         return self.best_response(req, statuses, reasons, bodies,
                                   '%s %s' % (server_type, req.method))
@@ -686,12 +688,13 @@ class ObjectController(Controller):
                         self.error_limit(node)
                     elif response.status >= 500:
                         self.error_occurred(node,
-                            'ERROR %d %s From Object Server' %
-                            (response.status, body[:1024]))
+                            _('ERROR %(status)d %(body)s From Object Server') %
+                            {'status': response.status, 'body': body[:1024]})
                     return response.status, response.reason, body
             except:
-                self.exception_occurred(node, 'Object',
-                    'Trying to %s %s' % (req.method, req.path))
+                self.exception_occurred(node, _('Object'),
+                    _('Trying to %(method)s %(path)s') %
+                    {'method': req.method, 'path': req.path})
         return 500, '', ''

     def GETorHEAD(self, req):
@@ -990,8 +993,8 @@ class ObjectController(Controller):
                 with Timeout(self.app.node_timeout):
                     resp = conn.getexpect()
             except:
-                self.exception_occurred(node, 'Object',
-                    'Expect: 100-continue on %s' % req.path)
+                self.exception_occurred(node, _('Object'),
+                    _('Expect: 100-continue on %s') % req.path)
             if conn and resp:
                 if resp.status == 100:
                     conns.append(conn)
@@ -1030,8 +1033,8 @@ class ObjectController(Controller):
                         else:
                             conn.send(chunk)
                     except:
-                        self.exception_occurred(conn.node, 'Object',
-                            'Trying to write to %s' % req.path)
+                        self.exception_occurred(conn.node, _('Object'),
+                            _('Trying to write to %s') % req.path)
                         conns.remove(conn)
                 if len(conns) <= len(nodes) / 2:
                     self.app.logger.error(
@@ -1069,13 +1072,14 @@ class ObjectController(Controller):
                     bodies.append(response.read())
                     if response.status >= 500:
                         self.error_occurred(conn.node,
-                            'ERROR %d %s From Object Server re: %s' %
-                            (response.status, bodies[-1][:1024], req.path))
+                            _('ERROR %(status)d %(body)s From Object Server ' \
+                            're: %(path)s') % {'status': response.status,
+                            'body': bodies[-1][:1024], 'path': req.path})
                     elif 200 <= response.status < 300:
                         etags.add(response.getheader('etag').strip('"'))
             except:
-                self.exception_occurred(conn.node, 'Object',
-                    'Trying to get final status of PUT to %s' % req.path)
+                self.exception_occurred(conn.node, _('Object'),
+                    _('Trying to get final status of PUT to %s') % req.path)
         if len(etags) > 1:
             self.app.logger.error(
                 _('Object servers returned %s mismatched etags'), len(etags))
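Two formatting styles appear in this hunk: most messages are interpolated eagerly with the % operator before being handed to the logger, while the mismatched-etags message at the end passes len(etags) as a logging argument, deferring interpolation to the logging module. A small stand-alone comparison (logger name and count are invented):

import logging

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger('demo')
etag_count = 3

# Eager: the string is fully formatted before the logger sees it.
logger.error('Object servers returned %s mismatched etags' % etag_count)
# Deferred: logging interpolates only if the record is actually emitted.
logger.error('Object servers returned %s mismatched etags', etag_count)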
@@ -1286,8 +1290,8 @@ class ContainerController(Controller):
                     accounts.insert(0, account)
             except:
                 accounts.insert(0, account)
-                self.exception_occurred(node, 'Container',
-                    'Trying to PUT to %s' % req.path)
+                self.exception_occurred(node, _('Container'),
+                    _('Trying to PUT to %s') % req.path)
             if not accounts:
                 break
         while len(statuses) < len(containers):
@@ -1341,8 +1345,8 @@ class ContainerController(Controller):
                 elif source.status == 507:
                     self.error_limit(node)
             except:
-                self.exception_occurred(node, 'Container',
-                    'Trying to POST %s' % req.path)
+                self.exception_occurred(node, _('Container'),
+                    _('Trying to POST %s') % req.path)
             if len(statuses) >= len(containers):
                 break
         while len(statuses) < len(containers):
@@ -1398,8 +1402,8 @@ class ContainerController(Controller):
                     accounts.insert(0, account)
             except:
                 accounts.insert(0, account)
-                self.exception_occurred(node, 'Container',
-                    'Trying to DELETE %s' % req.path)
+                self.exception_occurred(node, _('Container'),
+                    _('Trying to DELETE %s') % req.path)
             if not accounts:
                 break
         while len(statuses) < len(containers):
@@ -1482,8 +1486,8 @@ class AccountController(Controller):
                 if source.status == 507:
                     self.error_limit(node)
             except:
-                self.exception_occurred(node, 'Account',
-                    'Trying to PUT to %s' % req.path)
+                self.exception_occurred(node, _('Account'),
+                    _('Trying to PUT to %s') % req.path)
             if len(statuses) >= len(accounts):
                 break
         while len(statuses) < len(accounts):
@@ -1530,8 +1534,8 @@ class AccountController(Controller):
                 elif source.status == 507:
                     self.error_limit(node)
             except:
-                self.exception_occurred(node, 'Account',
-                    'Trying to POST %s' % req.path)
+                self.exception_occurred(node, _('Account'),
+                    _('Trying to POST %s') % req.path)
             if len(statuses) >= len(accounts):
                 break
         while len(statuses) < len(accounts):
@@ -1575,8 +1579,8 @@ class AccountController(Controller):
                 elif source.status == 507:
                     self.error_limit(node)
             except:
-                self.exception_occurred(node, 'Account',
-                    'Trying to DELETE %s' % req.path)
+                self.exception_occurred(node, _('Account'),
+                    _('Trying to DELETE %s') % req.path)
             if len(statuses) >= len(accounts):
                 break
         while len(statuses) < len(accounts):
@@ -59,19 +59,20 @@ class AccessLogProcessor(object):
              headers,
              processing_time) = (unquote(x) for x in raw_log[16:].split(' '))
         except ValueError:
-            self.logger.debug('Bad line data: %s' % repr(raw_log))
+            self.logger.debug(_('Bad line data: %s') % repr(raw_log))
             return {}
         if server != self.server_name:
             # incorrect server name in log line
-            self.logger.debug('Bad server name: found "%s" expected "%s"' \
-                % (server, self.server_name))
+            self.logger.debug(_('Bad server name: found "%(found)s" ' \
+                'expected "%(expected)s"') %
+                {'found': server, 'expected': self.server_name})
             return {}
         try:
             (version, account, container_name, object_name) = \
                 split_path(request, 2, 4, True)
         except ValueError, e:
-            self.logger.debug(
-                'Invalid path: %s from data: %s' % (e, repr(raw_log)))
+            self.logger.debug(_('Invalid path: %(error)s from data: %(log)s') %
+                {'error': e, 'log': repr(raw_log)})
             return {}
         if container_name is not None:
             container_name = container_name.split('?', 1)[0]
@@ -194,8 +195,9 @@ class AccessLogProcessor(object):
         if bad_lines > (total_lines * self.warn_percent):
             name = '/'.join([data_object_account, data_object_container,
                              data_object_name])
-            self.logger.warning('I found a bunch of bad lines in %s '\
-                '(%d bad, %d total)' % (name, bad_lines, total_lines))
+            self.logger.warning(_('I found a bunch of bad lines in %(name)s '\
+                '(%(bad)d bad, %(total)d total)') %
+                {'name': name, 'bad': bad_lines, 'total': total_lines})
         return hourly_aggr_info

     def keylist_mapping(self):
@@ -52,10 +52,10 @@ class AccountStat(Daemon):
         self.logger = get_logger(stats_conf, 'swift-account-stats-logger')

     def run_once(self):
-        self.logger.info("Gathering account stats")
+        self.logger.info(_("Gathering account stats"))
         start = time.time()
         self.find_and_process()
-        self.logger.info("Gathering account stats complete (%0.2f minutes)" %
+        self.logger.info(_("Gathering account stats complete (%0.2f minutes)") %
                          ((time.time() - start) / 60))

     def find_and_process(self):
@@ -70,14 +70,14 @@ class AccountStat(Daemon):
         # Account Name, Container Count, Object Count, Bytes Used
         for device in os.listdir(self.devices):
             if self.mount_check and not check_mount(self.devices, device):
-                self.logger.error("Device %s is not mounted, skipping." %
+                self.logger.error(_("Device %s is not mounted, skipping.") %
                                   device)
                 continue
             accounts = os.path.join(self.devices,
                                     device,
                                     account_server_data_dir)
             if not os.path.exists(accounts):
-                self.logger.debug("Path %s does not exist, skipping." %
+                self.logger.debug(_("Path %s does not exist, skipping.") %
                                   accounts)
                 continue
             for root, dirs, files in os.walk(accounts, topdown=False):
@@ -59,7 +59,7 @@ class LogProcessor(object):
             module = __import__(import_target, fromlist=[import_target])
             klass = getattr(module, class_name)
             self.plugins[plugin_name]['instance'] = klass(plugin_conf)
-            self.logger.debug('Loaded plugin "%s"' % plugin_name)
+            self.logger.debug(_('Loaded plugin "%s"') % plugin_name)

     @property
     def internal_proxy(self):
@@ -76,10 +76,9 @@ class LogProcessor(object):
         return self._internal_proxy

     def process_one_file(self, plugin_name, account, container, object_name):
-        self.logger.info('Processing %s/%s/%s with plugin "%s"' % (account,
-                                                                   container,
-                                                                   object_name,
-                                                                   plugin_name))
+        self.logger.info(_('Processing %(obj)s with plugin "%(plugin)s"') %
+            {'obj': '/'.join((account, container, object_name)),
+             'plugin': plugin_name})
         # get an iter of the object data
         compressed = object_name.endswith('.gz')
         stream = self.get_object_data(account, container, object_name,
@@ -177,10 +176,9 @@ class LogProcessor(object):
             try:
                 chunk = d.decompress(chunk)
             except zlib.error:
-                self.logger.debug('Bad compressed data for %s/%s/%s' %
-                                  (swift_account,
-                                   container_name,
-                                   object_name))
+                self.logger.debug(_('Bad compressed data for %s')
+                    % '/'.join((swift_account, container_name,
+                                object_name)))
                 raise BadFileDownload() # bad compressed data
             parts = chunk.split('\n')
             parts[0] = last_part + parts[0]
@@ -239,7 +237,7 @@ class LogProcessorDaemon(Daemon):
         self.worker_count = int(c.get('worker_count', '1'))

     def run_once(self):
-        self.logger.info("Beginning log processing")
+        self.logger.info(_("Beginning log processing"))
         start = time.time()
         if self.lookback_hours == 0:
             lookback_start = None
@@ -277,14 +275,14 @@ class LogProcessorDaemon(Daemon):
                 already_processed_files = set()
         except:
             already_processed_files = set()
-        self.logger.debug('found %d processed files' % \
+        self.logger.debug(_('found %d processed files') % \
                           len(already_processed_files))
         logs_to_process = self.log_processor.get_data_list(lookback_start,
                                                            lookback_end,
                                                            already_processed_files)
-        self.logger.info('loaded %d files to process' % len(logs_to_process))
+        self.logger.info(_('loaded %d files to process') % len(logs_to_process))
         if not logs_to_process:
-            self.logger.info("Log processing done (%0.2f minutes)" %
+            self.logger.info(_("Log processing done (%0.2f minutes)") %
                              ((time.time() - start) / 60))
             return
@@ -358,7 +356,7 @@ class LogProcessorDaemon(Daemon):
                                        self.log_processor_container,
                                        'processed_files.pickle.gz')

-        self.logger.info("Log processing done (%0.2f minutes)" %
+        self.logger.info(_("Log processing done (%0.2f minutes)") %
                          ((time.time() - start) / 60))
@@ -68,10 +68,10 @@ class LogUploader(Daemon):
         self.logger = utils.get_logger(uploader_conf, plugin_name)

     def run_once(self):
-        self.logger.info("Uploading logs")
+        self.logger.info(_("Uploading logs"))
         start = time.time()
         self.upload_all_logs()
-        self.logger.info("Uploading logs complete (%0.2f minutes)" %
+        self.logger.info(_("Uploading logs complete (%0.2f minutes)") %
                          ((time.time() - start) / 60))

     def upload_all_logs(self):
@@ -126,22 +126,22 @@ class LogUploader(Daemon):
                 hour = filename[slice(*hour_offset)]
             except IndexError:
                 # unexpected filename format, move on
-                self.logger.error("Unexpected log: %s" % filename)
+                self.logger.error(_("Unexpected log: %s") % filename)
                 continue
             if ((time.time() - os.stat(filename).st_mtime) <
                     self.new_log_cutoff):
                 # don't process very new logs
                 self.logger.debug(
-                    "Skipping log: %s (< %d seconds old)" % (filename,
-                                                             self.new_log_cutoff))
+                    _("Skipping log: %(file)s (< %(cutoff)d seconds old)") %
+                    {'file': filename, 'cutoff': self.new_log_cutoff})
                 continue
             self.upload_one_log(filename, year, month, day, hour)

     def upload_one_log(self, filename, year, month, day, hour):
         if os.path.getsize(filename) == 0:
-            self.logger.debug("Log %s is 0 length, skipping" % filename)
+            self.logger.debug(_("Log %s is 0 length, skipping") % filename)
             return
-        self.logger.debug("Processing log: %s" % filename)
+        self.logger.debug(_("Processing log: %s") % filename)
         filehash = hashlib.md5()
         already_compressed = True if filename.endswith('.gz') else False
         opener = gzip.open if already_compressed else open
@@ -162,9 +162,9 @@ class LogUploader(Daemon):
                                            self.container_name,
                                            target_filename,
                                            compress=(not already_compressed)):
-            self.logger.debug("Uploaded log %s to %s" %
-                              (filename, target_filename))
+            self.logger.debug(_("Uploaded log %(file)s to %(target)s") %
+                              {'file': filename, 'target': target_filename})
             if self.unlink_log:
                 os.unlink(filename)
         else:
-            self.logger.error("ERROR: Upload of log %s failed!" % filename)
+            self.logger.error(_("ERROR: Upload of log %s failed!") % filename)
@@ -37,7 +37,7 @@ class StatsLogProcessor(object):
                  bytes_used) = line.split(',')
             except (IndexError, ValueError):
                 # bad line data
-                self.logger.debug('Bad line data: %s' % repr(line))
+                self.logger.debug(_('Bad line data: %s') % repr(line))
                 continue
             account = account.strip('"')
             container_count = int(container_count.strip('"'))