# Copyright (c) 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from time import time
from unittest import main, TestCase
from test.unit import FakeRing, mocked_http_conn, debug_logger, \
    make_timestamp_iter
from tempfile import mkdtemp
from shutil import rmtree
from collections import defaultdict
from copy import deepcopy

import mock
import six
from six.moves import urllib

from swift.common import internal_client, utils, swob
from swift.common.utils import Timestamp
from swift.obj import expirer


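# Test doubles for the expirer's timing: not_random pins the random splay
# factor at 0.5 and not_sleep records the requested interval in
# last_not_sleep instead of blocking, so run_forever() timing can be
# asserted on.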
def not_random():
    return 0.5


last_not_sleep = 0


def not_sleep(seconds):
    global last_not_sleep
    last_not_sleep = seconds


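# A minimal stand-in for swift.common.internal_client.InternalClient: it
# serves a canned account/container/object tree so the expirer can be
# exercised without a running cluster.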
class FakeInternalClient(object):
    container_ring = FakeRing()

    def __init__(self, aco_dict):
        """
        :param aco_dict: A dict of account, container, object that
            FakeInternalClient can return when each method is called. Each
            account maps to a dict of container names, and each container
            dict has a list of the objects in that container.

            e.g. {'account1': {
                      'container1': ['obj1', 'obj2', {'name': 'obj3'}],
                      'container2': [],
                  },
                  'account2': {},
                 }

            N.B. the object entries should be container-server JSON style
            db rows, but this fake will dynamically detect when bare names
            are given and wrap them for convenience.
        """
        self.aco_dict = defaultdict(dict)
        self.aco_dict.update(aco_dict)

    def get_account_info(self, account):
        acc_dict = self.aco_dict[account]
        container_count = len(acc_dict)
        obj_count = sum(len(objs) for objs in acc_dict.values())
        return container_count, obj_count

    def iter_containers(self, account, prefix=''):
        acc_dict = self.aco_dict[account]
        return [{'name': six.text_type(container)}
                for container in sorted(acc_dict)
                if container.startswith(prefix)]

    def delete_container(*a, **kw):
        pass

    def iter_objects(self, account, container):
        acc_dict = self.aco_dict[account]
        obj_iter = acc_dict.get(container, [])
        resp = []
        for obj in obj_iter:
            if not isinstance(obj, dict):
                obj = {'name': six.text_type(obj)}
            resp.append(obj)
        return resp

    def make_request(*a, **kw):
        pass


class TestObjectExpirer(TestCase):
    maxDiff = None
    internal_client = None

    def setUp(self):
        global not_sleep

        self.old_loadapp = internal_client.loadapp
        self.old_sleep = internal_client.sleep

        internal_client.loadapp = lambda *a, **kw: None
        internal_client.sleep = not_sleep

        self.rcache = mkdtemp()
        self.conf = {'recon_cache_path': self.rcache}
        self.logger = debug_logger('test-expirer')

        self.ts = make_timestamp_iter()
        self.past_time = str(int(time() - 86400))
        self.future_time = str(int(time() + 86400))
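        # Task objects are named '<delete-at>-<account>/<container>/<obj>'
        # and live in a task container named for the delete-at timestamp
        # (cf. build_task_obj/parse_task_obj in swift.obj.expirer).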
        # Dummy task queue for test
        self.fake_swift = FakeInternalClient({
            '.expiring_objects': {
                # this task container will be checked
                self.past_time: [
                    # tasks ready for execution
                    self.past_time + '-a0/c0/o0',
                    self.past_time + '-a1/c1/o1',
                    self.past_time + '-a2/c2/o2',
                    self.past_time + '-a3/c3/o3',
                    self.past_time + '-a4/c4/o4',
                    self.past_time + '-a5/c5/o5',
                    self.past_time + '-a6/c6/o6',
                    self.past_time + '-a7/c7/o7',
                    # task objects for unicode test
                    self.past_time + u'-a8/c8/o8\u2661',
                    self.past_time + u'-a9/c9/o9\xf8',
                    # this task will be skipped
                    self.future_time + '-a10/c10/o10'],
                # this task container will be skipped
                self.future_time: [
                    self.future_time + '-a11/c11/o11']}
        })
        self.expirer = expirer.ObjectExpirer(self.conf, logger=self.logger,
                                             swift=self.fake_swift)

        # target object paths which should be expired now
        self.expired_target_path_list = [
            swob.wsgi_to_str(tgt) for tgt in (
                'a0/c0/o0', 'a1/c1/o1', 'a2/c2/o2', 'a3/c3/o3', 'a4/c4/o4',
                'a5/c5/o5', 'a6/c6/o6', 'a7/c7/o7',
                'a8/c8/o8\xe2\x99\xa1', 'a9/c9/o9\xc3\xb8',
            )
        ]

    def tearDown(self):
        rmtree(self.rcache)
        internal_client.sleep = self.old_sleep
        internal_client.loadapp = self.old_loadapp

    def test_init(self):
        x = expirer.ObjectExpirer({}, logger=self.logger)
        self.assertEqual(self.logger.get_lines_for_level('warning'), [])
        self.assertEqual(x.expiring_objects_account, '.expiring_objects')
        x = expirer.ObjectExpirer({'auto_create_account_prefix': '-'},
                                  logger=self.logger)
        self.assertEqual(self.logger.get_lines_for_level('warning'), [
            'Option auto_create_account_prefix is deprecated. '
            'Configure auto_create_account_prefix under the '
            'swift-constraints section of swift.conf. This option '
            'will be ignored in a future release.'
        ])
        self.assertEqual(x.expiring_objects_account, '-expiring_objects')

    def test_get_process_values_from_kwargs(self):
        x = expirer.ObjectExpirer({})
        vals = {
            'processes': 5,
            'process': 1,
        }
        x.get_process_values(vals)
        self.assertEqual(x.processes, 5)
        self.assertEqual(x.process, 1)

    def test_get_process_values_from_config(self):
        vals = {
            'processes': 5,
            'process': 1,
        }
        x = expirer.ObjectExpirer(vals)
        x.get_process_values({})
        self.assertEqual(x.processes, 5)
        self.assertEqual(x.process, 1)

    def test_get_process_values_negative_process(self):
        vals = {
            'processes': 5,
            'process': -1,
        }
        # from config
        x = expirer.ObjectExpirer(vals)
        expected_msg = 'process must be an integer greater' \
                       ' than or equal to 0'
        with self.assertRaises(ValueError) as ctx:
            x.get_process_values({})
        self.assertEqual(str(ctx.exception), expected_msg)
        # from kwargs
        x = expirer.ObjectExpirer({})
        with self.assertRaises(ValueError) as ctx:
            x.get_process_values(vals)
        self.assertEqual(str(ctx.exception), expected_msg)

    def test_get_process_values_negative_processes(self):
        vals = {
            'processes': -5,
            'process': 1,
        }
        # from config
        x = expirer.ObjectExpirer(vals)
        expected_msg = 'processes must be an integer greater' \
                       ' than or equal to 0'
        with self.assertRaises(ValueError) as ctx:
            x.get_process_values({})
        self.assertEqual(str(ctx.exception), expected_msg)
        # from kwargs
        x = expirer.ObjectExpirer({})
        with self.assertRaises(ValueError) as ctx:
            x.get_process_values(vals)
        self.assertEqual(str(ctx.exception), expected_msg)

    def test_get_process_values_process_greater_than_processes(self):
        vals = {
            'processes': 5,
            'process': 7,
        }
        # from config
        x = expirer.ObjectExpirer(vals)
        expected_msg = 'process must be less than processes'
        with self.assertRaises(ValueError) as ctx:
            x.get_process_values({})
        self.assertEqual(str(ctx.exception), expected_msg)
        # from kwargs
        x = expirer.ObjectExpirer({})
        with self.assertRaises(ValueError) as ctx:
            x.get_process_values(vals)
        self.assertEqual(str(ctx.exception), expected_msg)

    def test_get_process_values_process_equal_to_processes(self):
        vals = {
            'processes': 5,
            'process': 5,
        }
        # from config
        x = expirer.ObjectExpirer(vals)
        expected_msg = 'process must be less than processes'
        with self.assertRaises(ValueError) as ctx:
            x.get_process_values({})
        self.assertEqual(str(ctx.exception), expected_msg)
        # from kwargs
        x = expirer.ObjectExpirer({})
        with self.assertRaises(ValueError) as ctx:
            x.get_process_values(vals)
        self.assertEqual(str(ctx.exception), expected_msg)

    def test_init_concurrency_too_small(self):
        conf = {
            'concurrency': 0,
        }
        self.assertRaises(ValueError, expirer.ObjectExpirer, conf)
        conf = {
            'concurrency': -1,
        }
        self.assertRaises(ValueError, expirer.ObjectExpirer, conf)

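    # 'processes' is the total number of expirer processes the operator
    # runs; 'process' is this one's zero-based index.  Tasks are divided
    # among processes by hashing task names (cf. test_hash_mod below), so
    # each task is executed by exactly one process.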
    def test_process_based_concurrency(self):

        class ObjectExpirer(expirer.ObjectExpirer):

            def __init__(self, conf, swift):
                super(ObjectExpirer, self).__init__(conf, swift=swift)
                self.processes = 3
                self.deleted_objects = {}

            def delete_object(self, target_path, delete_timestamp,
                              task_account, task_container, task_object,
                              is_async_delete):
                if task_container not in self.deleted_objects:
                    self.deleted_objects[task_container] = set()
                self.deleted_objects[task_container].add(task_object)

        x = ObjectExpirer(self.conf, swift=self.fake_swift)

        deleted_objects = defaultdict(set)
        for i in range(3):
            x.process = i
            # reset progress so we know we don't double-up work among
            # processes
            x.deleted_objects = defaultdict(set)
            x.run_once()
            for task_container, deleted in x.deleted_objects.items():
                self.assertFalse(deleted_objects[task_container] & deleted)
                deleted_objects[task_container] |= deleted

        # sort for comparison
        deleted_objects = {
            con: sorted(o_set) for con, o_set in deleted_objects.items()}
        expected = {
            self.past_time: [
                self.past_time + '-' + target_path
                for target_path in self.expired_target_path_list]}
        self.assertEqual(deleted_objects, expected)

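    # delete_object() deletes the target object and then pops the task
    # from the queue; 404/412 on a fresh task are left queued for retry,
    # but once the task is older than reclaim_age those statuses are
    # treated as success and the task is popped anyway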
    def test_delete_object(self):
        x = expirer.ObjectExpirer({}, logger=self.logger)
        actual_obj = 'actual_obj'
        timestamp = int(time())
        reclaim_ts = timestamp - x.reclaim_age
        account = 'account'
        container = 'container'
        obj = 'obj'

        http_exc = {
            resp_code:
                internal_client.UnexpectedResponse(
                    str(resp_code), swob.HTTPException(status=resp_code))
            for resp_code in {404, 412, 500}
        }
        exc_other = Exception()

        def check_call_to_delete_object(exc, ts, should_pop):
            x.logger.clear()
            start_reports = x.report_objects
            with mock.patch.object(x, 'delete_actual_object',
                                   side_effect=exc) as delete_actual:
                with mock.patch.object(x, 'pop_queue') as pop_queue:
                    x.delete_object(actual_obj, ts, account, container, obj,
                                    False)

            delete_actual.assert_called_once_with(actual_obj, ts, False)
            log_lines = x.logger.get_lines_for_level('error')
            if should_pop:
                pop_queue.assert_called_once_with(account, container, obj)
                self.assertEqual(start_reports + 1, x.report_objects)
                self.assertFalse(log_lines)
            else:
                self.assertFalse(pop_queue.called)
                self.assertEqual(start_reports, x.report_objects)
                self.assertEqual(1, len(log_lines))
                if isinstance(exc, internal_client.UnexpectedResponse):
                    self.assertEqual(
                        log_lines[0],
                        'Unexpected response while deleting object '
                        'account container obj: %s' % exc.resp.status_int)
                else:
                    self.assertTrue(log_lines[0].startswith(
                        'Exception while deleting object '
                        'account container obj'))

        # verify pop_queue logic on exceptions
        for exc, ts, should_pop in [(None, timestamp, True),
                                    (http_exc[404], timestamp, False),
                                    (http_exc[412], timestamp, False),
                                    (http_exc[500], reclaim_ts, False),
                                    (exc_other, reclaim_ts, False),
                                    (http_exc[404], reclaim_ts, True),
                                    (http_exc[412], reclaim_ts, True)]:

            try:
                check_call_to_delete_object(exc, ts, should_pop)
            except AssertionError as err:
                self.fail("Failed on %r at %f: %s" % (exc, ts, err))

    def test_report(self):
        x = expirer.ObjectExpirer({}, logger=self.logger)

        x.report()
        self.assertEqual(x.logger.get_lines_for_level('info'), [])

        x.logger._clear()
        x.report(final=True)
        self.assertTrue(
            'completed' in str(x.logger.get_lines_for_level('info')))
        self.assertTrue(
            'so far' not in str(x.logger.get_lines_for_level('info')))

        x.logger._clear()
        x.report_last_time = time() - x.report_interval
        x.report()
        self.assertTrue(
            'completed' not in str(x.logger.get_lines_for_level('info')))
        self.assertTrue(
            'so far' in str(x.logger.get_lines_for_level('info')))

    def test_parse_task_obj(self):
        x = expirer.ObjectExpirer(self.conf, logger=self.logger)

        def assert_parse_task_obj(task_obj, expected_delete_at,
                                  expected_account, expected_container,
                                  expected_obj):
            delete_at, account, container, obj = x.parse_task_obj(task_obj)
            self.assertEqual(delete_at, expected_delete_at)
            self.assertEqual(account, expected_account)
            self.assertEqual(container, expected_container)
            self.assertEqual(obj, expected_obj)

        assert_parse_task_obj('0000-a/c/o', 0, 'a', 'c', 'o')
        assert_parse_task_obj('0001-a/c/o', 1, 'a', 'c', 'o')
        assert_parse_task_obj('1000-a/c/o', 1000, 'a', 'c', 'o')
        assert_parse_task_obj('0000-acc/con/obj', 0, 'acc', 'con', 'obj')

    def make_task(self, delete_at, target, is_async_delete=False):
        return {
            'task_account': '.expiring_objects',
            'task_container': delete_at,
            'task_object': delete_at + '-' + target,
            'delete_timestamp': Timestamp(delete_at),
            'target_path': target,
            'is_async_delete': is_async_delete,
        }

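    # round_robin_order interleaves tasks by *target* container so deletes
    # are spread across containers instead of hitting one container's
    # objects in a single long burst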
    def test_round_robin_order(self):
        x = expirer.ObjectExpirer(self.conf, logger=self.logger)
        task_con_obj_list = [
            # objects in 0000 timestamp container
            self.make_task('0000', 'a/c0/o0'),
            self.make_task('0000', 'a/c0/o1'),
            # objects in 0001 timestamp container
            self.make_task('0001', 'a/c1/o0'),
            self.make_task('0001', 'a/c1/o1'),
            # objects in 0002 timestamp container
            self.make_task('0002', 'a/c2/o0'),
            self.make_task('0002', 'a/c2/o1'),
        ]
        result = list(x.round_robin_order(task_con_obj_list))

        # sorted by popping one object to delete for each target_container
        expected = [
            self.make_task('0000', 'a/c0/o0'),
            self.make_task('0001', 'a/c1/o0'),
            self.make_task('0002', 'a/c2/o0'),
            self.make_task('0000', 'a/c0/o1'),
            self.make_task('0001', 'a/c1/o1'),
            self.make_task('0002', 'a/c2/o1'),
        ]
        self.assertEqual(expected, result)

        # task containers have some task objects with invalid target paths
        task_con_obj_list = [
            # objects in 0000 timestamp container
            self.make_task('0000', 'invalid0'),
            self.make_task('0000', 'a/c0/o0'),
            self.make_task('0000', 'a/c0/o1'),
            # objects in 0001 timestamp container
            self.make_task('0001', 'a/c1/o0'),
            self.make_task('0001', 'invalid1'),
            self.make_task('0001', 'a/c1/o1'),
            # objects in 0002 timestamp container
            self.make_task('0002', 'a/c2/o0'),
            self.make_task('0002', 'a/c2/o1'),
            self.make_task('0002', 'invalid2'),
        ]
        result = list(x.round_robin_order(task_con_obj_list))

        # the invalid task objects are ignored
        expected = [
            self.make_task('0000', 'a/c0/o0'),
            self.make_task('0001', 'a/c1/o0'),
            self.make_task('0002', 'a/c2/o0'),
            self.make_task('0000', 'a/c0/o1'),
            self.make_task('0001', 'a/c1/o1'),
            self.make_task('0002', 'a/c2/o1'),
        ]
        self.assertEqual(expected, result)

        # for a given target container, tasks won't necessarily all go in
        # the same timestamp container
        task_con_obj_list = [
            # objects in 0000 timestamp container
            self.make_task('0000', 'a/c0/o0'),
            self.make_task('0000', 'a/c0/o1'),
            self.make_task('0000', 'a/c2/o2'),
            self.make_task('0000', 'a/c2/o3'),
            # objects in 0001 timestamp container
            self.make_task('0001', 'a/c0/o2'),
            self.make_task('0001', 'a/c0/o3'),
            self.make_task('0001', 'a/c1/o0'),
            self.make_task('0001', 'a/c1/o1'),
            # objects in 0002 timestamp container
            self.make_task('0002', 'a/c2/o0'),
            self.make_task('0002', 'a/c2/o1'),
        ]
        result = list(x.round_robin_order(task_con_obj_list))

        # so we go around popping by *target* container, not *task* container
        expected = [
            self.make_task('0000', 'a/c0/o0'),
            self.make_task('0001', 'a/c1/o0'),
            self.make_task('0000', 'a/c2/o2'),
            self.make_task('0000', 'a/c0/o1'),
            self.make_task('0001', 'a/c1/o1'),
            self.make_task('0000', 'a/c2/o3'),
            self.make_task('0001', 'a/c0/o2'),
            self.make_task('0002', 'a/c2/o0'),
            self.make_task('0001', 'a/c0/o3'),
            self.make_task('0002', 'a/c2/o1'),
        ]
        self.assertEqual(expected, result)

        # all of the work to be done could be for different target containers
        task_con_obj_list = [
            # objects in 0000 timestamp container
            self.make_task('0000', 'a/c0/o'),
            self.make_task('0000', 'a/c1/o'),
            self.make_task('0000', 'a/c2/o'),
            self.make_task('0000', 'a/c3/o'),
            # objects in 0001 timestamp container
            self.make_task('0001', 'a/c4/o'),
            self.make_task('0001', 'a/c5/o'),
            self.make_task('0001', 'a/c6/o'),
            self.make_task('0001', 'a/c7/o'),
            # objects in 0002 timestamp container
            self.make_task('0002', 'a/c8/o'),
            self.make_task('0002', 'a/c9/o'),
        ]
        result = list(x.round_robin_order(task_con_obj_list))

        # in which case, we kind of hammer the task containers
        self.assertEqual(task_con_obj_list, result)

    def test_hash_mod(self):
        x = expirer.ObjectExpirer(self.conf, logger=self.logger)
        mod_count = [0, 0, 0]
        for i in range(1000):
            name = 'obj%d' % i
            mod = x.hash_mod(name, 3)
            mod_count[mod] += 1

        # 1000 names are well shuffled
        self.assertGreater(mod_count[0], 300)
        self.assertGreater(mod_count[1], 300)
        self.assertGreater(mod_count[2], 300)

    def test_iter_task_accounts_to_expire(self):
        x = expirer.ObjectExpirer(self.conf, logger=self.logger)
        results = [_ for _ in x.iter_task_accounts_to_expire()]
        self.assertEqual(results, [('.expiring_objects', 0, 1)])

        self.conf['processes'] = '2'
        self.conf['process'] = '1'
        x = expirer.ObjectExpirer(self.conf, logger=self.logger)
        results = [_ for _ in x.iter_task_accounts_to_expire()]
        self.assertEqual(results, [('.expiring_objects', 1, 2)])

    def test_delete_at_time_of_task_container(self):
        x = expirer.ObjectExpirer(self.conf, logger=self.logger)
        self.assertEqual(x.delete_at_time_of_task_container('0000'), 0)
        self.assertEqual(x.delete_at_time_of_task_container('0001'), 1)
        self.assertEqual(x.delete_at_time_of_task_container('1000'), 1000)

    def test_run_once_nothing_to_do(self):
        x = expirer.ObjectExpirer(self.conf, logger=self.logger)
        x.swift = 'throw error because a string does not have needed methods'
        x.run_once()
        self.assertEqual(x.logger.get_lines_for_level('error'),
                         ["Unhandled exception: "])
        log_args, log_kwargs = x.logger.log_dict['error'][0]
        self.assertEqual(str(log_kwargs['exc_info'][1]),
                         "'str' object has no attribute 'get_account_info'")

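    # the fixture queue holds 2 task containers and 12 task objects, of
    # which only the 10 past-time tasks get executed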
    def test_run_once_calls_report(self):
        with mock.patch.object(self.expirer, 'pop_queue',
                               lambda a, c, o: None):
            self.expirer.run_once()
        self.assertEqual(
            self.expirer.logger.get_lines_for_level('info'), [
                'Pass beginning for task account .expiring_objects; '
                '2 possible containers; 12 possible objects',
                'Pass completed in 0s; 10 objects expired',
            ])

    def test_skip_task_account_without_task_container(self):
        fake_swift = FakeInternalClient({
            # task account has no containers
            '.expiring_objects': dict()
        })
        x = expirer.ObjectExpirer(self.conf, logger=self.logger,
                                  swift=fake_swift)
        x.run_once()
        self.assertEqual(
            x.logger.get_lines_for_level('info'), [
                'Pass completed in 0s; 0 objects expired',
            ])

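    # iter_task_to_expire yields only tasks from past-time containers that
    # are assigned to this (my_index, divisor) slice; async-delete tasks
    # are marked by the 'application/async-deleted' content type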
    def test_iter_task_to_expire(self):
        # In this test, all tasks are assigned to the tested expirer
        my_index = 0
        divisor = 1

        task_account_container_list = [('.expiring_objects', self.past_time)]

        expected = [
            self.make_task(self.past_time, target_path)
            for target_path in self.expired_target_path_list]

        self.assertEqual(
            list(self.expirer.iter_task_to_expire(
                task_account_container_list, my_index, divisor)),
            expected)

        # the task queue has some invalid task objects
        invalid_aco_dict = deepcopy(self.fake_swift.aco_dict)
        invalid_aco_dict['.expiring_objects'][self.past_time].insert(
            0, self.past_time + '-invalid0')
        invalid_aco_dict['.expiring_objects'][self.past_time].insert(
            5, self.past_time + '-invalid1')
        invalid_fake_swift = FakeInternalClient(invalid_aco_dict)
        x = expirer.ObjectExpirer(self.conf, logger=self.logger,
                                  swift=invalid_fake_swift)

        # but the invalid tasks are skipped
        self.assertEqual(
            list(x.iter_task_to_expire(
                task_account_container_list, my_index, divisor)),
            expected)

        # test some of that async delete
        async_delete_aco_dict = {
            '.expiring_objects': {
                # this task container will be checked
                self.past_time: [
                    # tasks ready for execution
                    {'name': self.past_time + '-a0/c0/o0',
                     'content_type': 'application/async-deleted'},
                    {'name': self.past_time + '-a1/c1/o1',
                     'content_type': 'application/async-deleted'},
                    {'name': self.past_time + '-a2/c2/o2',
                     'content_type': 'application/async-deleted'},
                    {'name': self.past_time + '-a3/c3/o3',
                     'content_type': 'application/async-deleted'},
                    {'name': self.past_time + '-a4/c4/o4',
                     'content_type': 'application/async-deleted'},
                    {'name': self.past_time + '-a5/c5/o5',
                     'content_type': 'application/async-deleted'},
                    {'name': self.past_time + '-a6/c6/o6',
                     'content_type': 'application/async-deleted'},
                    {'name': self.past_time + '-a7/c7/o7',
                     'content_type': 'application/async-deleted'},
                    # task objects for unicode test
                    {'name': self.past_time + u'-a8/c8/o8\u2661',
                     'content_type': 'application/async-deleted'},
                    {'name': self.past_time + u'-a9/c9/o9\xf8',
                     'content_type': 'application/async-deleted'},
                ]
            }
        }
        async_delete_fake_swift = FakeInternalClient(async_delete_aco_dict)
        x = expirer.ObjectExpirer(self.conf, logger=self.logger,
                                  swift=async_delete_fake_swift)

        expected = [
            self.make_task(self.past_time, target_path,
                           is_async_delete=True)
            for target_path in self.expired_target_path_list]

        self.assertEqual(
            list(x.iter_task_to_expire(
                task_account_container_list, my_index, divisor)),
            expected)

    def test_run_once_unicode_problem(self):
        requests = []

        def capture_requests(ipaddr, port, method, path, *args, **kwargs):
            requests.append((method, path))

        # pop_queue makes 3 DELETE requests (one per container replica)
        # for each of the 10 executed task objects
        code_list = [200] * 3 * 10
        with mocked_http_conn(*code_list, give_connect=capture_requests):
            self.expirer.run_once()
        self.assertEqual(len(requests), 30)

    def test_container_timestamp_break(self):
        with mock.patch.object(self.fake_swift, 'iter_objects') as mock_method:
            self.expirer.run_once()

        # iter_objects is called only for past_time, not future_time
        self.assertEqual(mock_method.call_args_list,
                         [mock.call('.expiring_objects', self.past_time)])

    def test_object_timestamp_break(self):
        with mock.patch.object(self.expirer, 'delete_actual_object') \
                as mock_method, \
                mock.patch.object(self.expirer, 'pop_queue'):
            self.expirer.run_once()

        # only tasks with past delete-at times are executed
        self.assertEqual(
            mock_method.call_args_list,
            [mock.call(target_path, self.past_time, False)
             for target_path in self.expired_target_path_list])

    def test_failed_delete_keeps_entry(self):
        def deliberately_blow_up(actual_obj, timestamp):
            raise Exception('failed to delete actual object')

        # none of the tasks get done when delete_actual_object blows up
        with mock.patch.object(self.expirer, 'delete_actual_object',
                               deliberately_blow_up), \
                mock.patch.object(self.expirer, 'pop_queue') as mock_method:
            self.expirer.run_once()

        # no tasks are popped from the queue
        self.assertEqual(mock_method.call_args_list, [])

        # all tasks are done
        with mock.patch.object(self.expirer, 'delete_actual_object',
                               lambda o, t, b: None), \
                mock.patch.object(self.expirer, 'pop_queue') as mock_method:
            self.expirer.run_once()

        # all tasks are popped from the queue
        self.assertEqual(
            mock_method.call_args_list,
            [mock.call('.expiring_objects', self.past_time,
                       self.past_time + '-' + target_path)
             for target_path in self.expired_target_path_list])

    def test_success_gets_counted(self):
        self.assertEqual(self.expirer.report_objects, 0)
        with mock.patch('swift.obj.expirer.MAX_OBJECTS_TO_CACHE', 0), \
                mock.patch.object(self.expirer, 'delete_actual_object',
                                  lambda o, t, b: None), \
                mock.patch.object(self.expirer, 'pop_queue',
                                  lambda a, c, o: None):
            self.expirer.run_once()
        self.assertEqual(self.expirer.report_objects, 10)

    def test_delete_actual_object_gets_native_string(self):
        got_str = [False]

        def delete_actual_object_test_for_string(actual_obj, timestamp,
                                                 is_async_delete):
            if isinstance(actual_obj, str):
                got_str[0] = True

        self.assertEqual(self.expirer.report_objects, 0)

        with mock.patch.object(self.expirer, 'delete_actual_object',
                               delete_actual_object_test_for_string), \
                mock.patch.object(self.expirer, 'pop_queue',
                                  lambda a, c, o: None):
            self.expirer.run_once()

        self.assertEqual(self.expirer.report_objects, 10)
        self.assertTrue(got_str[0])

    def test_failed_delete_continues_on(self):
        def fail_delete_container(*a, **kw):
            raise Exception('failed to delete container')

        def fail_delete_actual_object(actual_obj, timestamp, is_async_delete):
            raise Exception('failed to delete actual object')

        with mock.patch.object(self.fake_swift, 'delete_container',
                               fail_delete_container), \
                mock.patch.object(self.expirer, 'delete_actual_object',
                                  fail_delete_actual_object):
            self.expirer.run_once()

        error_lines = self.expirer.logger.get_lines_for_level('error')

        self.assertEqual(error_lines, [
            'Exception while deleting object %s %s %s '
            'failed to delete actual object: ' % (
                '.expiring_objects', self.past_time,
                self.past_time + '-' + target_path)
            for target_path in self.expired_target_path_list] + [
            'Exception while deleting container %s %s '
            'failed to delete container: ' % (
                '.expiring_objects', self.past_time)])
        self.assertEqual(self.expirer.logger.get_lines_for_level('info'), [
            'Pass beginning for task account .expiring_objects; '
            '2 possible containers; 12 possible objects',
            'Pass completed in 0s; 0 objects expired',
        ])

    def test_run_forever_initial_sleep_random(self):
        global last_not_sleep

        def raise_system_exit():
            raise SystemExit('test_run_forever')

        interval = 1234
        x = expirer.ObjectExpirer({'__file__': 'unit_test',
                                   'interval': interval})
        with mock.patch.object(expirer, 'random', not_random), \
                mock.patch.object(expirer, 'sleep', not_sleep), \
                self.assertRaises(SystemExit) as caught:
            x.run_once = raise_system_exit
            x.run_forever()
        self.assertEqual(str(caught.exception), 'test_run_forever')
        self.assertEqual(last_not_sleep, 0.5 * interval)

    def test_run_forever_catches_usual_exceptions(self):
        raises = [0]

        def raise_exceptions():
            raises[0] += 1
            if raises[0] < 2:
                raise Exception('exception %d' % raises[0])
            raise SystemExit('exiting exception %d' % raises[0])

        x = expirer.ObjectExpirer({}, logger=self.logger)
        orig_sleep = expirer.sleep
        try:
            expirer.sleep = not_sleep
            x.run_once = raise_exceptions
            x.run_forever()
        except SystemExit as err:
            self.assertEqual(str(err), 'exiting exception 2')
        finally:
            expirer.sleep = orig_sleep
        self.assertEqual(x.logger.get_lines_for_level('error'),
                         ['Unhandled exception: '])
        log_args, log_kwargs = x.logger.log_dict['error'][0]
        self.assertEqual(str(log_kwargs['exc_info'][1]),
                         'exception 1')

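    # delete_actual_object DELETEs the target through the internal client
    # with X-If-Delete-At and X-Timestamp set to the task timestamp, and
    # tells the object server not to re-clean the queue entry (the expirer
    # pops it itself); async deletes skip the If-Delete-At precondition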
    def test_delete_actual_object(self):
        got_env = [None]

        def fake_app(env, start_response):
            got_env[0] = env
            start_response('204 No Content', [('Content-Length', '0')])
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app

        x = expirer.ObjectExpirer({})
        ts = Timestamp('1234')
        x.delete_actual_object('/path/to/object', ts, False)
        self.assertEqual(got_env[0]['HTTP_X_IF_DELETE_AT'], ts)
        self.assertEqual(got_env[0]['HTTP_X_TIMESTAMP'],
                         got_env[0]['HTTP_X_IF_DELETE_AT'])
        self.assertEqual(
            got_env[0]['HTTP_X_BACKEND_CLEAN_EXPIRING_OBJECT_QUEUE'], 'no')

    def test_delete_actual_object_bulk(self):
        got_env = [None]

        def fake_app(env, start_response):
            got_env[0] = env
            start_response('204 No Content', [('Content-Length', '0')])
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app

        x = expirer.ObjectExpirer({})
        ts = Timestamp('1234')
        x.delete_actual_object('/path/to/object', ts, True)
        self.assertNotIn('HTTP_X_IF_DELETE_AT', got_env[0])
        self.assertNotIn('HTTP_X_BACKEND_CLEAN_EXPIRING_OBJECT_QUEUE',
                         got_env[0])
        self.assertEqual(got_env[0]['HTTP_X_TIMESTAMP'], ts.internal)

    def test_delete_actual_object_nourlquoting(self):
        # delete_actual_object should not do its own url quoting because
        # internal client's make_request handles that.
        got_env = [None]

        def fake_app(env, start_response):
            got_env[0] = env
            start_response('204 No Content', [('Content-Length', '0')])
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app

        x = expirer.ObjectExpirer({})
        ts = Timestamp('1234')
        x.delete_actual_object('/path/to/object name', ts, False)
        self.assertEqual(got_env[0]['HTTP_X_IF_DELETE_AT'], ts)
        self.assertEqual(got_env[0]['HTTP_X_TIMESTAMP'],
                         got_env[0]['HTTP_X_IF_DELETE_AT'])
        self.assertEqual(got_env[0]['PATH_INFO'], '/v1/path/to/object name')

    def test_delete_actual_object_async_returns_expected_error(self):
        def do_test(test_status, should_raise):
            calls = [0]

            def fake_app(env, start_response):
                calls[0] += 1
                calls.append(env['PATH_INFO'])
                start_response(test_status, [('Content-Length', '0')])
                return []

            internal_client.loadapp = lambda *a, **kw: fake_app

            x = expirer.ObjectExpirer({})
            ts = Timestamp('1234')
            if should_raise:
                with self.assertRaises(internal_client.UnexpectedResponse):
                    x.delete_actual_object('/path/to/object', ts, True)
            else:
                x.delete_actual_object('/path/to/object', ts, True)
            self.assertEqual(calls[0], 1, calls)

        # object was deleted and tombstone reaped
        do_test('404 Not Found', False)
        # object was overwritten *after* the original delete, or
        # object was deleted but tombstone still exists, or ...
        do_test('409 Conflict', False)
        # Anything else, raise
        do_test('400 Bad Request', True)

    def test_delete_actual_object_returns_expected_error(self):
        def do_test(test_status, should_raise):
            calls = [0]

            def fake_app(env, start_response):
                calls[0] += 1
                start_response(test_status, [('Content-Length', '0')])
                return []

            internal_client.loadapp = lambda *a, **kw: fake_app

            x = expirer.ObjectExpirer({})
            ts = Timestamp('1234')
            if should_raise:
                with self.assertRaises(internal_client.UnexpectedResponse):
                    x.delete_actual_object('/path/to/object', ts, False)
            else:
                x.delete_actual_object('/path/to/object', ts, False)
            self.assertEqual(calls[0], 1)

        # object was deleted and tombstone reaped
        do_test('404 Not Found', True)
        # object was overwritten *after* the original expiration, or
        do_test('409 Conflict', False)
        # object was deleted but tombstone still exists, or
        # object was overwritten ahead of the original expiration, or
        # object was POSTed to with a new (or no) expiration, or ...
        do_test('412 Precondition Failed', True)

    def test_delete_actual_object_does_not_handle_odd_stuff(self):

        def fake_app(env, start_response):
            start_response(
                '503 Service Unavailable',
                [('Content-Length', '0')])
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app

        x = expirer.ObjectExpirer({})
        exc = None
        try:
            x.delete_actual_object('/path/to/object', Timestamp('1234'), False)
        except Exception as err:
            exc = err
        finally:
            pass
        self.assertEqual(503, exc.resp.status_int)

    def test_delete_actual_object_quotes(self):
        name = 'this name should get quoted'
        timestamp = Timestamp('1366063156.863045')
        x = expirer.ObjectExpirer({})
        x.swift.make_request = mock.Mock()
        x.swift.make_request.return_value.status_int = 204
        x.delete_actual_object(name, timestamp, False)
        self.assertEqual(x.swift.make_request.call_count, 1)
        self.assertEqual(x.swift.make_request.call_args[0][1],
                         '/v1/' + urllib.parse.quote(name))

    def test_delete_actual_object_queue_cleaning(self):
        name = 'something'
        timestamp = Timestamp('1515544858.80602')
        x = expirer.ObjectExpirer({})
        x.swift.make_request = mock.MagicMock()
        x.delete_actual_object(name, timestamp, False)
        self.assertEqual(x.swift.make_request.call_count, 1)
        header = 'X-Backend-Clean-Expiring-Object-Queue'
        self.assertEqual(
            x.swift.make_request.call_args[0][2].get(header),
            'no')

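    # pop_queue DELETEs the task object directly from every container
    # replica (three with FakeRing's defaults), bypassing the proxy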
    def test_pop_queue(self):
        x = expirer.ObjectExpirer({}, logger=self.logger,
                                  swift=FakeInternalClient({}))
        requests = []

        def capture_requests(ipaddr, port, method, path, *args, **kwargs):
            requests.append((method, path))
        with mocked_http_conn(
                200, 200, 200, give_connect=capture_requests) as fake_conn:
            x.pop_queue('a', 'c', 'o')
            with self.assertRaises(StopIteration):
                next(fake_conn.code_iter)
        for method, path in requests:
            self.assertEqual(method, 'DELETE')
            device, part, account, container, obj = utils.split_path(
                path, 5, 5, True)
            self.assertEqual(account, 'a')
            self.assertEqual(container, 'c')
            self.assertEqual(obj, 'o')

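    # build_task_obj and parse_task_obj are inverses:
    # build_task_obj(ts, 'a1', 'c2', 'obj1') -> '<ts>-a1/c2/obj1'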
    def test_build_task_obj_round_trip(self):
        ts = next(self.ts)
        a = 'a1'
        c = 'c2'
        o = 'obj1'
        args = (ts, a, c, o)
        self.assertEqual(args, expirer.parse_task_obj(
            expirer.build_task_obj(ts, a, c, o)))


if __name__ == '__main__':
    main()