Add a Timestamp.zero() method
...and use it to achieve better abstraction of a fixed default timestamp.

Change-Id: If871696999b1288ac5e9cba871bb818422062ffc
Signed-off-by: Alistair Coles <alistairncoles@gmail.com>
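The change is mechanical: every call site that previously spelled the default as `Timestamp(0)`, or passed a literal `0` into the constructor as a fallback, now uses the new classmethod, and the hunks below apply that across the tree. A minimal before/after sketch of the idiom, assuming a Swift tree with this patch is on the import path; the `headers` dict and variable names are illustrative only:

```python
from swift.common.utils.timestamp import Timestamp

headers = {}  # e.g. backend response headers with no x-timestamp set

# before this change: a bare zero literal as the fallback
old_style = Timestamp(headers.get('x-timestamp', 0))

# after this change: the intent ("smallest possible timestamp") is explicit
new_style = Timestamp(headers.get('x-timestamp', Timestamp.zero()))

assert old_style == new_style  # behaviour is unchanged, only the spelling
```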
@@ -146,7 +146,7 @@ class ContainerSyncRealms(object):
         :param request_method: HTTP method of the request.
         :param path: The path to the resource (url-encoded).
-        :param x_timestamp: The X-Timestamp header value for the request.
+        :param x_timestamp: (str) The X-Timestamp header value for the request.
         :param nonce: A unique value for the request.
         :param realm_key: Shared secret at the cluster operator level.
         :param user_key: Shared secret at the user's container level.
@@ -413,7 +413,7 @@ class DatabaseBroker(object):
             END;
         """)
         if not put_timestamp:
-            put_timestamp = Timestamp(0).internal
+            put_timestamp = Timestamp.zero().internal
         self._initialize(conn, put_timestamp,
                          storage_policy_index=storage_policy_index)
         conn.commit()
@@ -14,7 +14,7 @@
 # limitations under the License.

 from eventlet import Timeout
-import swift.common.utils
+from swift.common.utils.timestamp import Timestamp


 class MessageTimeout(Timeout):
@@ -93,8 +93,8 @@ class DiskFileDeleted(DiskFileNotExist):

     def __init__(self, metadata=None):
         self.metadata = metadata or {}
-        self.timestamp = swift.common.utils.Timestamp(
-            self.metadata.get('X-Timestamp', 0))
+        self.timestamp = Timestamp(
+            self.metadata.get('X-Timestamp', Timestamp.zero()))


 class DiskFileExpired(DiskFileDeleted):
@@ -1322,7 +1322,7 @@ class S3Request(swob.Request):
                                    reason='invalid_date')

        # Anyways, request timestamp should be validated
-        epoch = S3Timestamp(0)
+        epoch = S3Timestamp.zero()
        if self.timestamp < epoch:
            raise AccessDenied(reason='invalid_date')

@@ -124,8 +124,18 @@ class Timestamp(object):

     @classmethod
     def now(cls, offset=0, delta=0):
         """
         Returns an instance of a Timestamp at the current time.
         """
         return cls(time.time(), offset=offset, delta=delta)

+    @classmethod
+    def zero(cls):
+        """
+        Returns an instance of the smallest possible Timestamp.
+        """
+        return cls(0)
+
     def __repr__(self):
         return INTERNAL_FORMAT % (self.timestamp, self.offset)
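The classmethod is a thin wrapper around `cls(0)`, and the unit test added at the bottom of this commit (`test_zero`) pins down its observable behaviour. A small sketch of those properties, assuming the patched tree is importable:

```python
from swift.common.utils.timestamp import Timestamp

ts_zero = Timestamp.zero()
print(float(ts_zero))    # 0.0
print(ts_zero.offset)    # 0
print(ts_zero.internal)  # '0000000000.00000'

# for now, zero() is exactly equivalent to the old Timestamp(0) spelling,
# and it round-trips through the constructor
assert ts_zero == Timestamp(0)
assert Timestamp(ts_zero) == Timestamp.zero()
```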
@@ -588,7 +588,7 @@ class ContainerBroker(DatabaseBroker):
         :param storage_policy_index: storage policy index
         """
         if put_timestamp is None:
-            put_timestamp = Timestamp(0).internal
+            put_timestamp = Timestamp.zero().internal
         # The container_stat view is for compatibility; old versions of Swift
         # expected a container_stat table with columns "object_count" and
         # "bytes_used", but when that stuff became per-storage-policy and
@@ -77,7 +77,7 @@ def cmp_policy_info(info, remote_info):

     def has_been_recreated(info):
         return (info['put_timestamp'] > info['delete_timestamp'] >
-                Timestamp(0))
+                Timestamp.zero())

     remote_recreated = has_been_recreated(remote_info)
     recreated = has_been_recreated(info)
@@ -112,7 +112,7 @@ def incorrect_policy_index(info, remote_info):


 def translate_container_headers_to_info(headers):
-    default_timestamp = Timestamp(0).internal
+    default_timestamp = Timestamp.zero().internal
     return {
         'storage_policy_index': int(headers['X-Backend-Storage-Policy-Index']),
         'put_timestamp': headers.get('x-backend-put-timestamp',
@@ -538,7 +538,8 @@ class ContainerReconciler(Daemon):
                            'determine the destination timestamp, if any',
                            path, q_ts)
             return False
-        dest_ts = Timestamp(dest_obj.get('x-backend-timestamp', 0))
+        dest_ts = Timestamp(
+            dest_obj.get('x-backend-timestamp', Timestamp.zero()))
         if dest_ts >= q_ts:
             self.stats_log('found_object', '%r (%f) in policy_index %s '
                            'is newer than queue (%f)', path, dest_ts,
@@ -566,7 +567,8 @@ class ContainerReconciler(Daemon):
             source_obj_info = {}
             source_obj_iter = None

-        source_ts = Timestamp(source_obj_info.get('x-backend-timestamp', 0))
+        source_ts = Timestamp(
+            source_obj_info.get('x-backend-timestamp', Timestamp.zero()))
         if source_obj_status == 404 and q_op == 'DELETE':
             return self.ensure_tombstone_in_right_location(
                 q_policy_index, account, container, obj, q_ts, path,
@@ -60,15 +60,17 @@ def gen_resp_headers(info, is_deleted=False):
     """
     Convert container info dict to headers.
     """
+    ts_zero = Timestamp.zero()
     # backend headers are always included
     headers = {
-        'X-Backend-Timestamp': Timestamp(info.get('created_at', 0)).internal,
-        'X-Backend-PUT-Timestamp': Timestamp(info.get(
-            'put_timestamp', 0)).internal,
+        'X-Backend-Timestamp': Timestamp(
+            info.get('created_at', ts_zero)).internal,
+        'X-Backend-PUT-Timestamp': Timestamp(
+            info.get('put_timestamp', ts_zero)).internal,
         'X-Backend-DELETE-Timestamp': Timestamp(
-            info.get('delete_timestamp', 0)).internal,
+            info.get('delete_timestamp', ts_zero)).internal,
         'X-Backend-Status-Changed-At': Timestamp(
-            info.get('status_changed_at', 0)).internal,
+            info.get('status_changed_at', ts_zero)).internal,
         'X-Backend-Storage-Policy-Index': info.get('storage_policy_index', 0),
     }
     if not is_deleted:
@@ -76,9 +78,9 @@ def gen_resp_headers(info, is_deleted=False):
         headers.update({
             'X-Container-Object-Count': info.get('object_count', 0),
             'X-Container-Bytes-Used': info.get('bytes_used', 0),
-            'X-Timestamp': Timestamp(info.get('created_at', 0)).normal,
+            'X-Timestamp': Timestamp(info.get('created_at', ts_zero)).normal,
             'X-PUT-Timestamp': Timestamp(
-                info.get('put_timestamp', 0)).normal,
+                info.get('put_timestamp', ts_zero)).normal,
             'X-Backend-Sharding-State': info.get('db_state', UNSHARDED),
         })
     return headers
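In `gen_resp_headers` the sentinel is hoisted into a local `ts_zero`, so the header dict reads as a uniform `info.get(key, ts_zero)` pattern instead of repeating the literal `0`. A cut-down sketch of the same pattern with a made-up `info` dict and only a few of the real headers; this is not the actual server code:

```python
from swift.common.utils.timestamp import Timestamp

def sketch_resp_headers(info):
    # one shared sentinel for every possibly-missing timestamp field
    ts_zero = Timestamp.zero()
    return {
        'X-Backend-Timestamp': Timestamp(
            info.get('created_at', ts_zero)).internal,
        'X-Backend-PUT-Timestamp': Timestamp(
            info.get('put_timestamp', ts_zero)).internal,
        'X-Backend-DELETE-Timestamp': Timestamp(
            info.get('delete_timestamp', ts_zero)).internal,
    }

# with an empty info dict every header falls back to '0000000000.00000'
print(sketch_resp_headers({}))
```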
@@ -493,7 +493,8 @@ class ContainerSync(Daemon):
                     proxy=self.select_http_proxy(),
                     logger=self.logger,
                     retries=0)
-                remote_ts = Timestamp(metadata.get('x-timestamp', 0))
+                remote_ts = Timestamp(
+                    metadata.get('x-timestamp', Timestamp.zero()))
                 self.logger.debug("remote obj timestamp %s local obj %s" %
                                   (timestamp.internal, remote_ts.internal))
                 if timestamp <= remote_ts:
@@ -590,7 +591,8 @@ class ContainerSync(Daemon):
                                row['name']))
                 return True

-            timestamp = Timestamp(headers.get('x-timestamp', 0))
+            timestamp = Timestamp(
+                headers.get('x-timestamp', Timestamp.zero()))
             if timestamp < ts_meta:
                 if exc:
                     raise exc
@@ -689,7 +689,8 @@ class ObjectController(BaseStorageServer):
             return HTTPNotFound(request=request)
         except DiskFileStateChanged:
             return HTTPServiceUnavailable(request=request)
-        orig_timestamp = Timestamp(orig_metadata.get('X-Timestamp', 0))
+        orig_timestamp = Timestamp(
+            orig_metadata.get('X-Timestamp', Timestamp.zero()))
         orig_ctype_timestamp = disk_file.content_type_timestamp
         req_ctype_time = '0'
         req_ctype = request.headers.get('Content-Type')
@@ -847,7 +848,7 @@ class ObjectController(BaseStorageServer):
         except (DiskFileNotExist, DiskFileQuarantined,
                 DiskFileStateChanged):
             orig_metadata = {}
-            orig_timestamp = Timestamp(0)
+            orig_timestamp = Timestamp.zero()
         # Checks for If-None-Match
         if request.if_none_match is not None and orig_metadata:
             if '*' in request.if_none_match:
@@ -1309,7 +1310,8 @@ class ObjectController(BaseStorageServer):
         else:
             response_class = HTTPConflict
         response_timestamp = max(orig_timestamp, req_timestamp)
-        orig_delete_at = Timestamp(orig_metadata.get('X-Delete-At') or 0)
+        orig_delete_at = Timestamp(
+            orig_metadata.get('X-Delete-At') or Timestamp.zero())
         try:
             req_if_delete_at_val = request.headers['x-if-delete-at']
             req_if_delete_at = Timestamp(req_if_delete_at_val)
@@ -1176,7 +1176,8 @@ class GetterSource(object):
         return Timestamp(self.resp.getheader('x-backend-data-timestamp') or
                          self.resp.getheader('x-backend-timestamp') or
                          self.resp.getheader('x-put-timestamp') or
-                         self.resp.getheader('x-timestamp') or 0)
+                         self.resp.getheader('x-timestamp') or
+                         Timestamp.zero())

     @property
     def parts_iter(self):
@@ -1412,7 +1413,7 @@ class GetOrHeadHandler(GetterBase):
         self.used_nodes = []
         self.used_source_etag = None
         self.concurrency = concurrency
-        self.latest_404_timestamp = Timestamp(0)
+        self.latest_404_timestamp = Timestamp.zero()
         policy_options = self.app.get_policy_options(self.policy)
         self.rebalance_missing_suppression_count = min(
             policy_options.rebalance_missing_suppression_count,
@@ -1601,7 +1602,8 @@ class GetOrHeadHandler(GetterBase):
                 src_headers.get('x-backend-data-timestamp') or
                 src_headers.get('x-backend-timestamp') or
                 src_headers.get('x-put-timestamp') or
-                src_headers.get('x-timestamp') or 0)
+                src_headers.get('x-timestamp') or
+                Timestamp.zero())
             if ps_timestamp >= self.latest_404_timestamp:
                 self.statuses.append(possible_source.status)
                 self.reasons.append(possible_source.reason)
@@ -1615,14 +1617,16 @@ class GetOrHeadHandler(GetterBase):
            if 'handoff_index' in node and \
                    (is_server_error(possible_source.status) or
                     possible_source.status == HTTP_NOT_FOUND) and \
-                    not Timestamp(src_headers.get('x-backend-timestamp', 0)):
+                    not Timestamp(src_headers.get('x-backend-timestamp',
+                                                  Timestamp.zero())):
                # throw out 5XX and 404s from handoff nodes unless the data is
                # really on disk and had been DELETEd
                return False

            if self.rebalance_missing_suppression_count > 0 and \
                    possible_source.status == HTTP_NOT_FOUND and \
-                    not Timestamp(src_headers.get('x-backend-timestamp', 0)):
+                    not Timestamp(src_headers.get('x-backend-timestamp',
+                                                  Timestamp.zero())):
                self.rebalance_missing_suppression_count -= 1
                return False

@@ -1638,7 +1642,8 @@ class GetOrHeadHandler(GetterBase):
            if self.server_type == 'Object' and \
                    possible_source.status == HTTP_NOT_FOUND:
                hdrs = HeaderKeyDict(possible_source.getheaders())
-                ts = Timestamp(hdrs.get('X-Backend-Timestamp', 0))
+                ts = Timestamp(hdrs.get('X-Backend-Timestamp',
+                                        Timestamp.zero()))
                if ts > self.latest_404_timestamp:
                    self.latest_404_timestamp = ts
            self.app.check_response(node, self.server_type, possible_source,
@@ -2705,7 +2705,8 @@ class ECFragGetter(GetterBase):
        if 'handoff_index' in node and \
                (is_server_error(possible_source.status) or
                 possible_source.status == HTTP_NOT_FOUND) and \
-                not Timestamp(src_headers.get('x-backend-timestamp', 0)):
+                not Timestamp(
+                    src_headers.get('x-backend-timestamp', Timestamp.zero())):
            # throw out 5XX and 404s from handoff nodes unless the data is
            # really on disk and had been DELETEd
            self.logger.debug('Ignoring %s from handoff' %
@@ -3010,7 +3011,9 @@ class ECObjectController(BaseObjectController):
                    t_obj = bad_resp_headers.get(
                        'X-Backend-Timestamp',
                        bad_resp_headers.get('X-Timestamp'))
-                    bad_ts = Timestamp(t_data_file or t_obj or '0')
+                    bad_ts = Timestamp(t_data_file or
+                                       t_obj or
+                                       Timestamp.zero())
                    if bad_ts <= best_bucket.timestamp:
                        # We have reason to believe there's still good data
                        # out there, it's just currently unavailable
@@ -3018,7 +3021,8 @@ class ECObjectController(BaseObjectController):
                    if getter.status:
                        timestamp = Timestamp(getter.last_headers.get(
                            'X-Backend-Timestamp',
-                            getter.last_headers.get('X-Timestamp', 0)))
+                            getter.last_headers.get(
+                                'X-Timestamp', Timestamp.zero())))
                        if (rebalance_missing_suppression_count > 0 and
                                getter.status == HTTP_NOT_FOUND and
                                not timestamp):
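In the proxy hunks above the zero default is doing real work: the handoff and rebalance-suppression checks test `not Timestamp(...)`, so a missing `X-Backend-Timestamp` has to parse to a falsy timestamp, and `Timestamp.zero()` keeps that property exactly as the old literal `0` did. A minimal sketch of that idiom with hypothetical header dicts, assuming (as the existing code does) that a zero Timestamp evaluates as falsy:

```python
from swift.common.utils.timestamp import Timestamp

def reported_on_disk_timestamp(src_headers):
    # True only if the backend reported a real on-disk timestamp;
    # a missing header falls back to the falsy zero Timestamp.
    return bool(Timestamp(src_headers.get('x-backend-timestamp',
                                          Timestamp.zero())))

print(reported_on_disk_timestamp({}))  # False: no header, zero fallback
print(reported_on_disk_timestamp(
    {'x-backend-timestamp': '1712345678.12345'}))  # True
```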
@@ -23,6 +23,7 @@ from unittest import mock

 from swift.common import swob
 from swift.common.middleware import container_sync
+from swift.common.utils.timestamp import Timestamp
 from swift.proxy.controllers.base import get_cache_key
 from swift.proxy.controllers.info import InfoController

@@ -255,6 +256,24 @@ cluster_dfw1 = http://dfw1.host/v1/
         self.assertIn('swift.slo_override', req.environ)
         self.assertIn('swift.symlink_override', req.environ)

+    def test_valid_sig3(self):
+        ts = Timestamp.zero()
+        sig = self.sync.realms_conf.get_sig(
+            'GET', '/v1/a/c', ts.internal, 'nonce',
+            self.sync.realms_conf.key2('US'), 'abc')
+        req = swob.Request.blank(
+            '/v1/a/c', headers={'x-container-sync-auth': 'US nonce ' + sig,
+                                'x-timestamp': ts.internal})
+        infocache = req.environ.setdefault('swift.infocache', {})
+        infocache[get_cache_key('a', 'c')] = {'sync_key': 'abc'}
+        resp = req.get_response(self.sync)
+        self.assertEqual(resp.status, '200 OK')
+        self.assertEqual(resp.body, b'Response to Authorized Request')
+        self.assertIn('cs:valid', req.environ.get('swift.log_info'))
+        self.assertIn('swift.authorize_override', req.environ)
+        self.assertIn('swift.slo_override', req.environ)
+        self.assertIn('swift.symlink_override', req.environ)
+
     def test_info(self):
         req = swob.Request.blank('/info')
         resp = req.get_response(self.sync)
@@ -353,6 +353,31 @@ class TestExampleBroker(TestDbBase):
     def setUp(self):
         super(TestExampleBroker, self).setUp()

+    def test_initialize(self):
+        broker = self.broker_class(self.db_path, account='a', container='c')
+        put_timestamp = next(self.ts)
+        created_at = next(self.ts)
+        with patch('swift.common.db.Timestamp.now', return_value=created_at):
+            broker.initialize(put_timestamp.internal)
+        info = broker.get_info()
+        self.assertEqual(info['created_at'], created_at.internal)
+        self.assertEqual(info['put_timestamp'], put_timestamp.internal)
+        self.assertEqual(info['delete_timestamp'], '0')
+        self.assertEqual(info['status_changed_at'], put_timestamp.internal)
+        self.assertFalse(broker.is_deleted())
+
+    def test_initialize_default_put_timestamp(self):
+        broker = self.broker_class(self.db_path, account='a', container='c')
+        created_at = next(self.ts)
+        with patch('swift.common.db.Timestamp.now', return_value=created_at):
+            broker.initialize()
+        info = broker.get_info()
+        self.assertEqual(info['created_at'], created_at.internal)
+        self.assertEqual(info['put_timestamp'], Timestamp.zero().internal)
+        self.assertEqual(info['delete_timestamp'], '0')
+        self.assertEqual(info['status_changed_at'], Timestamp.zero().internal)
+        self.assertFalse(broker.is_deleted())
+
     def test_delete_db(self):
         broker = self.broker_class(self.db_path, account='a', container='c')
         broker.initialize(next(self.ts).internal)
@@ -25,12 +25,25 @@ from swift.common.utils import timestamp

 class TestTimestamp(unittest.TestCase):
     """Tests for swift.common.utils.timestamp.Timestamp"""
+    def test_zero(self):
+        ts_zero = timestamp.Timestamp.zero()
+        self.assertEqual(0.0, float(ts_zero))
+        self.assertEqual(0, ts_zero.offset)
+        self.assertEqual(timestamp.Timestamp.zero(),
+                         timestamp.Timestamp(timestamp.Timestamp.zero()))
+        self.assertEqual(ts_zero.internal, '0000000000.00000')
+        # for now this is true...
+        self.assertEqual(timestamp.Timestamp(0), ts_zero)
+
     def test_invalid_input(self):
         with self.assertRaises(ValueError):
             timestamp.Timestamp(time.time(), offset=-1)
         with self.assertRaises(ValueError):
             timestamp.Timestamp('123.456_78_90')
         with self.assertRaises(ValueError):
             timestamp.Timestamp('')
         with self.assertRaises(TypeError):
             timestamp.Timestamp(None)

     def test_invalid_string_conversion(self):
         t = timestamp.Timestamp.now()